//
//  SecondViewController.swift
//  ColorBlinds
//
//  Created by Jordi de Kock on 28-08-16.
//  Copyright © 2016 Jordi de Kock. All rights reserved.
//

import UIKit
import AVFoundation

@available(iOS 11.0, *)
class SecondViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    var testImage: UIImageView!
    var session: AVCaptureSession!
    var device: AVCaptureDevice!
    var output: AVCaptureVideoDataOutput!

    override func viewDidLoad() {
        super.viewDidLoad()

        self.title = "Real Time"
        self.view.backgroundColor = .white

        // Preview area: full width minus 20pt margins; "2" is a placeholder
        // until live frames start arriving from the delegate callback.
        testImage = UIImageView(frame: CGRect(x: 20, y: 20, width: self.view.bounds.size.width - 40, height: 540))
        testImage.image = UIImage(named: "2")
        testImage.clipsToBounds = true
        testImage.contentMode = .scaleAspectFill
        self.view.addSubview(testImage)

        setupCaptureSession()
    }

    // MARK: - Camera capture session setup

    /// Builds the capture pipeline (back wide-angle camera → 32BGRA video frames
    /// delivered to `captureOutput(_:didOutput:from:)` on a serial queue) and
    /// starts the session. Logs and bails out early on any configuration failure.
    private func setupCaptureSession() {
        self.session = AVCaptureSession()
        self.session.sessionPreset = AVCaptureSession.Preset.vga640x480 // does not work in the iOS simulator

        guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back) else {
            print("no device")
            return
        }
        self.device = device

        do {
            let input = try AVCaptureDeviceInput(device: self.device)
            // FIX: check canAddInput before adding — addInput(_:) raises if the
            // session rejects the input (mirrors the canAddOutput check below).
            guard self.session.canAddInput(input) else {
                print("could not add a session input")
                return
            }
            self.session.addInput(input)
        } catch {
            print("no device input")
            return
        }

        self.output = AVCaptureVideoDataOutput()
        // 32BGRA so the pixel buffer can be wrapped directly by CIImage.
        self.output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
        let queue = DispatchQueue(label: "videocapturequeue")
        self.output.setSampleBufferDelegate(self, queue: queue)
        self.output.alwaysDiscardsLateVideoFrames = true

        if self.session.canAddOutput(self.output) {
            self.session.addOutput(self.output)
        } else {
            print("could not add a session output")
            return
        }

        do {
            try self.device.lockForConfiguration()
            self.device.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: 20) // cap delivery at 20 fps
            self.device.unlockForConfiguration()
        } catch {
            print("could not configure a device")
            return
        }

        // FIX: startRunning() blocks until capture is live, so keep it off the
        // main thread (the original called it synchronously from viewDidLoad).
        queue.async { [weak self] in
            self?.session.startRunning()
        }
    }

    // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    /// Called on the capture queue for every frame: converts the pixel buffer to
    /// a portrait-oriented UIImage and shows it in `testImage` on the main thread.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let buffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            print("could not get a pixel buffer")
            return
        }

        CVPixelBufferLockBaseAddress(buffer, CVPixelBufferLockFlags.readOnly)
        // FIX: the original never unlocked the buffer — every lock must be
        // balanced with an unlock, or the pool's buffers are leaked/held.
        defer { CVPixelBufferUnlockBaseAddress(buffer, CVPixelBufferLockFlags.readOnly) }

        // .right rotates the landscape sensor output into portrait orientation.
        let image = CIImage(cvPixelBuffer: buffer).oriented(CGImagePropertyOrientation.right)
        let capturedImage = UIImage(ciImage: image)

        // UIKit must only be touched on the main thread; capture weakly so an
        // in-flight frame does not keep a dismissed controller alive.
        DispatchQueue.main.async { [weak self] in
            self?.testImage.image = capturedImage
        }
    }
}
