//
//  CameraViewController.swift
//  GifUsage
//
//  Created by han on 2021/4/19.
//  Copyright © 2021 Ting Han. All rights reserved.
//

import UIKit
import CoreMedia
import AVFoundation

/// Receives raw capture output from `CameraViewController`.
/// NOTE(review): callbacks are invoked from the private video/audio capture
/// queues (see `captureOutput(_:didOutput:from:)`), not the main thread.
protocol CameraViewDelegate: NSObjectProtocol {
   /// Called with every captured video sample buffer.
   func captureOutPutBuffer(_ sampleBuffer: CMSampleBuffer)
   /// Called with every captured audio sample buffer.
   func updateAudioSampleBuffer_ex(_ sampleBuffer: CMSampleBuffer)
}

// 相机调用
// MARK: - Camera capture
/// Hosts an `AVCaptureSession` for camera/microphone capture and renders
/// incoming video frames through an `AVSampleBufferDisplayLayer`.
class CameraViewController: UIViewController {
    /// Receives raw video/audio sample buffers from the capture session.
    weak var cameraDelegate: CameraViewDelegate?
    
    // Preview surface; `sampleBufferDisplayLayer` is added as its sublayer.
    private var replaceView: UIView = UIView()
    var sampleBufferDisplayLayer = AVSampleBufferDisplayLayer()
    
    // Capture-session plumbing (built in `setupSession()` off the main thread).
    private var captureSession: AVCaptureSession?
    private var videoInput: AVCaptureDeviceInput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private var audioOutput: AVCaptureAudioDataOutput?
    private var avCaptureDevice: AVCaptureDevice?
    private var videoConnection: AVCaptureConnection?
    private var audioConnection: AVCaptureConnection?
    
    // NOTE(review): never assigned in this file — candidate for removal.
    private var capturePreset: AVCaptureSession.Preset?
    /// Gate checked in the data-output callbacks; starts paused until `startCapture()`.
    private var isCapturePaused: Bool = true
    
    // Serial delivery queues for the video/audio data-output delegates.
    // (`attributes: .init(rawValue: 0)` in the old code was just the default
    // serial configuration, so the plain initializer is equivalent.)
    private let videoOperationQueue = DispatchQueue(label: "com.etoutiao.reminder.videocapture", qos: .userInteractive)
    private let audioOperationQueue = DispatchQueue(label: "com.etoutiao.reminder.audiocapture", qos: .default)
    
    /// Bi-planar 4:2:0 full-range YUV output format for the video data output.
    private let pixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
    
    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .black
        navigationItem.title = "相机启动了吗"
        commonInit()
    }
}

// MARK: - Public capture controls
extension CameraViewController {
    /// Resumes buffer delivery and starts the session if it is not already running.
    func startCapture() {
        isCapturePaused = false
        guard let session = captureSession, !session.isRunning else { return }
        session.startRunning()
    }
    
    /// Stops forwarding buffers to the delegate but keeps the session running.
    func pauseCapture() {
        isCapturePaused = true
    }
    
    /// Stops the session and tears down all inputs/outputs.
    /// NOTE(review): the audio *input* added in `setupAudioSession()` is not
    /// removed here; harmless because the whole session is discarded below.
    func stopCapture() {
        isCapturePaused = true
        guard let session = captureSession else { return }
        
        session.stopRunning()
        videoOutput?.setSampleBufferDelegate(nil, queue: nil)
        audioOutput?.setSampleBufferDelegate(nil, queue: nil)
        if let videoInput = videoInput {
            session.removeInput(videoInput)
        }
        if let videoOutput = videoOutput {
            session.removeOutput(videoOutput)
        }
        if let audioOutput = audioOutput {
            session.removeOutput(audioOutput)
        }
        
        videoInput = nil
        videoOutput = nil
        audioOutput = nil
        captureSession = nil
        avCaptureDevice = nil
    }
    
    /// Switches between the front and back cameras, preserving the
    /// orientation/mirroring configuration of the video connection.
    func rotateCapture() {
        captureSession?.stopRunning()
        
        let currentPosition = videoInput?.device.position
        let targetPosition: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
        // Fix: the discovery session previously requested only `.front` devices,
        // so toggling to the back camera could never find a matching device.
        let devices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                       mediaType: .video,
                                                       position: .unspecified).devices
        if let device = devices.first(where: { $0.position == targetPosition }) {
            avCaptureDevice = device
        }
        
        if let device = avCaptureDevice, let newInput = try? AVCaptureDeviceInput(device: device) {
            captureSession?.beginConfiguration()
            captureSession?.sessionPreset = .hd1280x720
            if let input = videoInput {
                captureSession?.removeInput(input)
            }
            if captureSession?.canAddInput(newInput) == true {
                captureSession?.addInput(newInput)
                videoInput = newInput
            } else if let input = videoInput {
                // Could not attach the new camera — restore the previous input.
                captureSession?.addInput(input)
            }
            
            if let connection = videoOutput?.connection(with: .video) {
                videoConnection = connection
                if connection.isVideoOrientationSupported {
                    connection.videoOrientation = .portrait // keep the image upright
                }
                // Mirror the image only for the front camera.
                connection.isVideoMirrored = device.position == .front
            }
            captureSession?.commitConfiguration()
        }
        
        captureSession?.startRunning()
    }
    
    /// Sets the zoom factor, clamped to the device's supported range
    /// (an out-of-range value would otherwise raise an Objective-C exception).
    func focusChange(_ value: CGFloat) {
        guard let device = avCaptureDevice else { return }
        do {
            try device.lockForConfiguration()
            device.videoZoomFactor = min(max(value, device.minAvailableVideoZoomFactor),
                                         device.maxAvailableVideoZoomFactor)
            device.unlockForConfiguration()
        } catch {
            return
        }
    }
    
    /// Moves the focus/exposure point of interest.
    /// - Parameter point: point in the device's normalized [0,1]x[0,1] space.
    func focusPointChanged(_ point: CGPoint) {
        guard let device = avCaptureDevice else { return } // was force-unwrapped
        setFocusPointOnDevice(device: device, point: point)
    }
    
    func setFocusPointOnDevice(device: AVCaptureDevice, point: CGPoint) {
        do {
            try device.lockForConfiguration()
            if device.isFocusModeSupported(.autoFocus) {
                // Per Apple's docs the point of interest must be set *before*
                // changing the mode for the new point to take effect.
                device.focusPointOfInterest = point
                device.focusMode = .autoFocus
            }
            if device.isExposureModeSupported(.continuousAutoExposure) {
                device.exposurePointOfInterest = point
                device.exposureMode = .continuousAutoExposure
            }
            device.unlockForConfiguration()
        } catch {
            // Best-effort: focus/exposure simply stays unchanged.
        }
    }
    
    /// Renders a video sample buffer into the preview layer, recovering
    /// from a failed layer state before enqueueing.
    func updateRenderSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
        if sampleBufferDisplayLayer.status == .failed {
            sampleBufferDisplayLayer.flushAndRemoveImage()
        }
        
        if sampleBufferDisplayLayer.isReadyForMoreMediaData {
            sampleBufferDisplayLayer.enqueue(sampleBuffer)
        }
    }
    
    // Placeholder for a face-landmark overlay; currently disabled.
    private func drawFaceLmkPointsToSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
//        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
//            return
//        }
    }
    
    /// Pass-through to the video output's recommended AVAssetWriter settings.
    func recommendedVideoSettingsForAssetWriter(writingTo outputFileType: AVFileType) -> [String: Any]? {
        return videoOutput?.recommendedVideoSettingsForAssetWriter(writingTo: outputFileType)
    }
    
    /// Pass-through to the audio output's recommended AVAssetWriter settings.
    func recommendedAudioSettingsForAssetWriter(writingTo outputFileType: AVFileType) -> [String: Any]? {
        return audioOutput?.recommendedAudioSettingsForAssetWriter(writingTo: outputFileType) as? [String: Any]
    }
}

// MARK: - Session setup
private extension CameraViewController {
    /// Builds the preview on the main thread and configures the capture
    /// session off the main thread (session configuration can be slow).
    func commonInit() {
        setupPreview()
        DispatchQueue.global().async { [weak self] in
            self?.setupSession()
        }
    }
    
    /// Adds the backing view and the `AVSampleBufferDisplayLayer` used to
    /// render incoming frames.
    func setupPreview() {
        replaceView.backgroundColor = .black
        replaceView.frame = UIScreen.main.bounds
        view.addSubview(replaceView)
        sampleBufferDisplayLayer.frame = replaceView.bounds
        sampleBufferDisplayLayer.videoGravity = .resizeAspectFill
        sampleBufferDisplayLayer.isOpaque = true
        replaceView.layer.addSublayer(sampleBufferDisplayLayer)
    }
    
    /// Creates the session, attaches video + audio I/O, and locks in a
    /// 1280x720 preset at 30 fps.
    func setupSession() {
        let session = AVCaptureSession()
        captureSession = session
        session.beginConfiguration()
        
        setupVideoSession()
        setupAudioSession()
        
        session.sessionPreset = .hd1280x720
        session.commitConfiguration()
        captureFrameRate(30)
    }
    
    /// Attaches the front wide-angle camera input and a YUV video data output.
    func setupVideoSession() {
        guard let session = captureSession else { return }
        
        let devices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                       mediaType: .video,
                                                       position: .front).devices
        // Bail out (instead of crashing) when no front camera exists,
        // e.g. on the simulator.
        guard let inputCamera = devices.first(where: { $0.position == .front }) else { return }
        avCaptureDevice = inputCamera
        
        // Camera input.
        videoInput = try? AVCaptureDeviceInput(device: inputCamera)
        if let videoInput = videoInput, session.canAddInput(videoInput) {
            session.addInput(videoInput)
        }
        
        // Video data output delivering 4:2:0 full-range YUV frames.
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: pixelFormatType,
        ]
        output.setSampleBufferDelegate(self, queue: videoOperationQueue)
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
        videoOutput = output
        
        if let connection = output.connection(with: .video) {
            videoConnection = connection
            if connection.isVideoOrientationSupported {
                connection.videoOrientation = .portrait // keep the image upright
            }
            // Mirror the image for the front camera.
            connection.isVideoMirrored = inputCamera.position == .front
        }
    }
    
    /// Attaches the default microphone input and an audio data output.
    func setupAudioSession() {
        guard let session = captureSession,
              let device = AVCaptureDevice.default(for: .audio) else { return }
        if let input = try? AVCaptureDeviceInput(device: device), session.canAddInput(input) {
            session.addInput(input)
        }
        
        let output = AVCaptureAudioDataOutput()
        output.setSampleBufferDelegate(self, queue: audioOperationQueue)
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
        audioOutput = output
        audioConnection = output.connection(with: .audio)
    }
    
    /// Pins both the min and max frame duration to `frameRate` fps.
    func captureFrameRate(_ frameRate: Int) {
        guard let device = avCaptureDevice else { return } // was force-unwrapped
        do {
            try device.lockForConfiguration()
            device.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: Int32(frameRate))
            device.activeVideoMinFrameDuration = CMTimeMake(value: 1, timescale: Int32(frameRate))
            device.unlockForConfiguration()
        } catch {
            // Best-effort: keep the device's default frame rate.
        }
    }
}
// MARK: - AVCapture data-output delegates
extension CameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    /// Forwards live sample buffers to `cameraDelegate` on the matching queue,
    /// dropping everything while paused or while the session is not running.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard !isCapturePaused, captureSession?.isRunning == true else { return }
        
        if output === videoOutput {
            videoOperationQueue.async { [weak self] in
                self?.cameraDelegate?.captureOutPutBuffer(sampleBuffer)
            }
        } else if output === audioOutput {
            audioOperationQueue.async { [weak self] in
                self?.cameraDelegate?.updateAudioSampleBuffer_ex(sampleBuffer)
            }
        }
    }
    
    /// Dropped frames are intentionally ignored.
    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    }
}
