//
//  LiveShowVideoCapture.swift
//  VideoPhotoCapture
//
//  Created by 杨伟诺 on 2020/9/26.
//

import UIKit
import AVFoundation

/// Identifies which kind of sample buffer is being forwarded to the delegate.
/// The raw value is the string passed through `captureSampleBuffer(_:type:)`.
enum LiveShowVideoCaptureType: String {
    /// A video sample buffer.
    case video = "video"
    /// An audio sample buffer.
    case audio = "audio"
}

/// Callback interface for receiving raw sample buffers from `LiveShowVideoCapture`.
/// Declared `@objc` so the requirement can be `optional` and the delegate can be held `weak`.
@objc protocol LiveShowVideoCaptureDelegate{
    
    /// Called for every captured sample buffer.
    /// - Parameters:
    ///   - buffer: The raw `CMSampleBuffer` delivered by the capture output.
    ///   - type: Raw value of `LiveShowVideoCaptureType` ("video" or "audio").
    @objc optional func captureSampleBuffer( _ buffer: CMSampleBuffer, type: String)
}

class LiveShowVideoCapture: VideoCaptureManager {
    
    /// Receives every captured audio/video sample buffer. Weak to avoid a retain cycle.
    weak var delegate: LiveShowVideoCaptureDelegate?

    /// Shared instance used across the live-show feature.
    static var liveShowmanager: LiveShowVideoCapture = LiveShowVideoCapture()
    
    private var videoDataOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
    
    private var audioDataOutput: AVCaptureAudioDataOutput = AVCaptureAudioDataOutput()
    
    // Kept so `captureOutput` can tell audio buffers from video buffers.
    private var audioConnection: AVCaptureConnection?
    
    private var videoConnection: AVCaptureConnection?
    
    /// Serial queue on which both sample-buffer delegates are called back.
    private var liveShowQueue = DispatchQueue.init(label: "liveShowQueue")
    
    /// Preview layer bound to the capture session (portrait, aspect-fill).
    lazy var liveLayer: AVCaptureVideoPreviewLayer = {
        
        let l = AVCaptureVideoPreviewLayer.init(session: session)
        l.videoGravity = .resizeAspectFill
        l.connection?.videoOrientation = .portrait
        return l
    }()
    
    /// Host view for `liveLayer`; the layer is attached once video setup completes.
    lazy var liveView: UIView = UIView()
    
    var isStartRunning: Bool = false
    
    /// Captured video dimensions in portrait orientation (set by `setVideoPreset`).
    var width: Int = 0
    var height: Int = 0
    
    /// Configures audio and video inputs/outputs and starts the session.
    ///
    /// Both setups run asynchronously on `sessionQueue`, so the success check
    /// is queued on the same serial queue AFTER them; checking synchronously
    /// here would race with the still-running configuration.
    func configurationLiveSession() {
        setupAudio()
        setupVideo()
        sessionQueue.async { [self] in
            guard sessionSetup == .success else { return }
            startCapture()
        }
    }
    
    /// Starts the capture session if it is not already running.
    func startCapture(){
        if !session.isRunning{
            session.startRunning()
        }
        isStartRunning = true
    }
    
    /// Stops the capture session if it is running.
    func stopCapture(){
        if session.isRunning{
            session.stopRunning()
        }
        isStartRunning = false
    }
    
    /// Adds the best available camera input plus a video data output, then
    /// chooses a preset, fixes the orientation/frame rate and attaches the
    /// preview layer. Runs on `sessionQueue`; failures set `sessionSetup`.
    private func setupVideo(){
        
        sessionQueue.async { [self] in
            session.beginConfiguration()
            
            // Preference order: back dual camera > back wide angle > front wide angle.
            let videoDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
                ?? AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
                ?? AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
            
            guard let vd = videoDevice,
                  let vdInput = try? AVCaptureDeviceInput.init(device: vd),
                  session.canAddInput(vdInput) else {
                sessionSetup = .configurationFailed
                session.commitConfiguration()
                return
            }
            session.addInput(vdInput)
            self.videoDeviceInput = vdInput
            
            videoDataOutput.setSampleBufferDelegate(self, queue: liveShowQueue)
            videoDataOutput.alwaysDiscardsLateVideoFrames = true
            // NV12 full-range output.
            videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]
            
            if session.canAddOutput(videoDataOutput){
                session.addOutput(videoDataOutput)
            }
            setVideoPreset()
            session.commitConfiguration()
            
            // The connection only exists after the output has been added.
            videoConnection = videoDataOutput.connection(with: .video)
            videoConnection?.videoOrientation = .portrait
            
            setupFps(25)
            
            // UIKit work must happen on the main thread.
            DispatchQueue.main.async {[self] in
                liveView.layer.addSublayer(liveLayer)
            }
        }
    }
    
    /// Picks the highest session preset the device supports and records the
    /// resulting portrait dimensions in `width`/`height`.
    private func setVideoPreset(){
        
        if session.canSetSessionPreset(AVCaptureSession.Preset.hd4K3840x2160){
            session.sessionPreset = .hd4K3840x2160
            width = 2160; height = 3840
        }else if session.canSetSessionPreset(AVCaptureSession.Preset.hd1920x1080){
            session.sessionPreset = .hd1920x1080
            width = 1080; height = 1920
        }else if session.canSetSessionPreset(AVCaptureSession.Preset.hd1280x720){
            session.sessionPreset = .hd1280x720
            width = 720; height = 1280
        }else if session.canSetSessionPreset(AVCaptureSession.Preset.iFrame1280x720){
            session.sessionPreset = .iFrame1280x720
            width = 720; height = 1280
        }else{
            session.sessionPreset = .low
            // NOTE(review): .low does not actually capture at 480x720 — these
            // values look like a placeholder fallback; confirm against consumers.
            width = 480; height = 720
        }
    }
    
    /// Pins the capture frame rate to `fps` when the active format supports it.
    /// No-op for non-positive `fps` or rates above the format's maximum.
    private func setupFps(_ fps: Int){
        
        let device = videoDeviceInput.device
        
        let maxRate = device.activeFormat.videoSupportedFrameRateRanges.first?.maxFrameRate ?? 0.0
        
        guard fps > 0, Int(maxRate) >= fps else { return }
        do {
            try device.lockForConfiguration()
            // 1/fps seconds per frame; min == max pins the rate exactly.
            let frameDuration = CMTime.init(value: 1, timescale: CMTimeScale(fps))
            device.activeVideoMinFrameDuration = frameDuration
            device.activeVideoMaxFrameDuration = frameDuration
            device.unlockForConfiguration()
        } catch {
            // Surface the failure instead of swallowing it silently; capture
            // continues at the device's default frame rate.
            print("LiveShowVideoCapture: failed to lock device for fps change: \(error)")
        }
    }
    
    /// Adds the default microphone input and an audio data output.
    ///
    /// Runs on `sessionQueue` — NOT `liveShowQueue` — so audio and video
    /// configuration are serialized on one queue and their
    /// begin/commitConfiguration pairs can never interleave.
    private func setupAudio(){
        
        guard let audioDv = AVCaptureDevice.default(for: .audio) else {return}
        sessionQueue.async { [self] in
            session.beginConfiguration()
            defer { session.commitConfiguration() }
            do {
                let audioInput = try AVCaptureDeviceInput.init(device: audioDv)
                audioDataOutput.setSampleBufferDelegate(self, queue: liveShowQueue)
                if session.canAddInput(audioInput){
                    session.addInput(audioInput)
                    audioDeviceInput = audioInput
                }
                if session.canAddOutput(audioDataOutput){
                    session.addOutput(audioDataOutput)
                }
                audioConnection = audioDataOutput.connection(with: .audio)
            } catch {
                // Best effort: capture proceeds without audio, matching the
                // original behavior, but the reason is no longer invisible.
                print("LiveShowVideoCapture: failed to create audio input: \(error)")
            }
        }
    }
    
}

// MARK: - Sample-buffer delegates
extension LiveShowVideoCapture: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate{
    
    /// Routes each captured sample buffer to the delegate, tagged as audio or
    /// video according to the connection it arrived on. Buffers from any other
    /// connection are dropped.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        
        let bufferType: LiveShowVideoCaptureType?
        if connection == audioConnection {
            bufferType = .audio
        } else if connection == videoConnection {
            bufferType = .video
        } else {
            bufferType = nil
        }
        guard let t = bufferType else { return }
        delegate?.captureSampleBuffer?(sampleBuffer, type: t.rawValue)
    }
    
}
