//
//  AVCaptureEngine.swift
//  onemeetingmac
//
//  Created by 张衡 on 2020/6/16.
//  Copyright © 2020 onemeeting. All rights reserved.
//

import AVFoundation
import Cocoa
import Foundation

/// Delegate for capture-engine events.
/// NOTE: the name keeps its historical misspelling ("Delegte") for source
/// compatibility with existing conformers; new code should use the
/// correctly-spelled `AVCaptureEngineDelegate` alias below.
@objc protocol AVCaptureEngineDelegte {
    /// Called once per audio channel of each captured audio sample buffer,
    /// so observers can read level metering (average/peak power).
    @objc optional func avcaptureEngine_capAudio(channel:AVCaptureAudioChannel)
}

/// Correctly-spelled alias for `AVCaptureEngineDelegte`; prefer in new code.
typealias AVCaptureEngineDelegate = AVCaptureEngineDelegte

/// Singleton capture engine wrapping an `AVCaptureSession`: wires camera and
/// microphone inputs/outputs, drives a preview layer, supports movie-file
/// recording, and converts video sample buffers to `CGImage`s.
class AVCaptureEngine: NSObject {
    /// Shared singleton instance.
    static let shared = AVCaptureEngine()

    /// Receives per-channel audio callbacks; weak to avoid a retain cycle.
    weak var delegate:AVCaptureEngineDelegte?

    /// Host view for the preview layer (assigned externally).
    var view:NSView!

    // The capture session must be strongly referenced or it is deallocated.
    lazy var session: AVCaptureSession = {
        return AVCaptureSession()
    }()

    // Video data output device (replaced inside setupVideoInputOutput()).
    lazy var videoDataOutput: AVCaptureVideoDataOutput = {
        return AVCaptureVideoDataOutput()
    }()

    // Audio data output device (replaced inside setupAudioInputOutput()).
    lazy var audioDataOutput: AVCaptureAudioDataOutput = {
        return AVCaptureAudioDataOutput()
    }()

    // Movie-file recording output: capped at 60 s, recording stops when less
    // than 1 MiB of disk remains free.
    lazy var movieOutput: AVCaptureMovieFileOutput = {
        let output = AVCaptureMovieFileOutput()
        output.maxRecordedDuration = CMTimeMakeWithSeconds(60, preferredTimescale: 30)
        // Fixed: was 1024*1025 — 1 MiB is 1024*1024 bytes.
        output.minFreeDiskSpaceLimit = 1024 * 1024
        return output
    }()

    // Preview layer bound to `session` (replaced in setupCaptureVideoPreview).
    lazy var previewLayer: AVCaptureVideoPreviewLayer = {
        return AVCaptureVideoPreviewLayer(session: self.session)
    }()

    /// Default video device input; nil when no camera is available.
    /// (Previously force-unwrapped the device and crashed on camera-less Macs.)
    lazy var videoInput: AVCaptureDeviceInput? = {
        guard let device = AVCaptureDevice.default(for: .video) else { return nil }
        return try? AVCaptureDeviceInput(device: device)
    }()

    /// Replaces the movie-file output on the session and immediately starts
    /// recording to Documents/video.mp4.
    func setupMovieFileOutput() {
        session.removeOutput(self.movieOutput)
        // Create the file-writing output.
        let fileOutput = AVCaptureMovieFileOutput()
        movieOutput = fileOutput

        session.beginConfiguration()
        if session.canAddOutput(fileOutput) {
            session.addOutput(fileOutput)
        }
        session.commitConfiguration()

        // Fixed: a connection only exists AFTER the output is added to the
        // session, so mirroring must be configured here, not before adding.
        let connection = fileOutput.connection(with: .video)
        connection?.automaticallyAdjustsVideoMirroring = true

        // Start writing to the file right away.
        guard let documents = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first else { return }
        let fileUrl = URL(fileURLWithPath: documents + "/video.mp4")
        // startRecording fails when the destination exists; clear it first.
        try? FileManager.default.removeItem(at: fileUrl)
        fileOutput.startRecording(to: fileUrl, recordingDelegate: self)
    }

    /// Returns the first capture device facing `position`, or nil.
    func getVideoDevice(position:AVCaptureDevice.Position) -> AVCaptureDevice? {
        return AVCaptureDevice.devices().first { $0.position == position }
    }

    /// Swaps the session's video input for the camera on the opposite side
    /// of the one currently in use.
    func changeCamera(videoInput:AVCaptureDeviceInput?) {
        // 1. Determine the opposite of the current lens position.
        guard let currentInput = videoInput else { return }
        let targetPosition: AVCaptureDevice.Position = currentInput.device.position == .front ? .back : .front
        guard let device = AVCaptureDevice.devices().first(where: { $0.position == targetPosition }),
              let newInput = try? AVCaptureDeviceInput(device: device) else { return }
        // 2. Remove the old input and add the new one in one transaction.
        session.beginConfiguration()
        session.removeInput(currentInput)
        if session.canAddInput(newInput) {
            session.addInput(newInput)
            self.videoInput = newInput
        }
        session.commitConfiguration()
    }

    /// Starts the capture session.
    func startRunning() {
        self.session.startRunning()
    }

    /// Stops the capture session and tears down the preview layer.
    func stopCapturing() {
        self.session.stopRunning()
        self.previewLayer.removeFromSuperlayer()
    }

    /// Configures video capture and installs a preview layer into `view`.
    func setupCaptureVideoPreview(view:NSView) {
        self.setupVideoInputOutput()
        // Create the preview layer and size it to the host view.
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.frame = view.bounds
        // Insert behind any existing sublayers of the host view.
        view.layer?.insertSublayer(previewLayer, at: 0)
        self.previewLayer = previewLayer
    }

    /// Configures audio capture and starts the session running.
    func setupCaptureAudioAndRun() {
        self.setupAudioInputOutput()
        self.session.startRunning()
    }

    /// Adds an input/output pair to the session inside a single
    /// begin/commitConfiguration transaction.
    private func addInputOutputToSession(_ input : AVCaptureInput, _ output : AVCaptureOutput){
        session.beginConfiguration()
        if session.canAddInput(input) {
            session.addInput(input)
        }
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
        session.commitConfiguration()
    }

    /// Creates the default camera input and a video data output whose sample
    /// buffers are delivered to this engine, and attaches both to the session.
    func setupVideoInputOutput() {
        // 1. Acquire the default video device and wrap it in an input.
        guard let camera = AVCaptureDevice.default(for: .video) else { return }
        guard let input = try? AVCaptureDeviceInput(device: camera) else { return }
        // 2. Create the video data output.
        let output = AVCaptureVideoDataOutput()
        output.setSampleBufferDelegate(self, queue: DispatchQueue.global())
        // (Removed a discarded `output.connection(with:)` call — the
        // connection does not exist until the output joins the session.)
        self.videoDataOutput = output
        // 3. Attach input & output.
        self.addInputOutputToSession(input, output)
    }

    /// Creates the default microphone input and an audio data output whose
    /// sample buffers are delivered to this engine, and attaches both.
    func setupAudioInputOutput() {
        // 1. Acquire the default audio device and wrap it in an input.
        guard let device = AVCaptureDevice.default(for: .audio) else { return }
        guard let input = try? AVCaptureDeviceInput(device: device) else { return }
        // 2. Create the audio data output.
        let output = AVCaptureAudioDataOutput()
        output.setSampleBufferDelegate(self, queue: DispatchQueue.global())
        self.audioDataOutput = output
        // 3. Attach input & output.
        self.addInputOutputToSession(input, output)
    }

    /// Probes for the built-in front/back camera and microphone.
    /// NOTE(review): results are discarded — this appears to be exploratory
    /// code; callers get nothing back.
    func getDevice() {
        if #available(OSX 10.15, *) {
            _ = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
            _ = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
            _ = AVCaptureDevice.default(.builtInMicrophone, for: .audio, position: .unspecified)
        } else {
            // Fallback on earlier versions
        }
    }

    /// Renders the pixel buffer of `sampleBuffer` into a `CGImage`.
    /// Assumes a BGRA pixel buffer — TODO confirm by setting explicit
    /// `videoSettings` on the video data output.
    func dataFromCMSampleBufferRef(sampleBuffer:CMSampleBuffer) -> CGImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }

        // Lock exactly once and hold the lock while CGContext reads the base
        // address. (The original unlocked BEFORE creating the context and then
        // unlocked a second time afterwards.)
        CVPixelBufferLockBaseAddress(imageBuffer, [])
        defer { CVPixelBufferUnlockBaseAddress(imageBuffer, []) }

        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)

        // BGRA layout: RGB color space, premultiplied alpha first,
        // 32-bit little-endian. (The original paired a *gray* color space with
        // `.premultipliedLast`, an invalid combination that makes CGContext
        // creation fail and the function always return nil.)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bitmapInfo = CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue
        let context = CGContext(data: baseAddress,
                                width: width,
                                height: height,
                                bitsPerComponent: 8,
                                bytesPerRow: bytesPerRow,
                                space: colorSpace,
                                bitmapInfo: bitmapInfo)
        return context?.makeImage()
    }
}

// MARK: - Sample-buffer delegates
extension AVCaptureEngine: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    /// Delivered for every captured video/audio sample buffer (on a global
    /// queue, per the setup in setupVideoInputOutput/setupAudioInputOutput).
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        // Video path: convert the frame; bail out if conversion fails.
        // NOTE(review): the resulting image is currently unused — the original
        // contained commented-out timer code that printed it.
        if self.videoDataOutput.connection(with: .video) == connection {
            guard self.dataFromCMSampleBufferRef(sampleBuffer: sampleBuffer) != nil else {
                return
            }
        }

        // Audio path: forward each audio channel to the delegate so it can
        // read level metering (averagePowerLevel / peakHoldLevel).
        if self.audioDataOutput.connection(with: .audio) == connection {
            // Fixed: `connection.output` was force-unwrapped.
            if let firstConnection = connection.output?.connections.first {
                for channel in firstConnection.audioChannels {
                    self.delegate?.avcaptureEngine_capAudio?(channel: channel)
                }
            }
        }

        // Removed: an empty background→main dispatch pair that did nothing.
    }

    /// Dropped frames are intentionally ignored.
    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

    }
}

// MARK: - Movie-file recording delegate
extension AVCaptureEngine: AVCaptureFileOutputRecordingDelegate {
    /// Recording to `fileURL` has started.
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        print("开始写入文件")
    }

    /// Recording finished (or stopped due to max duration / disk limit).
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        // Fixed: the error parameter was silently ignored, hiding failed
        // recordings (e.g. destination file already exists, disk full).
        if let error = error {
            print("完成写入文件 error: \(error)")
        } else {
            print("完成写入文件")
        }
    }
}

extension NSView {
    /// Snapshots the view's current contents as an NSImage via its PDF data.
    func viewToImage() -> NSImage? {
        // Fixed: the original locked focus on the VIEW but called unlockFocus
        // on the resulting NSIMAGE, leaving the view's focus lock unbalanced
        // (and unlocking an image that was never locked).
        self.lockFocus()
        defer { self.unlockFocus() }
        return NSImage(data: self.dataWithPDF(inside: self.bounds))
    }
}

extension NSImage {
    /// Converts this NSImage to a CGImage via its TIFF representation.
    /// Returns nil if the image has no TIFF data or decoding fails.
    func nsImageToCGImage() -> CGImage? {
        // Fixed: the image source was force-unwrapped; CGImageSourceCreateWithData
        // can return nil for undecodable data, which would have crashed.
        guard let imageData = self.tiffRepresentation,
              let source = CGImageSourceCreateWithData(imageData as CFData, nil) else {
            return nil
        }
        return CGImageSourceCreateImageAtIndex(source, 0, nil)
    }
}
extension CGImage {
    /// Wraps this CGImage in an NSImage of the same pixel size.
    func imageFromCGImageRef() -> NSImage? {
        var imageRect = CGRect.zero
        imageRect.size.height = CGFloat(self.height)
        imageRect.size.width = CGFloat(self.width)
        let newImage = NSImage(size: imageRect.size)
        newImage.lockFocus()
        // Fixed: the original fetched a graphics context (via the deprecated
        // `graphicsPort`) but never drew into it, so the returned image was
        // always blank. Draw this CGImage into the focused image context.
        if let context = NSGraphicsContext.current?.cgContext {
            context.draw(self, in: imageRect)
        }
        newImage.unlockFocus()
        return newImage
    }
}

