//
//  LiveViewController.swift
//  swiftDemo
//
//  Created by Gerry on 2022/1/13.
//

import UIKit
import AVFoundation
import Alamofire
import RxSwift
import RxCocoa

/**
 * @brief 视频录制 直播
 *
 **/
/**
 * @brief Video capture / recording demo.
 *
 * Shows a live preview from the front camera and records it to
 * Documents/abc.mp4.
 *
 * NOTE(review): `setupAudio()` (declared in an extension below) is never
 * called from here, so recordings contain no audio track — confirm intended.
 **/
class LiveViewController: UIViewController {

    /// Capture pipeline shared by the preview layer and the outputs.
    private lazy var session : AVCaptureSession = AVCaptureSession()

    /// Layer that renders the camera feed on screen.
    private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: session)

    /// Kept so `stopCapture` can end the recording that `startCapture` began.
    private var movieOutput : AVCaptureMovieFileOutput?

    override func viewDidLoad() {
        super.viewDidLoad()

        // "Start capture" button.
        let startButton = UIButton(frame: CGRect(x: 20, y: 100, width: 80, height: 40))
        startButton.setTitle("开始采集", for: .normal)
        startButton.addTarget(self, action: #selector(startCapture(btn:)), for: .touchUpInside)
        startButton.backgroundColor = .orange
        view.addSubview(startButton)

        // "Stop capture" button.
        let stopButton = UIButton(frame: CGRect(x: 200, y: 100, width: 80, height: 40))
        stopButton.setTitle("结束采集", for: .normal)
        stopButton.addTarget(self, action: #selector(stopCapture(btn:)), for: .touchUpInside)
        stopButton.backgroundColor = .orange
        view.addSubview(stopButton)
    }

    /// Configures the session with the front camera, shows the preview and
    /// starts recording to Documents/abc.mp4.
    @objc func startCapture(btn:UIButton) {
        // Re-entrancy guard: adding the same inputs/outputs to a running
        // session a second time raises NSInvalidArgumentException.
        guard !session.isRunning else { return }

        // `AVCaptureDevice.devices()` is deprecated since iOS 10 — use the
        // typed default-device lookup for the front wide-angle camera.
        guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
            print("获取设备失败")
            return
        }

        guard let videoInput = try? AVCaptureDeviceInput(device: device) else {
            print("创建输入源失败")
            return
        }

        // Batch all topology changes into one atomic configuration.
        session.beginConfiguration()

        // Camera input — guarded so duplicates cannot throw.
        if session.canAddInput(videoInput) {
            session.addInput(videoInput)
        }

        // Raw sample-buffer output (delegate callbacks arrive on a global queue,
        // per the queue passed below).
        // NOTE(review): on iOS, AVCaptureMovieFileOutput and
        // AVCaptureVideoDataOutput generally cannot both receive data; while
        // the movie output is attached the sample-buffer delegate may never
        // fire — confirm which of the two this demo actually needs.
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.global())
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
        }

        // File-writing output.
        let movieOutput = AVCaptureMovieFileOutput()
        if session.canAddOutput(movieOutput) {
            session.addOutput(movieOutput)
        }
        self.movieOutput = movieOutput

        // Prefer stabilized footage where the device supports it.
        let connection = movieOutput.connection(with: AVMediaType.video)
        connection?.preferredVideoStabilizationMode = .auto

        session.commitConfiguration()

        // Live preview behind the buttons.
        previewLayer.frame = view.bounds
        view.layer.insertSublayer(previewLayer, at: 0)

        // Destination file. `startRecording(to:)` fails if a file already
        // exists at the URL, so clear any leftover from a previous run.
        let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first! + "/abc.mp4"
        let url = URL(fileURLWithPath: path)
        try? FileManager.default.removeItem(at: url)

        // `startRunning()` blocks until the session is up — keep it off the
        // main thread so the UI stays responsive.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            guard let self = self else { return }
            self.session.startRunning()
            movieOutput.startRecording(to: url, recordingDelegate: self)
        }
    }

    /// Ends the recording, stops the session and removes the preview layer.
    @objc
    func stopCapture(btn:UIButton) {

        movieOutput?.stopRecording()

        // `stopRunning()` also blocks; do it off the main thread, then tear
        // the topology down so a later "开始采集" tap can rebuild it from
        // scratch instead of hitting duplicate-input errors.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            guard let self = self else { return }
            self.session.stopRunning()
            self.session.beginConfiguration()
            self.session.inputs.forEach { self.session.removeInput($0) }
            self.session.outputs.forEach { self.session.removeOutput($0) }
            self.session.commitConfiguration()
        }
        previewLayer.removeFromSuperlayer()
    }
}

// MARK: - Audio configuration
extension LiveViewController {
    /// Adds a microphone input and an audio-data output to `session`.
    ///
    /// NOTE(review): nothing in this file calls this method yet, so audio is
    /// never captured — confirm whether it should be invoked from
    /// `startCapture(btn:)`.
    fileprivate func setupAudio() {
        guard let device = AVCaptureDevice.default(for: .audio) else {
            print("获取音频设备失败")
            return
        }

        guard let audioInput = try? AVCaptureDeviceInput(device: device) else {
            // Was `print("")` — a silent failure with an empty message.
            print("创建音频输入源失败")
            return
        }

        // Audio sample buffers are delivered to the delegate on a global queue.
        let audioOutput = AVCaptureAudioDataOutput()
        audioOutput.setSampleBufferDelegate(self, queue: DispatchQueue.global())

        // Guarded adds: calling setupAudio() twice (or on a session that
        // cannot accept the connection) would otherwise throw.
        if session.canAddInput(audioInput) {
            session.addInput(audioInput)
        }
        if session.canAddOutput(audioOutput) {
            session.addOutput(audioOutput)
        }
    }
}

// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate / AVCaptureAudioDataOutputSampleBufferDelegate
extension LiveViewController : AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    
    /// Called for every captured sample buffer (video and audio share this
    /// callback); currently it only logs, the buffer itself is unused.
    /// Delivered on the global queue passed to `setSampleBufferDelegate`,
    /// not on the main thread.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        print("采集到数据 CMSampleBuffer") // "received CMSampleBuffer data"
    }
}

// MARK: - AVCaptureFileOutputRecordingDelegate
extension LiveViewController : AVCaptureFileOutputRecordingDelegate {

    /// The movie file output has begun writing to `fileURL`; log only.
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        print("开始写入") // "started writing"
    }
    
    /// Recording finished; log only.
    /// NOTE(review): `error` is ignored here — if it is non-nil the output
    /// file may be missing or truncated; consider surfacing it.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        print("结束写入") // "finished writing"
    }
}
