//
//  TakeVideoViewController.swift
//  Swift_xhl
//
//  Created by 任超华 on 2021/7/27.
//

import UIKit
import AVFoundation

/// Full-screen video recorder: shows a live camera preview, records a clip of at
/// most `timelimit` seconds, lets the user preview/retake it, and returns the
/// file URL plus a first-frame snapshot through `doneBlock`.
class TakeVideoViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    // Capture session: the bridge that coordinates data flow from the
    // camera/microphone inputs to the movie-file output.
    let captureSession = AVCaptureSession()
    // Video input device (camera); nil on hardware without a camera (e.g. simulator).
    var videoDevice = AVCaptureDevice.default(for: .video)
    // Audio input device (microphone); nil when unavailable.
    let audioDevice = AVCaptureDevice.default(for: .audio)
    // Writes the captured audio/video to a movie file.
    let fileOutput = AVCaptureMovieFileOutput()
    // Currently attached camera input (swapped by exchangeBtnClick).
    var videoInput: AVCaptureDeviceInput?
    // Layer showing the live camera feed.
    var videoLayer: AVCaptureVideoPreviewLayer?

    // Player used to loop the freshly recorded clip.
    var xhlplayer: AVPlayer?
    // Layer hosting the preview player.
    var playerLayer: AVPlayerLayer?

    // Maximum recording duration, in seconds.
    var timelimit = 15
    // Countdown label shown while recording.
    var timeLabel: UILabel!

    // Identifier used to cancel the countdown timer (see GCDTimer).
    var timerName: String?

    // First-frame snapshot of the recorded clip.
    var videoImage: UIImage?

    // Record (start/stop) button.
    var recordBtn: UIButton?
    // Retake button, visible after a recording finished.
    var reTakeBtn: UIButton?

    // Done button, visible after a recording finished.
    var doneBtn: UIButton?
    // URL of the recorded movie file.
    var videoUrl: URL?
    // Completion callback: delivers the recorded file URL and its snapshot image.
    var doneBlock: ((_ fileUrl: URL, _ image: UIImage) -> ())?

    init() {
        super.init(nibName: nil, bundle: nil)
    }

    init(doneBlock: ((_ fileUrl: URL, _ image: UIImage) -> ())?) {
        super.init(nibName: nil, bundle: nil)
        self.modalPresentationStyle = .fullScreen
        self.doneBlock = doneBlock
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        // Make sure the end-of-playback observer never outlives the controller.
        NotificationCenter.default.removeObserver(self)
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        self.prepareRecord()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .white

        // Attach the camera input. Bail out (dismiss) if there is no camera,
        // the input cannot be created, or the session refuses it — previously
        // this path force-unwrapped and could crash on the simulator.
        guard let camera = self.videoDevice,
              let cameraInput = try? AVCaptureDeviceInput(device: camera),
              self.captureSession.canAddInput(cameraInput) else {
            self.dismiss(animated: true, completion: nil)
            return
        }
        self.videoInput = cameraInput
        self.captureSession.addInput(cameraInput)

        // Attach the microphone input. Treated as optional: recording proceeds
        // without audio if the mic is unavailable instead of crashing.
        if let mic = self.audioDevice,
           let micInput = try? AVCaptureDeviceInput(device: mic),
           self.captureSession.canAddInput(micInput) {
            self.captureSession.addInput(micInput)
        }

        // Attach the movie-file output.
        if self.captureSession.canAddOutput(self.fileOutput) {
            self.captureSession.addOutput(self.fileOutput)
        }

        // Live camera preview layer filling the whole view.
        let previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
        previewLayer.frame = self.view.bounds
        previewLayer.videoGravity = .resizeAspectFill
        self.view.layer.addSublayer(previewLayer)
        self.videoLayer = previewLayer

        // The session is started lazily by prepareRecord()/reTakeClick().
        self.initSubViews()
    }

    /// Builds the overlay controls: close, switch-camera, countdown label,
    /// record, retake and done buttons.
    func initSubViews() {
        let navHeight: CGFloat = 88

        // Close button.
        let closeBtn = UIButton(frame: CGRect(x: 15, y: navHeight, width: 40, height: 40))
        closeBtn.setImage(UIImage(named: "icon_close"), for: .normal)
        closeBtn.backgroundColor = CColor.Color(r: 0, g: 0, b: 0, a: 0.4)
        closeBtn.addTarget(self, action: #selector(closeClick), for: .touchUpInside)
        closeBtn.layer.cornerRadius = 20
        view.addSubview(closeBtn)

        // Switch-camera button.
        let exchangeBtn = UIButton(frame: CGRect(x: view.width - 40.0 - 15, y: navHeight, width: 40.0, height: 40.0))
        exchangeBtn.setImage(UIImage(named: "icon_exchange"), for: .normal)
        exchangeBtn.backgroundColor = CColor.Color(r: 0, g: 0, b: 0, a: 0.4)
        exchangeBtn.addTarget(self, action: #selector(exchangeBtnClick), for: .touchUpInside)
        exchangeBtn.layer.cornerRadius = 20
        view.addSubview(exchangeBtn)

        // Countdown label (hidden until a recording starts).
        let timeLabel = UILabel(frame: CGRect(x: (view.width - 150) / 2, y: navHeight + 5, width: 150, height: 30))
        timeLabel.textColor = .white
        timeLabel.backgroundColor = CColor.Color(r: 0, g: 0, b: 0, a: 0.4)
        timeLabel.textAlignment = .center
        timeLabel.layer.cornerRadius = 10
        timeLabel.font = UIFont.systemFont(ofSize: 16)
        timeLabel.clipsToBounds = true
        timeLabel.alpha = 0
        self.timeLabel = timeLabel
        view.addSubview(timeLabel)

        // Record button. Fixed: the 80pt button was centered with a 60pt offset
        // and had cornerRadius 30, so it sat off-center and was not circular.
        let recordSize: CGFloat = 80
        let recordBtn = UIButton(frame: CGRect(x: (view.width - recordSize) / 2,
                                               y: view.height - 60 - 88,
                                               width: recordSize, height: recordSize))
        recordBtn.layer.cornerRadius = recordSize / 2
        recordBtn.contentMode = .scaleAspectFit
        recordBtn.setImage(UIImage(named: "record_ready"), for: .normal)
        recordBtn.setImage(UIImage(named: "record_ing"), for: .selected)
        recordBtn.addTarget(self, action: #selector(recordClick), for: .touchUpInside)
        self.recordBtn = recordBtn
        view.addSubview(recordBtn)

        // Retake button (hidden until a recording finished).
        let reTakeBtn = UIButton(frame: CGRect(x: 15, y: view.height - 60 - 40, width: 40, height: 40))
        reTakeBtn.backgroundColor = CColor.Color(r: 0, g: 0, b: 0, a: 0.4)
        reTakeBtn.setImage(UIImage(named: "icon_retake"), for: .normal)
        reTakeBtn.alpha = 0
        reTakeBtn.layer.cornerRadius = 20
        reTakeBtn.addTarget(self, action: #selector(reTakeClick), for: .touchUpInside)
        self.reTakeBtn = reTakeBtn
        self.view.addSubview(reTakeBtn)

        // Done button (hidden until a recording finished).
        let doneBtn = UIButton(frame: CGRect(x: view.width - 60 - 15, y: view.height - 60 - 40, width: 60, height: 40))
        doneBtn.backgroundColor = .blue
        doneBtn.alpha = 0
        doneBtn.setTitle("完成", for: .normal)
        doneBtn.setTitleColor(.white, for: .normal)
        doneBtn.layer.cornerRadius = 10
        doneBtn.addTarget(self, action: #selector(doneClick), for: .touchUpInside)
        self.doneBtn = doneBtn
        self.view.addSubview(doneBtn)
    }

    /// Toggles recording: starts writing to a timestamped file in Caches,
    /// or stops the current recording.
    @objc func recordClick() {
        if !fileOutput.isRecording {
            let paths = NSSearchPathForDirectoriesInDomains(.cachesDirectory,
                                                            .userDomainMask, true)
            let documentsDirectory = paths[0] as String
            // Timestamped file name avoids clobbering a previous take.
            let timeStamp = Int(Date().timeIntervalSince1970)
            let outputFilePath = "\(documentsDirectory)/output\(timeStamp).mov"
            let outputURL = URL(fileURLWithPath: outputFilePath)
            print("开始录制：\(outputFilePath) ")
            fileOutput.startRecording(to: outputURL,
                                      recordingDelegate: self)
        } else {
            self.stopRecorder()
        }
    }

    /// AVCaptureFileOutputRecordingDelegate — recording started: show the
    /// countdown and schedule a per-second timer that auto-stops at the limit.
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        var tempTime = timelimit
        self.timeLabel.alpha = 1
        // Zero-padded so 5 renders as "00:00:05", not "00:00:5".
        self.timeLabel.text = String(format: "00:00:%02d", tempTime)

        self.recordBtn?.isSelected = true

        timerName = GCDTimer.exectTask { [weak self] in
            tempTime -= 1
            if tempTime < 0 {
                // Time limit reached — stop automatically.
                self?.stopRecorder()
                return
            }
            self?.timeLabel.text = String(format: "00:00:%02d", tempTime)
        }
    }

    /// AVCaptureFileOutputRecordingDelegate — recording finished: grab a
    /// first-frame snapshot and switch to the looping preview.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        self.recordBtn?.isSelected = false

        let asset = AVURLAsset(url: outputFileURL, options: nil)
        let duration: TimeInterval = CMTimeGetSeconds(asset.duration)

        // Ignore clips of one second or less (e.g. an accidental tap).
        guard duration > 1 else {
            // TODO: surface an alert telling the user the clip was too short.
            return
        }

        // Snapshot the first frame for the completion callback. `try?` instead
        // of `try!` — a thumbnail failure must not crash the recorder.
        let generator = AVAssetImageGenerator(asset: asset)
        generator.appliesPreferredTrackTransform = true
        let time = CMTimeMakeWithSeconds(0.0, preferredTimescale: 100)
        var actualTime = CMTimeMake(value: 0, timescale: 0)
        if let imageRef = try? generator.copyCGImage(at: time, actualTime: &actualTime) {
            self.videoImage = UIImage(cgImage: imageRef)
        }

        self.stopRecorder()
        self.previewVideo(outputURL: outputFileURL)
    }

    /// Stops the countdown and the capture pipeline, and flips the controls
    /// into the "recorded" state (retake/done visible).
    func stopRecorder() {
        GCDTimer.cancelTask(self.timerName ?? "")
        // Guarded: this is also reached from viewWillAppear before anything was
        // recorded, and again after the delegate already finished the file.
        if fileOutput.isRecording {
            fileOutput.stopRecording()
        }
        // Optional chaining: timeLabel is nil if viewDidLoad bailed out early.
        self.timeLabel?.alpha = 0

        self.recordBtn?.alpha = 0
        self.reTakeBtn?.alpha = 1
        self.doneBtn?.alpha = 1
        self.captureSession.stopRunning()
    }

    /// Resets everything to the ready-to-record state.
    public func prepareRecord() {
        self.stopRecorder()
        self.reTakeClick()
    }

    /// Replaces the live camera feed with a looping playback of the clip.
    func previewVideo(outputURL: URL) {
        self.videoUrl = outputURL
        let playerItem = AVPlayerItem(url: outputURL)
        self.xhlplayer = AVPlayer(playerItem: playerItem)
        let playerLayer = AVPlayerLayer(player: self.xhlplayer)
        self.playerLayer = playerLayer
        playerLayer.frame = self.view.bounds
        playerLayer.videoGravity = .resizeAspectFill
        self.view.layer.insertSublayer(playerLayer, at: 1)
        self.xhlplayer?.play()

        // Drop any observer from a previous take before adding a new one, so
        // loopVideo is never registered (and fired) more than once.
        NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(loopVideo), name: .AVPlayerItemDidPlayToEndTime, object: playerItem)
    }

    /// Discards the current take and restarts the live camera feed.
    @objc func reTakeClick() {
        self.recordBtn?.alpha = 1
        self.reTakeBtn?.alpha = 0
        self.doneBtn?.alpha = 0
        self.xhlplayer?.pause()
        self.playerLayer?.removeFromSuperlayer()
        self.xhlplayer = nil
        self.videoUrl = nil

        self.captureSession.startRunning()
    }

    /// Hands the recorded clip to the caller and closes the screen.
    @objc func doneClick() {
        self.xhlplayer?.pause()
        self.playerLayer?.removeFromSuperlayer()
        self.xhlplayer = nil

        // Guarded instead of force-unwrapped: videoImage is nil when the
        // snapshot could not be generated.
        if let doneBlock = self.doneBlock,
           let url = self.videoUrl,
           let image = self.videoImage {
            doneBlock(url, image)
            self.closeClick()
        }
    }

    /// Restarts playback from the beginning (end-of-item notification handler).
    @objc func loopVideo() {
        self.xhlplayer?.seek(to: CMTime.zero)
        self.xhlplayer?.play()
    }

    /// Tears down capture/playback and leaves the screen (dismiss or pop).
    @objc func closeClick() {
        self.stopRecorder()
        self.xhlplayer?.pause()
        self.playerLayer?.removeFromSuperlayer()
        self.xhlplayer = nil

        if self.presentingViewController != nil {
            self.dismiss(animated: true, completion: nil)
        } else {
            self.navigationController?.popViewController(animated: true)
        }
    }

    /// Flips between the front and back camera with an oglFlip transition.
    @objc func exchangeBtnClick() {
        guard let currentDevice = self.videoDevice, let currentInput = self.videoInput else {
            return
        }

        // Pick the opposite position; DiscoverySession replaces the deprecated
        // AVCaptureDevice.devices(for:) API.
        let newPosition: AVCaptureDevice.Position = (currentDevice.position == .front) ? .back : .front
        let discovery = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                         mediaType: .video,
                                                         position: newPosition)
        // Single-camera hardware: nothing to switch to.
        guard let newCamera = discovery.devices.first else {
            return
        }

        // Flip transition on the preview layer.
        let animation = CATransition()
        animation.timingFunction = CAMediaTimingFunction(name: .linear)
        animation.duration = 0.5
        animation.type = CATransitionType(rawValue: "oglFlip")
        animation.subtype = (newPosition == .back) ? .fromLeft : .fromRight
        videoLayer?.add(animation, forKey: nil)

        // `try?` instead of `try!`: a failed input must not crash mid-flip.
        guard let newInput = try? AVCaptureDeviceInput(device: newCamera) else {
            return
        }
        captureSession.beginConfiguration()
        // Remove the current camera input first; the session allows only one.
        captureSession.removeInput(currentInput)
        if captureSession.canAddInput(newInput) {
            captureSession.addInput(newInput)
            videoInput = newInput
            videoDevice = newCamera
        } else {
            // Restore the previous input if the new one is rejected.
            captureSession.addInput(currentInput)
        }
        captureSession.commitConfiguration()
    }
}
