//
//  ScreenRecordManager.swift
//  TestDemo
//
//  Created by Kun Huang on 2020/10/25.
//  Copyright © 2020 HK. All rights reserved.
//

import Foundation
import ReplayKit
import AVKit

// https://www.jianshu.com/p/97798d35f3a3

/// 分辨率
/// A video resolution, expressed in pixels.
public struct Pixel {
    /// Width, in pixels.
    var width: Double
    /// Height, in pixels.
    var height: Double
}

/// 视频的一些属性配置
/// Video compression configuration used when starting a screen recording.
public struct VideoCompressionSetting {
    /// Output resolution. Defaults to 1440x2560 (portrait 2K).
    var pixel:Pixel = Pixel(width: 1440, height:2560)
    
    /// Average bit rate in Mbps (scaled by 1,000,000 when the AVFoundation settings are built).
    var bitRate = 4.0
    
    /// Maximum key-frame interval; 1 means every frame is a key frame, larger values compress more.
    var frameInterval = 3.0
    
    /// Frame rate; maximum 30 fps.
    var frameRate = 30.0
    
    /// Video scaling/fill mode (an `AVVideoScalingMode*` constant).
    var scalingModel:String = AVVideoScalingModeResizeAspectFill
}

/// 音频的一些属性配置
/// Audio compression configuration.
/// NOTE(review): placeholder — no options are defined yet, and the audio
/// settings are currently hard-coded inside `startRecord`.
public struct AudioCompressionSetting {
    
}

@available(iOS 11.0, *)
public class ScreenRecordManager {
    
    /// Shared singleton instance.
    public static let share = ScreenRecordManager()
    private init() {
        registerNotification()
    }
    
    /// Writes the captured sample buffers to an .mp4 file.
    private var assetWriter:AVAssetWriter?
    /// Input receiving video sample buffers.
    private var videoWriterInput:AVAssetWriterInput?
    /// Input receiving microphone audio sample buffers.
    private var audioWriterInput:AVAssetWriterInput?
    
    /// True once `startSession(atSourceTime:)` has been called on the writer.
    private var startedSession = false
    /// Whether a writing session is currently active.
    var isRecording:Bool {
        return startedSession
    }
    
    /// Whether microphone audio should be captured along with the screen.
    var isRecordAudio = true
    
    /// Whether recording is paused.
    /// NOTE(review): declared but not consulted anywhere in this file yet.
    var isPaused = false
    
    /// Background task requested when the app is backgrounded, so buffers keep flowing.
    private var taskidentifier:UIBackgroundTaskIdentifier?
    /// True while the app is in the background.
    private var isEnterBackgroud = false
    /// Buffer-handling closure installed while the app is backgrounded.
    private var backgroundWriterBlock:((_ buffer:CMSampleBuffer, _ bufferType:RPSampleBufferType) -> Void)?
    
    /// Registers for the app-lifecycle notifications used to keep writing in the background.
    private func registerNotification() {
        NotificationCenter.default.addObserver(self, selector: #selector(appEnterBackground), name: UIApplication.didEnterBackgroundNotification, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(appBecomeActive), name: UIApplication.didBecomeActiveNotification, object: nil)
        
        NotificationCenter.default.addObserver(self, selector: #selector(appWillTerminate), name: UIApplication.willTerminateNotification, object: nil)
    }
    
    /// Starts capturing the screen (and, if `isRecordAudio`, the microphone)
    /// into an .mp4 file at `filePath`.
    /// - Parameters:
    ///   - videoSetting: Video compression configuration.
    ///   - filePath: Destination path inside the app sandbox.
    ///   - failed: Invoked with the underlying error when capture or writing fails.
    func startRecord(videoSetting:VideoCompressionSetting = VideoCompressionSetting(),
                     filePath:String,
                     failed:((_ error:Error?) -> Void)? = nil) {
        
        let fileURL = URL(fileURLWithPath: filePath)
        do {
            // Report writer-creation failures instead of silently swallowing
            // them with `try?` and returning.
            assetWriter = try AVAssetWriter(outputURL: fileURL, fileType: .mp4)
        } catch {
            failed?(error)
            return
        }
        guard let assetWriter = self.assetWriter else {return}
        
        // H.264 compression properties derived from the caller's settings.
        let videoProperties = [AVVideoAverageBitRateKey:NSNumber(value: videoSetting.bitRate*1000000),
                              AVVideoExpectedSourceFrameRateKey:NSNumber(value: videoSetting.frameRate),
                              AVVideoMaxKeyFrameIntervalKey:NSNumber(value: videoSetting.frameInterval),
                              AVVideoProfileLevelKey : AVVideoProfileLevelH264HighAutoLevel] as [String : Any]
        
        let videoSettings = [AVVideoCodecKey:AVVideoCodecType.h264,
                        AVVideoWidthKey:NSNumber(value: videoSetting.pixel.width),
                        AVVideoHeightKey:NSNumber(value: videoSetting.pixel.height),
                        AVVideoCompressionPropertiesKey:videoProperties] as [String : Any]
        
        // Mono 22.05 kHz AAC — small files, adequate for screen narration.
        let audioSettings = [AVEncoderBitRatePerChannelKey:28000,
                             AVFormatIDKey:kAudioFormatMPEG4AAC,
                             AVNumberOfChannelsKey:1,
                             AVSampleRateKey:22050]
        
        videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
        videoWriterInput?.expectsMediaDataInRealTime = true
        if let input = videoWriterInput {
            assetWriter.add(input)
        }
        
        audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
        // Live capture: the audio input must also expect real-time data,
        // otherwise buffers arriving while the input is not ready are dropped.
        audioWriterInput?.expectsMediaDataInRealTime = true
        if let input = audioWriterInput {
            assetWriter.add(input)
        }
        
        // Honor the `isRecordAudio` switch instead of hard-coding the mic on.
        RPScreenRecorder.shared().isMicrophoneEnabled = isRecordAudio
        
        RPScreenRecorder.shared().startCapture {[weak self] (buffer, bufferType, error) in
            if let error = error {
                failed?(error)
                return
            }
        
            // Only append buffers whose data is ready to be read.
            if CMSampleBufferDataIsReady(buffer) {
                if self?.isEnterBackgroud == true {
                    // Route through the background writer while backgrounded.
                    self?.backgroundWriterBlock?(buffer, bufferType)
                } else {
                    self?.handelBuffer(buffer: buffer, bufferType: bufferType)
                }
            }
        } completionHandler: { (error) in
            // A nil error here means capture started successfully; only
            // report genuine failures to the caller.
            if let error = error {
                failed?(error)
            }
        }

    }
    
    /// Stops the capture and finalizes the output file.
    /// - Parameter failed: Invoked with the underlying error when stopping fails.
    func stopRecord(failed:((_ error:Error?) -> Void)? = nil) {
        RPScreenRecorder.shared().stopCapture {[weak self] (error) in
            if error != nil {
                failed?(error)
                return
            }
            
            // Mark both inputs finished before finalizing the container.
            self?.videoWriterInput?.markAsFinished()
            self?.audioWriterInput?.markAsFinished()
            self?.assetWriter?.finishWriting {
                print("finishWriting")
            }
            self?.startedSession = false
        }
    }
    
    /// Appends an incoming video/audio sample buffer to the asset writer,
    /// starting the writing session on the first buffer received.
    private func handelBuffer(buffer:CMSampleBuffer,bufferType:RPSampleBufferType) {
        
        if assetWriter?.status == .unknown {
            if !isRecording {
                if assetWriter?.startWriting() != true {return}
                // Anchor the session to the first buffer's timestamp so the
                // output timeline starts at zero.
                assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(buffer))
                startedSession = true
            }
        } else if assetWriter?.status == .writing {
            switch bufferType {
            case .video:
                if videoWriterInput?.isReadyForMoreMediaData == true {
                    videoWriterInput?.append(buffer)
                }
            case .audioMic:
                if audioWriterInput?.isReadyForMoreMediaData == true {
                    audioWriterInput?.append(buffer)
                }
            default:
                // App-audio (and any future buffer types) are intentionally ignored.
                break
            }
        } else if assetWriter?.status == .failed {
            print("assetWriter failed = \(String(describing: assetWriter?.error?.localizedDescription)), assetWriter status = \(String(describing: assetWriter?.status.rawValue))")
        } else {
            print("assetWriter.status = \(String(describing: assetWriter?.status.rawValue))")
        }
    }
}

@available(iOS 11.0, *)
public extension ScreenRecordManager {
    
    /// App moved to the background: request extra execution time and route
    /// incoming buffers through the background writer block.
    @objc private func appEnterBackground() {
        isEnterBackgroud = true
        taskidentifier = UIApplication.shared.beginBackgroundTask { [weak self] in
            guard let self = self else { return }
            // The grace period expired — end the task so the system does not
            // terminate the app.
            if let identifier = self.taskidentifier, identifier != .invalid {
                UIApplication.shared.endBackgroundTask(identifier)
            }
            self.taskidentifier = .invalid
        }
        
        // Capture weakly: this closure is STORED on self, so a strong capture
        // would create a retain cycle.
        backgroundWriterBlock = { [weak self] (buffer, bufferType) in
            self?.handelBuffer(buffer: buffer, bufferType: bufferType)
        }
    }
    
    /// App returned to the foreground: end the background task and reset its identifier.
    @objc private func appBecomeActive() {
        isEnterBackgroud = false
        if let identifier = taskidentifier, identifier != .invalid {
            UIApplication.shared.endBackgroundTask(identifier)
        }
        taskidentifier = .invalid
    }
    
    /// App is terminating: finalize the output file so it remains playable.
    @objc private func appWillTerminate() {
        assetWriter?.finishWriting {
            print("finishWriting")
        }
    }
}

@available(iOS 11.0, *)
public extension ScreenRecordManager {
    /// Concatenates several video files into one.
    /// - Parameters:
    ///   - filePaths: Paths of the clips to stitch together, in order.
    ///   - toPath: Destination path of the combined file.
    ///   - success: Invoked when the export completes.
    ///   - failed: Invoked with the underlying error when insertion or export fails.
    func compositeVideos(filePaths:[String],
                         toPath:String,
                         success:(() -> Void)? = nil,
                         failed:((_ error: Error?) -> Void)? = nil) {
        let composition = AVMutableComposition()
        
        // Target tracks the clips are appended onto.
        let videoCompositonTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        let audioCompositionTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
        
        // Running insertion point: each clip starts where the previous one ended.
        var duration = CMTime.zero
        for path in filePaths {
            let asset = AVURLAsset(url: URL(fileURLWithPath: path))
            let timeRange = CMTimeRange(start: .zero, duration: asset.duration)
            // Skip files that contain no video track at all.
            guard let videoTrack = asset.tracks(withMediaType: .video).first else {
                continue
            }
            do {
                try videoCompositonTrack?.insertTimeRange(timeRange, of: videoTrack, at: duration)
                // Audio is optional — a video-only clip is still stitched in
                // (the original dropped such clips silently).
                if let audioTrack = asset.tracks(withMediaType: .audio).first {
                    try audioCompositionTrack?.insertTimeRange(timeRange, of: audioTrack, at: duration)
                }
                duration = duration + asset.duration
            } catch let error {
                print("composition error = \(error)")
                failed?(error)
                return
            }
        }
        
        // Export the assembled composition to disk.
        saveVideo(composition: composition,
                  outputPath: toPath,
                  success: success,
                  failed: failed)
    }
    
    /// Exports a composition to an .mp4 file.
    /// - Parameters:
    ///   - composition: The assembled `AVMutableComposition`.
    ///   - outputPath: Destination path of the exported file.
    ///   - success: Invoked when the export completes.
    ///   - failed: Invoked with the export error (or nil if no session could be created).
    private func saveVideo(composition:AVMutableComposition,
                           outputPath:String,
                           success:(() -> Void)? = nil,
                           failed:((_ error: Error?) -> Void)? = nil) {
        let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
        guard let export = exportSession else {
            // Report the failure instead of returning silently.
            failed?(nil)
            return
        }
        export.outputURL = URL(fileURLWithPath: outputPath)
        export.outputFileType = .mp4
        export.exportAsynchronously(completionHandler: {
            // Deliver callbacks on the main queue.
            DispatchQueue.main.async {
                switch export.status {
                case .unknown:
                    print("export unknown")
                case .waiting:
                    print("export waiting")
                case .exporting:
                    print("export exporting")
                case .completed:
                    print("export completed")
                    success?()
                case .failed:
                    print("export failed")
                    failed?(export.error)
                case .cancelled:
                    print("export cancelled")
                @unknown default:
                    print("export other status")
                }
            }
        })
    }
}
