//
//  LivePhotoMaker.swift
//  MediaManagerKit
//
//  Created by Max Mg on 2024/7/20.
//

import Foundation
import AVFoundation
import ExtensionsKit

public class VideoRecorder: NSObject {
    public static let shared = VideoRecorder()
    
    /// (videoURL, imageURL, errorMessage) — URLs are nil when the capture failed.
    public typealias LivePhotoCompletionHandler = (URL?, URL?, String?) -> Void
    
    /// Mutable bookkeeping for the current recording session.
    class RecordConfig {
        var duration: TimeInterval = 1          // requested clip length in seconds
        var frameRate: Int = 30                 // frames per second
        var captureView: UIView!                // view being recorded
        var completion: ((Error?) -> Void)?     // invoked once when the session ends
        var recordedDuration: TimeInterval = 0  // presentation time of the next frame
        var startTime: CFTimeInterval = CACurrentMediaTime()
        var reqiureFrames: Int = 0              // total frames to record (sic: name typo kept, referenced elsewhere)
        var recordFrames: Int = 0               // frames appended so far
    }
    
    lazy var recordConfig = RecordConfig()
    
    var assetWriter: AVAssetWriter!
    var assetWriterInput: AVAssetWriterInput!
    var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!
    
    var timer: Timer?
    lazy var isRecording = false
    var displayLink: CADisplayLink?
    let queue = DispatchQueue(label: "xxxxxx", attributes: .concurrent)
    
    /// Fixed output path in the temp directory (sic: property name typo kept, referenced elsewhere).
    var videoOutuptURL: URL {
        let temp = FileManager.default.temporaryDirectory
        return temp.appendingPathComponent("livePhotoOutput.mov")
    }
    
    override init() {
        super.init()
    }
    
    /// Records `view` for `duration` seconds and converts the result into a Live Photo.
    /// - Parameters:
    ///   - view: View to capture.
    ///   - duration: Clip length in seconds.
    ///   - outputFinder: Directory the Live Photo assets are written to.
    ///   - completion: (videoURL, imageURL, errorMessage).
    public func capture(in view: UIView,
                        duration: TimeInterval,
                        outputFinder: URL,
                        completion: LivePhotoCompletionHandler?) {
        
        // No [weak self] needed: the closure does not capture self.
        captureVideo(in: view, duration: duration, frameRate: 30) { videoURL, error in
            guard let videoURL = videoURL else {
                completion?(nil, nil, error?.localizedDescription)
                return
            }
            LivePhotoCreater.shared.saveLivePhoto(from: videoURL, outputDirectory: outputFinder) { videoURL, imageURL, msg in
                completion?(videoURL, imageURL, msg)
            }
        }
    }
    
    /// Records `view` into a video file in the temp directory.
    /// - Parameters:
    ///   - view: View to capture.
    ///   - duration: Clip length in seconds.
    ///   - frameRate: Frames per second.
    ///   - completion: URL of the finished video (temp directory), or an error.
    public func captureVideo(in view: UIView,
                             duration: TimeInterval,
                             frameRate: Int,
                             completion: ((URL?, Error?) -> Void)?) {
        
        // BUGFIX: report the busy error to THIS caller. The previous code invoked
        // the prior session's recordConfig.completion, so the rejected caller was
        // never notified and the active session got a spurious error callback.
        guard !isRecording else {
            completion?(nil, NSError(domain: "Video is Recording, try again later", code: 401))
            return
        }
        
        recordConfig.duration = duration
        recordConfig.frameRate = frameRate
        recordConfig.reqiureFrames = Int(Double(frameRate) * duration)
        recordConfig.recordFrames = 0
        recordConfig.captureView = view
        recordConfig.recordedDuration = 0
        recordConfig.completion = { [weak self] error in
            if let error = error {
                completion?(nil, error)
            } else {
                completion?(self?.videoOutuptURL, nil)
            }
        }
        
        startRecordVideo()
    }
}

// MARK: - Video recording
extension VideoRecorder {
    /// Sets up the asset writer, primes it with copies of the first frame, then
    /// drives subsequent frame capture from a repeating timer.
    func startRecordVideo() {
        guard !isRecording, let view = recordConfig.captureView else {
            recordConfig.completion?(NSError(domain: "Video is Recording, try again later", code: 401))
            return
        }
        
        // A stale file at the output path would make AVAssetWriter fail to start.
        if FileManager.default.fileExists(atPath: videoOutuptURL.path) {
            try? FileManager.default.removeItem(atPath: videoOutuptURL.path)
        }
        
        do {
            assetWriter = try AVAssetWriter(outputURL: videoOutuptURL, fileType: .mov)
        } catch {
            print("Error creating AVAssetWriter: \(error)")
            recordConfig.completion?(error)
            // BUGFIX: previously fell through after the failure callback and
            // crashed below on the nil implicitly-unwrapped assetWriter.
            return
        }
        
        // Encode at native pixel resolution (points * screen scale).
        let size = CGSize(width: view.bounds.width * UIScreen.main.scale,
                          height: view.bounds.height * UIScreen.main.scale)
        let outputSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: size.width,
            AVVideoHeightKey: size.height
        ]
        assetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: outputSettings)
        assetWriterInput.expectsMediaDataInRealTime = true
        assetWriter.add(assetWriterInput)
        
        let pixelBufferAttributes: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB,
            kCVPixelBufferWidthKey as String: size.width,
            kCVPixelBufferHeightKey as String: size.height
        ]
        pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterInput,
                                                                  sourcePixelBufferAttributes: pixelBufferAttributes)
        
        assetWriter.startWriting()
        assetWriter.startSession(atSourceTime: .zero)
        isRecording = true
        recordConfig.startTime = CACurrentMediaTime()
        
        // Prime the writer with 5 copies of the first frame before the timer takes
        // over. NOTE(review): the inner `while true` busy-waits on
        // isReadyForMoreMediaData; tolerable for a handful of frames, but a
        // requestMediaDataWhenReady(on:) pump would be cleaner — confirm intent.
        let videoSize = recordConfig.captureView.bounds.size
        if let pixelBuffer = createPixelBuffer(from: recordConfig.captureView, videoSize: videoSize) {
            for _ in 0..<5 {
                while true {
                    if pixelBufferAdaptor.assetWriterInput.isReadyForMoreMediaData {
                        let presentationTime = CMTime(seconds: recordConfig.recordedDuration, preferredTimescale: 600)
                        pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                        
                        recordConfig.recordFrames += 1
                        recordConfig.recordedDuration += 1.0 / TimeInterval(recordConfig.frameRate)
                        Log("append===\(recordConfig.recordFrames)")
                        break
                    }
                }
            }
        }
        
        // Capture one frame per tick until the required frame count is reached.
        timer = Timer.scheduledTimer(withTimeInterval: 1.0 / TimeInterval(recordConfig.frameRate), repeats: true) { [weak self] _ in
            self?.timerHandler()
        }
    }
    
    /// Stops frame capture, finalizes the movie file, and reports the outcome
    /// through `recordConfig.completion`.
    func stopRecording() {
        timer?.invalidate()
        timer = nil
        displayLink?.invalidate()
        displayLink = nil
        
        assetWriterInput.markAsFinished()
        assetWriter.finishWriting { [weak self] in
            guard let self = self else { return }
            self.isRecording = false
            if self.assetWriter.status == .completed {
                self.recordConfig.completion?(nil)
            } else {
                self.recordConfig.completion?(self.assetWriter.error)
            }
        }
    }
    
    /// Timer callback: appends one frame, advances bookkeeping, and stops once
    /// the required frame count is reached.
    @objc func timerHandler() {
        // Stop by frame count (not elapsed wall-clock time) so the clip contains
        // exactly reqiureFrames frames regardless of timer jitter.
        if recordConfig.recordFrames >= recordConfig.reqiureFrames {
            stopRecording()
            return
        }
        let presentationTime = CMTime(seconds: recordConfig.recordedDuration, preferredTimescale: 600)
        appendPixelBuffer(at: presentationTime)
        recordConfig.recordFrames += 1
        recordConfig.recordedDuration += 1.0 / TimeInterval(recordConfig.frameRate)
        Log("===\(recordConfig.recordedDuration)")
    }
}

extension VideoRecorder {
    /// Renders the capture view into a fresh pixel buffer and appends it at
    /// `time`, silently dropping the frame if the writer input is not ready.
    func appendPixelBuffer(at time: CMTime) {
        let size = recordConfig.captureView.bounds.size
        if let pixelBuffer = createPixelBuffer(from: recordConfig.captureView, videoSize: size),
           pixelBufferAdaptor.assetWriterInput.isReadyForMoreMediaData {
            pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: time)
        }
    }
    
    /// Renders `view`'s current hierarchy into a newly created 32ARGB pixel buffer.
    /// - Parameters:
    ///   - view: View to snapshot (via drawHierarchy, afterScreenUpdates: false).
    ///   - videoSize: Size in points; the buffer is allocated at points * screen scale.
    /// - Returns: The rendered buffer, or nil if buffer/context creation fails.
    func createPixelBuffer(from view: UIView, videoSize: CGSize) -> CVPixelBuffer? {
        let contextSize = CGSize(width: videoSize.width * UIScreen.main.scale,
                                 height: videoSize.height * UIScreen.main.scale)
        
        let pixelBufferAttributes: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB,
            kCVPixelBufferWidthKey as String: contextSize.width,
            kCVPixelBufferHeightKey as String: contextSize.height
        ]
        
        var pixelBuffer: CVPixelBuffer?
        let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                         Int(contextSize.width),
                                         Int(contextSize.height),
                                         kCVPixelFormatType_32ARGB,
                                         pixelBufferAttributes as CFDictionary,
                                         &pixelBuffer)
        
        guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
            print("Error creating pixel buffer")
            return nil
        }
        
        CVPixelBufferLockBaseAddress(buffer, .init(rawValue: 0))
        // BUGFIX: unlock on every exit path — the old early `return nil` on context
        // failure left the buffer's base address locked.
        defer { CVPixelBufferUnlockBaseAddress(buffer, .init(rawValue: 0)) }
        
        guard let context = CGContext(data: CVPixelBufferGetBaseAddress(buffer),
                                      width: Int(contextSize.width),
                                      height: Int(contextSize.height),
                                      bitsPerComponent: 8,
                                      bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
                                      space: CGColorSpaceCreateDeviceRGB(),
                                      bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue) else {
            return nil
        }
        
        // Flip vertically: CoreGraphics' origin is bottom-left, UIKit's is top-left.
        context.translateBy(x: contextSize.width / 2, y: contextSize.height / 2)
        context.scaleBy(x: 1, y: -1)
        context.translateBy(x: -contextSize.width / 2, y: -contextSize.height / 2)
        
        UIGraphicsPushContext(context)
        // NOTE(review): drawHierarchy(in:) takes a rect in points, but contextSize
        // is in pixels, so the view is stretched by the screen scale factor here.
        // Kept as-is to preserve output; confirm this is intended.
        view.drawHierarchy(in: CGRect(origin: .zero, size: contextSize), afterScreenUpdates: false)
        UIGraphicsPopContext()
        
        return buffer
    }
    
    /// Renders `layer` into a screen-sized bitmap context and returns the image.
    func drawViewHierarchyToImage(layer: CALayer) -> UIImage? {
        // Opaque=false so transparency in the layer is preserved.
        UIGraphicsBeginImageContextWithOptions(UIScreen.main.bounds.size, false, UIScreen.main.scale)
        defer { UIGraphicsEndImageContext() }

        guard let context = UIGraphicsGetCurrentContext() else { return nil }

        // Draw the layer's content into the current context.
        layer.render(in: context)

        // Snapshot the finished drawing.
        let image = UIGraphicsGetImageFromCurrentImageContext()
        return image
    }

}

extension VideoRecorder {
    /// Experimental: attempts to export a video of a view's layer animation via
    /// AVVideoCompositionCoreAnimationTool, writing to <tmp>/output.mp4.
    /// NOTE(review): this path looks unfinished — see the inline notes below
    /// before relying on it.
    /// - Parameters:
    ///   - view: View whose layer supplies the animation content.
    ///   - size: Render size of the output video.
    ///   - duration: Intended clip length in seconds.
    ///   - completion: Called with the output URL on success only — failure and
    ///     cancellation never invoke it, so callers may wait forever. TODO confirm.
    func generateVideoFromAnimation(view: UIView, size: CGSize, duration: Double, completion: ((URL?) -> Void)?) {
        // Video parameters.
        let framePerSecond = 30
        // NOTE(review): totalFrames is computed but never used.
        let totalFrames = Int(duration * Double(framePerSecond))
        
        let mainComposition = AVMutableComposition()
        let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        // NOTE(review): `mainComposition.tracks.first!` is the track just added
        // above — this inserts an (empty) range of the track into itself, and the
        // force unwrap crashes if addMutableTrack returned nil. Presumably a
        // source-asset track was intended here; verify against the caller.
        try? compositionVideoTrack?.insertTimeRange(CMTimeRange(start: .zero, duration: CMTime(value: 1, timescale: CMTimeScale(framePerSecond))), of: mainComposition.tracks.first!, at: CMTime.zero)
        
        let videoComposition = AVMutableVideoComposition()
        videoComposition.frameDuration = CMTime(value: 1, timescale: CMTimeScale(framePerSecond))
        videoComposition.renderSize = size
        // NOTE(review): the same layer is passed as both the video layer and the
        // animation (parent) layer; the tool normally expects a dedicated video
        // layer nested inside a parent animation layer — TODO confirm.
        let tool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: view.layer, in: view.layer)
        videoComposition.animationTool = tool
        
        // NOTE(review): the instruction covers only 1/framePerSecond seconds and
        // carries no layer instructions — the `duration` parameter is not used here.
        let videoInstruction = AVMutableVideoCompositionInstruction()
        videoInstruction.timeRange = CMTimeRange(start: .zero, duration: CMTime(value: 1, timescale: CMTimeScale(framePerSecond)))
        videoComposition.instructions = [videoInstruction]
        
        // Export destination: <tmp>/output.mp4 (removed first if it exists).
        let outputPath = NSTemporaryDirectory() + "output.mp4"
        let outputURL = URL(fileURLWithPath: outputPath)
        
        let fileManager = FileManager.default
        if fileManager.fileExists(atPath: outputPath) {
            try? fileManager.removeItem(atPath: outputPath)
        }
        
        // NOTE(review): silent return — completion is never called if the export
        // session cannot be created.
        guard let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality) else {
            return
        }
        exporter.outputURL = outputURL
        exporter.outputFileType = .mp4
        exporter.shouldOptimizeForNetworkUse = true
        exporter.videoComposition = videoComposition
        exporter.audioMix = AVAudioMix()
        
        exporter.exportAsynchronously {
            switch exporter.status {
            case .completed:
                print("视频生成成功: \(outputURL)")
                completion?(outputURL)
            case .failed:
                // NOTE(review): failure is logged but completion is not invoked.
                print("视频生成失败: \(String(describing: exporter.error))")
            case .cancelled:
                print("视频生成取消")
            default:
                break
            }
        }
    }
}
