//
//  VideoAnimeConverter.swift
//  AIPhotoAnimation
//
//  Created by Cobbly on 2025/2/20.
//

import UIKit
import AVFoundation
import CoreML
import Vision
import Metal

/// Demo screen that runs a bundled test video through `HayaoStyleProcessor`
/// frame-by-frame (at an assumed 30 fps), writes the styled frames into a new
/// temporary MP4 via `AVAssetWriter`, reports progress/speed/ETA while it
/// runs, and plays the processed result when the conversion finishes.
///
/// NOTE(review): this file calls SnapKit (`snp.makeConstraints`) but no
/// `import SnapKit` appears at the top of the file — confirm the dependency
/// is re-exported from another module (e.g. `@_exported import`), otherwise
/// this file will not compile.
class VideoAnimeConverter: UIViewController {

    // MARK: - Subviews

    /// Hosts the `AVPlayerLayer` for both the source and the processed video.
    private let videoView: UIView = {
        let view = UIView()
        view.backgroundColor = .black
        return view
    }()

    /// Per-frame processing progress; hidden until processing starts.
    private let progressView: UIProgressView = {
        let progress = UIProgressView(progressViewStyle: .default)
        progress.progressTintColor = .systemBlue
        return progress
    }()

    /// One-line status ("视频已就绪", progress percentage, or failure text).
    private let statusLabel: UILabel = {
        let label = UILabel()
        label.textAlignment = .center
        label.textColor = .white
        label.font = .systemFont(ofSize: 14)
        return label
    }()

    /// Dismisses this screen.
    private let closeButton: UIButton = {
        let button = UIButton(type: .system)
        button.setTitle("关闭", for: .normal)
        button.setTitleColor(.white, for: .normal)
        return button
    }()

    /// Play/pause toggle overlaid on the video; revealed once processing is done.
    private let playButton: UIButton = {
        let button = UIButton(type: .system)
        button.setImage(UIImage(systemName: "play.fill"), for: .normal)
        button.tintColor = .white
        return button
    }()

    /// Elapsed / estimated-remaining time line.
    private let timeLabel: UILabel = {
        let label = UILabel()
        label.textAlignment = .center
        label.textColor = .white
        label.font = .systemFont(ofSize: 12)
        return label
    }()

    /// Two-line detail area (frames processed + speed, or error details).
    private let detailLabel: UILabel = {
        let label = UILabel()
        label.textAlignment = .center
        label.textColor = .lightGray
        label.font = .systemFont(ofSize: 12)
        label.numberOfLines = 2
        return label
    }()

    /// Opens `HistoryViewController`.
    private let historyButton: UIButton = {
        let button = UIButton(type: .system)
        button.setImage(UIImage(systemName: "clock.fill"), for: .normal)
        button.tintColor = .white
        return button
    }()

    /// Starts the conversion.
    private let processButton: UIButton = {
        let button = UIButton(type: .system)
        button.setTitle("开始处理", for: .normal)
        button.setTitleColor(.white, for: .normal)
        button.backgroundColor = .systemBlue
        button.layer.cornerRadius = 22
        button.titleLabel?.font = .systemFont(ofSize: 16, weight: .medium)
        return button
    }()

    // MARK: - State

    private var player: AVPlayer?
    private var playerLayer: AVPlayerLayer?
    /// Style-transfer engine; `nil` when its initializer threw.
    private var hayaoProcessor: HayaoStyleProcessor?
    /// Output URL of the last successful conversion.
    private var processedVideoURL: URL?
    /// True while the frame-processing loop is running.
    private var isProcessing = false
    // NOTE(review): the Metal device/queue are created in setupMetal() but
    // never used anywhere in this file — confirm whether they are dead code
    // or consumed by HayaoStyleProcessor elsewhere.
    private var metalDevice: MTLDevice?
    private var metalCommandQueue: MTLCommandQueue?
    /// Wall-clock start of the current conversion; drives speed/ETA estimates.
    private var processStartTime: Date?

    // MARK: - Lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()
        setupUI()
        setupProcessor()
        setupMetal()
        loadTestVideo()
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        // Keep the player layer in sync with the (auto-laid-out) video view.
        playerLayer?.frame = videoView.bounds
    }

    // MARK: - Setup

    /// Builds the view hierarchy, constraints, initial texts, and button targets.
    private func setupUI() {
        view.backgroundColor = .black

        view.addSubview(videoView)
        view.addSubview(progressView)
        view.addSubview(statusLabel)
        view.addSubview(timeLabel)
        view.addSubview(detailLabel)
        view.addSubview(closeButton)
        view.addSubview(playButton)
        view.addSubview(historyButton)
        view.addSubview(processButton)

        // 16:9 video area pinned below the top controls.
        videoView.snp.makeConstraints { make in
            make.top.equalTo(view.safeAreaLayoutGuide).offset(50) // extra top spacing
            make.left.equalToSuperview().offset(20)
            make.right.equalToSuperview().offset(-20)
            make.height.equalTo(videoView.snp.width).multipliedBy(9.0/16.0)
        }

        closeButton.snp.makeConstraints { make in
            make.top.equalTo(view.safeAreaLayoutGuide).offset(10)
            make.right.equalToSuperview().offset(-20)
        }

        // Progress UI stacked beneath the video.
        progressView.snp.makeConstraints { make in
            make.top.equalTo(videoView.snp.bottom).offset(30)
            make.left.equalToSuperview().offset(20)
            make.right.equalToSuperview().offset(-20)
            make.height.equalTo(4)
        }

        statusLabel.snp.makeConstraints { make in
            make.top.equalTo(progressView.snp.bottom).offset(15)
            make.left.right.equalToSuperview()
            make.height.equalTo(20)
        }

        timeLabel.snp.makeConstraints { make in
            make.top.equalTo(statusLabel.snp.bottom).offset(10)
            make.left.right.equalToSuperview()
            make.height.equalTo(20)
        }

        detailLabel.snp.makeConstraints { make in
            make.top.equalTo(timeLabel.snp.bottom).offset(10)
            make.left.equalToSuperview().offset(20)
            make.right.equalToSuperview().offset(-20)
            make.height.equalTo(40)
        }

        playButton.snp.makeConstraints { make in
            make.center.equalTo(videoView)
            make.width.height.equalTo(50)
        }

        historyButton.snp.makeConstraints { make in
            make.top.equalTo(view.safeAreaLayoutGuide).offset(16)
            make.trailing.equalTo(closeButton.snp.leading).offset(-16)
            make.width.height.equalTo(44)
        }

        processButton.snp.makeConstraints { make in
            make.centerX.equalToSuperview()
            make.top.equalTo(detailLabel.snp.bottom).offset(30)
            make.width.equalTo(200)
            make.height.equalTo(44)
        }

        // Initial idle state.
        statusLabel.text = "视频已就绪"
        timeLabel.text = "等待处理..."
        detailLabel.text = "点击开始处理按钮开始转换"

        // Progress UI is hidden until processing starts.
        progressView.isHidden = true

        statusLabel.textColor = .white
        statusLabel.font = .systemFont(ofSize: 16, weight: .medium)

        timeLabel.textColor = .white
        timeLabel.font = .systemFont(ofSize: 14)

        detailLabel.textColor = .lightGray
        detailLabel.font = .systemFont(ofSize: 12)

        progressView.progressTintColor = .systemBlue
        progressView.trackTintColor = .darkGray

        closeButton.addTarget(self, action: #selector(closeButtonTapped), for: .touchUpInside)
        playButton.addTarget(self, action: #selector(playButtonTapped), for: .touchUpInside)
        historyButton.addTarget(self, action: #selector(historyButtonTapped), for: .touchUpInside)
        processButton.addTarget(self, action: #selector(processButtonTapped), for: .touchUpInside)
        playButton.isHidden = true
    }

    /// Instantiates the style processor; leaves it `nil` on failure (the
    /// failure is reported again when the user taps "开始处理").
    private func setupProcessor() {
        do {
            hayaoProcessor = try HayaoStyleProcessor()
        } catch {
            print("初始化处理器失败: \(error)")
        }
    }

    /// Grabs the default Metal device and a command queue.
    private func setupMetal() {
        metalDevice = MTLCreateSystemDefaultDevice()
        metalCommandQueue = metalDevice?.makeCommandQueue()
    }

    // MARK: - Video loading

    /// Loads the bundled "test.MP4", attaches a player for preview, and
    /// enables the process button once duration/tracks are known.
    private func loadTestVideo() {
        print("开始加载测试视频...")

        guard let path = Bundle.main.path(forResource: "test", ofType: "MP4") else {
            statusLabel.text = "找不到测试视频文件"
            print("找不到测试视频文件")
            return
        }

        print("找到视频文件路径: \(path)")
        let url = URL(fileURLWithPath: path)

        // Double-check the file actually exists on disk.
        if !FileManager.default.fileExists(atPath: path) {
            statusLabel.text = "视频文件不存在"
            print("视频文件不存在")
            return
        }

        // Preload the asset's duration/tracks asynchronously before wiring up
        // the player, so the UI can show the real duration.
        let asset = AVAsset(url: url)
        Task {
            do {
                let duration = try await asset.load(.duration)
                let tracks = try await asset.load(.tracks)

                print("视频时长: \(duration.seconds)秒")
                print("视频轨道数: \(tracks.count)")

                await MainActor.run {
                    let player = AVPlayer(url: url)
                    self.player = player

                    let playerLayer = AVPlayerLayer(player: player)
                    playerLayer.videoGravity = .resizeAspect
                    playerLayer.frame = videoView.bounds
                    videoView.layer.addSublayer(playerLayer)
                    self.playerLayer = playerLayer

                    // Ready — but do NOT start processing automatically.
                    statusLabel.text = "视频已就绪"
                    timeLabel.text = "视频时长: \(String(format: "%.1f", duration.seconds))秒"
                    detailLabel.text = "点击开始处理按钮开始转换"
                    processButton.isEnabled = true
                }
            } catch {
                await MainActor.run {
                    statusLabel.text = "视频加载失败"
                    processButton.isEnabled = false
                    print("视频加载失败: \(error)")
                }
            }
        }
    }

    // MARK: - Processing

    /// Converts the video at `url` frame-by-frame and writes the styled frames
    /// into a temporary MP4. Aborts after 5 consecutive per-frame failures.
    private func processVideo(url: URL) {
        guard let processor = hayaoProcessor else {
            statusLabel.text = "处理器未初始化"
            print("❌ 处理器未初始化")
            return
        }

        print("🎬 开始处理视频流程...")

        let asset = AVAsset(url: url)
        Task {
            do {
                print("⏳ 加载视频资源...")
                let duration = try await asset.load(.duration)
                let durationSeconds = duration.seconds

                print("✅ 视频资源加载完成:")
                print("   - 时长: \(durationSeconds)秒")

                await MainActor.run {
                    statusLabel.text = "开始处理视频..."
                    progressView.progress = 0
                    timeLabel.text = "准备中..."
                    detailLabel.text = "初始化处理器..."
                }

                isProcessing = true
                processStartTime = Date()

                // Fresh output file in tmp; discard any leftover from a previous run.
                let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent("processed_video.mp4")
                try? FileManager.default.removeItem(at: outputURL)
                print("📁 创建输出文件: \(outputURL.path)")

                print("⚙️ 配置视频写入器...")
                let videoWriter = try AVAssetWriter(outputURL: outputURL, fileType: .mp4)

                let videoSettings: [String: Any] = [
                    AVVideoCodecKey: AVVideoCodecType.h264,
                    AVVideoWidthKey: 1280,
                    AVVideoHeightKey: 720
                ]
                let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
                videoWriterInput.expectsMediaDataInRealTime = false

                print("📊 视频输出配置:")
                print("   - 编码器: H.264")
                print("   - 分辨率: 1280x720")

                // FIX: the adaptor previously requested 32ARGB buffers while
                // `toRGBAPixelBuffer()` produces 32BGRA ones; the two formats
                // must agree for `append` to accept the buffers reliably.
                let adaptor = AVAssetWriterInputPixelBufferAdaptor(
                    assetWriterInput: videoWriterInput,
                    sourcePixelBufferAttributes: [
                        kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA),
                        kCVPixelBufferWidthKey as String: 1280,
                        kCVPixelBufferHeightKey as String: 720
                    ])

                if videoWriter.canAdd(videoWriterInput) {
                    videoWriter.add(videoWriterInput)
                    print("✅ 视频输入添加成功")
                } else {
                    print("❌ 无法添加视频输入")
                    throw NSError(domain: "VideoProcessing", code: -1, userInfo: [NSLocalizedDescriptionKey: "无法添加视频输入"])
                }

                // FIX: the Bool result of startWriting() was ignored; surface
                // the writer's own error immediately instead of failing later.
                guard videoWriter.startWriting() else {
                    throw videoWriter.error ?? VideoProcessingError.writeFailed
                }
                videoWriter.startSession(atSourceTime: .zero)
                print("▶️ 开始视频写入会话")

                // Exact-time extraction so frames land on the 30 fps grid.
                let generator = AVAssetImageGenerator(asset: asset)
                generator.appliesPreferredTrackTransform = true
                generator.requestedTimeToleranceBefore = .zero
                generator.requestedTimeToleranceAfter = .zero

                // NOTE(review): assumes a 30 fps source — confirm against the
                // actual track's nominalFrameRate.
                let frameCount = Int(durationSeconds * 30)
                var processedFrames = 0
                print("🎞 准备处理 \(frameCount) 帧")

                // Abort after `maxErrors` consecutive per-frame failures.
                var processingErrors = 0
                let maxErrors = 5

                for frameNumber in 0..<frameCount {
                    let time = CMTime(value: CMTimeValue(frameNumber), timescale: 30)

                    do {
                        let cgImage = try generator.copyCGImage(at: time, actualTime: nil)
                        let frameImage = UIImage(cgImage: cgImage)

                        // Style-transfer the frame.
                        let processedImage = try await processor.process(image: frameImage)

                        // Convert to a pixel buffer the writer can consume.
                        guard let pixelBuffer = processedImage.toRGBAPixelBuffer() else {
                            print("❌ 像素缓冲区创建失败")
                            processingErrors += 1
                            if processingErrors >= maxErrors {
                                throw VideoProcessingError.tooManyErrors
                            }
                            continue
                        }

                        // Back-pressure: poll up to ~1s for the writer input
                        // to accept more data.
                        var attempts = 0
                        while !videoWriterInput.isReadyForMoreMediaData && attempts < 10 {
                            try await Task.sleep(nanoseconds: 100_000_000)
                            attempts += 1
                        }

                        // FIX: the original threw whenever attempts reached 10,
                        // even if the input had just become ready after the
                        // last sleep; only give up while it is still not ready.
                        if !videoWriterInput.isReadyForMoreMediaData {
                            throw VideoProcessingError.writerNotReady
                        }

                        if adaptor.append(pixelBuffer, withPresentationTime: time) {
                            print("✅ 帧 \(frameNumber + 1) 写入成功")
                            processingErrors = 0 // reset the consecutive-error counter
                            // FIX: only count frames that were actually written;
                            // the original also counted failed appends.
                            processedFrames += 1
                        } else {
                            print("❌ 帧 \(frameNumber + 1) 写入失败")
                            processingErrors += 1
                            if processingErrors >= maxErrors {
                                throw VideoProcessingError.writeFailed
                            }
                        }

                        // `updateProgress` is synchronous and hops to the main
                        // queue itself, so no `await` is needed (the original
                        // `await` on a sync call was a compiler warning).
                        updateProgress(frameNumber: frameNumber, totalFrames: frameCount)

                    } catch {
                        print("❌ 处理帧 \(frameNumber + 1) 失败: \(error)")
                        processingErrors += 1
                        if processingErrors >= maxErrors {
                            throw VideoProcessingError.processingFailed(error)
                        }
                    }
                }

                // Bail out if literally nothing was written.
                if processedFrames == 0 {
                    throw NSError(domain: "VideoProcessing",
                                code: -3,
                                userInfo: [NSLocalizedDescriptionKey: "没有成功处理任何帧"])
                }

                print("⏳ 等待所有处理完成...")
                updateProgress(frameNumber: frameCount - 1, totalFrames: frameCount)

                // Finalize the file.
                print("📝 完成视频写入...")
                videoWriterInput.markAsFinished()
                await videoWriter.finishWriting()

                if videoWriter.status != .completed {
                    throw NSError(domain: "VideoProcessing",
                                code: -4,
                                userInfo: [NSLocalizedDescriptionKey: "视频写入未完成: \(videoWriter.status)"])
                }

                print("✅ 视频处理完成")
                print("   - 总帧数: \(frameCount)")
                print("   - 成功处理: \(processedFrames)")
                print("   - 输出文件: \(outputURL.path)")

                self.processedVideoURL = outputURL

                await MainActor.run {
                    statusLabel.text = "视频处理完成"
                    isProcessing = false
                    playButton.isHidden = false
                    processButton.isHidden = true // hide once the conversion succeeded
                    loadProcessedVideo()
                }

            } catch {
                print("❌ 视频处理失败:")
                print("   - 错误: \(error)")
                print("   - 本地化描述: \(error.localizedDescription)")

                await MainActor.run {
                    statusLabel.text = "视频处理失败"
                    timeLabel.text = "处理失败"
                    detailLabel.text = error.localizedDescription
                    isProcessing = false

                    // Re-enable the process button so the user can retry.
                    processButton.isEnabled = true
                    processButton.backgroundColor = .systemBlue
                }
            }
        }
    }

    /// Replaces the preview player with one pointing at the processed file.
    private func loadProcessedVideo() {
        guard let processedVideoURL = processedVideoURL else { return }

        let player = AVPlayer(url: processedVideoURL)
        self.player = player

        if let oldPlayerLayer = playerLayer {
            oldPlayerLayer.removeFromSuperlayer()
        }

        let newPlayerLayer = AVPlayerLayer(player: player)
        newPlayerLayer.videoGravity = .resizeAspect
        newPlayerLayer.frame = videoView.bounds
        videoView.layer.addSublayer(newPlayerLayer)
        self.playerLayer = newPlayerLayer
    }

    // MARK: - Actions

    @objc private func closeButtonTapped() {
        dismiss(animated: true)
    }

    /// Toggles playback and swaps the play/pause glyph accordingly.
    @objc private func playButtonTapped() {
        if let player = player {
            if player.timeControlStatus == .playing {
                player.pause()
                playButton.setImage(UIImage(systemName: "play.fill"), for: .normal)
            } else {
                player.play()
                playButton.setImage(UIImage(systemName: "pause.fill"), for: .normal)
            }
        }
    }

    @objc private func historyButtonTapped() {
        let historyVC = HistoryViewController()
        let nav = UINavigationController(rootViewController: historyVC)
        nav.modalPresentationStyle = .fullScreen
        present(nav, animated: true)
    }

    /// Reveals the progress UI, disables the button, and kicks off processing.
    @objc private func processButtonTapped() {
        guard let url = Bundle.main.url(forResource: "test", withExtension: "MP4") else {
            statusLabel.text = "找不到测试视频文件"
            return
        }

        progressView.isHidden = false

        processButton.isEnabled = false
        processButton.backgroundColor = .gray

        processVideo(url: url)
    }

    // MARK: - Helpers

    /// Formats a duration as "HH:MM:SS", or "MM:SS" when under an hour.
    private func formatTime(_ timeInterval: TimeInterval) -> String {
        let hours = Int(timeInterval) / 3600
        let minutes = Int(timeInterval) / 60 % 60
        let seconds = Int(timeInterval) % 60

        if hours > 0 {
            return String(format: "%02d:%02d:%02d", hours, minutes, seconds)
        } else {
            return String(format: "%02d:%02d", minutes, seconds)
        }
    }

    /// Pushes progress, elapsed time, ETA, and frame stats to the UI.
    /// Safe to call from any thread (hops to the main queue internally).
    private func updateProgress(frameNumber: Int, totalFrames: Int) {
        // Guard against division by zero / negative indices.
        guard frameNumber >= 0, totalFrames > 0 else { return }

        // FIX: measure *completed* frames (frameNumber + 1) so the bar can
        // actually reach 100% on the last frame; the original used
        // frameNumber/totalFrames, which topped out below 100%.
        let completedFrames = min(frameNumber + 1, totalFrames)
        let progress = Float(completedFrames) / Float(totalFrames)
        let currentTime = Date()
        let elapsedTime = currentTime.timeIntervalSince(processStartTime ?? currentTime)

        // Avoid NaN/Inf when no time has elapsed yet.
        let framesPerSecond: Double
        if elapsedTime > 0 {
            framesPerSecond = Double(completedFrames) / elapsedTime
        } else {
            framesPerSecond = 0
        }

        // ETA from the measured throughput.
        let remainingFrames = max(0, totalFrames - completedFrames)
        let estimatedRemainingTime: TimeInterval
        if framesPerSecond > 0 {
            estimatedRemainingTime = Double(remainingFrames) / framesPerSecond
        } else {
            estimatedRemainingTime = 0
        }

        let formattedElapsedTime = formatTime(max(0, elapsedTime))
        let formattedEstimatedRemainingTime = formatTime(max(0, estimatedRemainingTime))
        let formattedProgress = String(format: "%.0f%%", min(100, max(0, progress * 100)))

        let formattedDetails = String(format: "已处理: %d/%d 帧\n当前速度: %.1f 帧/秒",
                                     completedFrames,
                                     totalFrames,
                                     framesPerSecond)

        DispatchQueue.main.async {
            self.progressView.progress = min(1, max(0, progress))
            self.statusLabel.text = "处理进度: \(formattedProgress)"
            self.timeLabel.text = "已用时间: \(formattedElapsedTime) / 预计剩余: \(formattedEstimatedRemainingTime)"
            self.detailLabel.text = formattedDetails
        }
    }
}

// 添加扩展方法处理维度转换
extension UIImage {
    func toRGBAPixelBuffer() -> CVPixelBuffer? {
        let width = Int(size.width)
        let height = Int(size.height)
        
        var pixelBuffer: CVPixelBuffer?
        let status = CVPixelBufferCreate(
            kCFAllocatorDefault,
            width,
            height,
            kCVPixelFormatType_32BGRA,
            [
                kCVPixelBufferCGImageCompatibilityKey: true,
                kCVPixelBufferCGBitmapContextCompatibilityKey: true
            ] as CFDictionary,
            &pixelBuffer
        )
        
        guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
            return nil
        }
        
        CVPixelBufferLockBaseAddress(buffer, [])
        defer { CVPixelBufferUnlockBaseAddress(buffer, []) }
        
        let context = CGContext(
            data: CVPixelBufferGetBaseAddress(buffer),
            width: width,
            height: height,
            bitsPerComponent: 8,
            bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
            space: CGColorSpaceCreateDeviceRGB(),
            bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue
        )
        
        context?.draw(cgImage!, in: CGRect(x: 0, y: 0, width: width, height: height))
        return buffer
    }
}

// MARK: - Processing errors
/// Errors thrown by the frame-by-frame video conversion pipeline.
///
/// Conforms to `LocalizedError` so that the `detailLabel.text =
/// error.localizedDescription` path in the failure handler shows a
/// meaningful message instead of the generic Foundation fallback.
enum VideoProcessingError: Error, LocalizedError {
    /// Too many consecutive per-frame failures (extraction, conversion, or write).
    case tooManyErrors
    /// The asset-writer input never became ready for more media data.
    case writerNotReady
    /// Appending a pixel buffer to the writer failed repeatedly.
    case writeFailed
    /// Wraps the underlying error after repeated per-frame failures.
    case processingFailed(Error)

    var errorDescription: String? {
        switch self {
        case .tooManyErrors:
            return "视频处理失败：连续出错次数过多"
        case .writerNotReady:
            return "视频写入器未就绪"
        case .writeFailed:
            return "视频帧写入失败"
        case .processingFailed(let error):
            return "视频处理失败：\(error.localizedDescription)"
        }
    }
}

// MARK: - UICollectionViewDataSource, UICollectionViewDelegate
// NOTE(review): stub conformance — no UICollectionView is created or wired to
// this controller anywhere in this file, so these callbacks are currently
// unreachable. Confirm whether the history grid was meant to live here or in
// HistoryViewController.
extension VideoAnimeConverter: UICollectionViewDataSource, UICollectionViewDelegate {
    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        // TODO: return the actual number of history records
        return 0
    }
    
    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        // Cell reuse identifier must be registered by whoever owns the collection view.
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "HistoryCell", for: indexPath)
        // TODO: configure the cell with a thumbnail
        return cell
    }
    
    func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
        // TODO: handle selection of a history item
    }
}
