//
//  VideoMerge.swift
//  WatermarkCamera
//
//  Created by AxcLogo on 4/7/2024.
//

import AVFoundation
import AxcBedrock
import Foundation
import UIKit

class VideoMerge: NSObject {
    /// Merges multiple video files into a single MP4 at `outputUrl`.
    ///
    /// Clips are concatenated in array order; video and audio tracks are inserted
    /// back-to-back into a mutable composition and exported asynchronously.
    /// - Parameters:
    ///   - urls: Source video URLs. A single-element array is moved to `outputUrl`
    ///     without re-encoding; an empty array reports failure.
    ///   - outputUrl: Destination URL for the merged MP4 file.
    ///   - transform: Preferred transform applied to the composed video track
    ///     (e.g. to preserve recording orientation).
    ///   - successBlock: Called with `outputUrl` when the merge succeeds.
    ///   - failureBlcok: Called on any failure. (Label typo kept for
    ///     source compatibility with existing callers.)
    static func merge(urls: [URL],
                      outputUrl: URL,
                      transform: CGAffineTransform,
                      successBlock: AxcBlock.OneParam<URL>? = nil,
                      failureBlcok: AxcBlock.Empty? = nil) {
        // Single clip: no composition needed — just move the file to the output.
        if urls.count == 1, let first = urls.first {
            do {
                try FileManager.default.moveItem(at: first, to: outputUrl)
                successBlock?(outputUrl)
            } catch {
                // Previously a failed move was swallowed by `try?` and success
                // was reported anyway; report it as a failure instead.
                failureBlcok?()
            }
            return
        }

        // Nothing to merge: fail fast instead of exporting an empty composition.
        guard !urls.isEmpty else {
            failureBlcok?()
            return
        }

        let mixComposition = AVMutableComposition()

        let videoTrack: AVMutableCompositionTrack? = mixComposition.addMutableTrack(withMediaType: .video,
                                                                                    preferredTrackID: kCMPersistentTrackID_Invalid)
        videoTrack?.preferredTransform = transform

        let audioTrack: AVMutableCompositionTrack? = mixComposition.addMutableTrack(withMediaType: .audio,
                                                                                    preferredTrackID: kCMPersistentTrackID_Invalid)

        // Running insertion point. Accumulated as CMTime — the previous
        // Double-seconds version built times with `preferredTimescale: 0`,
        // which yields an *invalid* CMTime and an undefined insertion point.
        var cursor: CMTime = .zero

        for url in urls {
            let asset = AVURLAsset(url: url)
            let timeRange = CMTimeRange(start: .zero, duration: asset.duration)

            // Append this clip's video track at the current cursor.
            if let track = asset.tracks(withMediaType: .video).first {
                try? videoTrack?.insertTimeRange(timeRange, of: track, at: cursor)
            }

            // Append this clip's audio track at the current cursor.
            if let track = asset.tracks(withMediaType: .audio).first {
                try? audioTrack?.insertTimeRange(timeRange, of: track, at: cursor)
            }

            cursor = CMTimeAdd(cursor, asset.duration)
        }

        guard let exportSession = AVAssetExportSession(asset: mixComposition,
                                                       presetName: AVAssetExportPresetHighestQuality) else {
            // Previously this path returned silently, leaving the caller with
            // neither callback fired.
            failureBlcok?()
            return
        }

        exportSession.outputURL = outputUrl
        exportSession.shouldOptimizeForNetworkUse = true
        exportSession.outputFileType = .mp4

        exportSession.exportAsynchronously {
            switch exportSession.status {
            case .completed:
                successBlock?(outputUrl)
            default:
                // .failed / .cancelled (and any other terminal state) → failure.
                failureBlcok?()
            }
        }
    }
}
