//
//  VideoManager.swift
//  Kaluli
//
//  Created by sun on 2021/1/14.
//  Copyright © 2021 Ranka. All rights reserved.
//

import UIKit
import Photos

class VideoManager: NSObject {
    
    /// Builds a unique `.mp4` output path inside the app's temporary directory.
    /// - Returns: An absolute file path of the form `<tmp>/<UUID>.mp4`.
    class func getVideoExportFilePath() -> String {
        let path = NSTemporaryDirectory().appendingFormat("%@.mp4", UUID().uuidString)
        print("path========>>>>>>", path)
        return path
    }
    
    /// Exports a trimmed and cropped copy of `asset` to a temporary `.mp4` file.
    ///
    /// - Parameters:
    ///   - asset: The source asset. (The external label `form` looks like a typo
    ///     for `from`, but it is kept — renaming it would break existing callers.)
    ///   - timeRange: The portion of the source to keep.
    ///   - cropFrame: Crop rectangle in the UI's coordinate space.
    ///   - orignalSize: Size of the UI view that `cropFrame` is relative to.
    static func exportEditVideo(form asset: AVAsset,
                                timeRange: CMTimeRange,
                                cropFrame: CGRect,
                                orignalSize: CGSize) {
        guard let videoTrack = asset.tracks(withMediaType: .video).first else {
            return
        }
        
        let composition = AVMutableComposition()
        
        // Video track. Insert at .zero so the composition timeline starts at 0.
        // (The original inserted at `timeRange.start` AND set
        // `exportSession.timeRange`, double-offsetting the export.)
        let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        try? compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: .zero)
        
        // Audio track is optional — the source may have no sound.
        if let audioTrack = asset.tracks(withMediaType: .audio).first {
            let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
            try? compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: .zero)
        }
        
        // `rect.size` is the render size; `rect.origin` is the (negative)
        // translation that moves the crop region to the render origin.
        let rect = VideoManager.confirmTranslate(videoTrack.naturalSize, cropFrame: cropFrame, orignalSize: orignalSize)
        
        var transform = confirmTransform(videoTrack)
        transform = transform.translatedBy(x: rect.origin.x, y: rect.origin.y)
        
        // FIX: the layer instruction must reference the COMPOSITION's track, not
        // the source asset's track, otherwise the transform is silently ignored
        // when the composition is exported.
        guard let renderTrack = compositionVideoTrack else { return }
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: renderTrack)
        layerInstruction.setTransform(transform, at: .zero)
        
        let instruction = AVMutableVideoCompositionInstruction()
        // The composition timeline starts at zero (see insertTimeRange above).
        instruction.timeRange = CMTimeRange(start: .zero, duration: timeRange.duration)
        instruction.layerInstructions = [layerInstruction]
        
        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = rect.size
        videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
        videoComposition.instructions = [instruction]
        videoComposition.renderScale = 1
        
        let outputUrl = URL(fileURLWithPath: self.getVideoExportFilePath())
        
        // FIX: export the edited COMPOSITION (the original exported the untouched
        // source `asset`), and use a re-encoding preset —
        // AVAssetExportPresetPassthrough does not apply a videoComposition.
        guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
            return
        }
        
        exportSession.outputURL = outputUrl
        exportSession.outputFileType = .mp4
        exportSession.videoComposition = videoComposition
        
        exportSession.exportAsynchronously {
            switch exportSession.status {
            case .completed:
                print("=========>>>", outputUrl)
            case .failed:
                // Surface the real failure instead of printing an empty line.
                print("video export failed:", exportSession.error?.localizedDescription ?? "unknown error")
            case .cancelled:
                print("video export cancelled")
            default:
                break
            }
        }
    }
    
    /// Maps the UI-space crop rectangle into the video's pixel space.
    ///
    /// The returned rect's size is the largest rect with `cropFrame`'s aspect
    /// ratio that fits inside `naturalSize`; its origin is the NEGATED pixel
    /// offset of the crop (so it can be fed directly to `translatedBy`).
    /// Returns `.zero` when either `cropFrame` or `orignalSize` is degenerate.
    static func confirmTranslate(_ naturalSize: CGSize,
                                 cropFrame: CGRect,
                                 orignalSize: CGSize) -> CGRect {
        
        if cropFrame.width > 0 && cropFrame.height > 0 {
            let scale = cropFrame.width / cropFrame.height
            let maxWidth = naturalSize.width
            let maxHeight = naturalSize.height
            
            // Start from both candidate fits; exactly one of the clamp branches
            // below fires unless the aspect ratios match exactly, in which case
            // the result is (maxWidth, maxHeight) either way.
            var width = maxHeight * scale
            var height = maxWidth / scale
            if width > maxWidth {
                width = maxWidth
                height = width / scale
            } else if height > maxHeight {
                height = maxHeight
                width = height * scale
            }
            
            if orignalSize.width > 0 && orignalSize.height > 0 {
                // Scale the UI-space crop origin up to video pixels, then negate
                // so the transform shifts the crop region to the render origin.
                let x = cropFrame.origin.x / orignalSize.width * maxWidth
                let y = cropFrame.origin.y / orignalSize.height * maxHeight
                return CGRect(x: -floor(x), y: -floor(y), width: width, height: height)
            }
        }
        return .zero
    }
    
    /// Builds the transform that renders the track upright, compensating for
    /// the capture orientation encoded in `preferredTransform`.
    static func confirmTransform(_ assetTrack: AVAssetTrack) -> CGAffineTransform {
        let info = orientationFromTransform(assetTrack.preferredTransform)
        let videoSize = assetTrack.naturalSize
        
        var transform = CGAffineTransform.identity
        switch info.orientation {
        case .left:
            // FIX: was `.pi * 2 / 3` (120°) — a typo. A 270° rotation combined
            // with this translation maps the natural frame into the portrait
            // render rect.
            transform = transform.translatedBy(x: 0, y: videoSize.width)
            transform = transform.rotated(by: .pi * 3 / 2)
        case .right:
            transform = transform.translatedBy(x: videoSize.height, y: 0)
            transform = transform.rotated(by: .pi * 0.5)
        case .down:
            transform = transform.translatedBy(x: videoSize.width, y: videoSize.height)
            transform = transform.rotated(by: .pi)
        default:
            // .up (and any mirrored variant not matched below): no correction.
            break
        }
        
        return transform
    }
    
    /// Classifies a track's `preferredTransform` into a capture orientation.
    /// Only the four unmirrored 90° rotations are recognized; anything else
    /// falls back to (.up, false).
    static func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) {
        var assetOrientation = UIImage.Orientation.up
        var isPortrait = false
        let tfA = transform.a
        let tfB = transform.b
        let tfC = transform.c
        let tfD = transform.d
        
        if tfA == 0 && tfB == 1.0 && tfC == -1.0 && tfD == 0 {
            assetOrientation = .right
            isPortrait = true
        } else if tfA == 0 && tfB == -1.0 && tfC == 1.0 && tfD == 0 {
            assetOrientation = .left
            isPortrait = true
        } else if tfA == 1.0 && tfB == 0 && tfC == 0 && tfD == 1.0 {
            assetOrientation = .up
        } else if tfA == -1.0 && tfB == 0 && tfC == 0 && tfD == -1.0 {
            assetOrientation = .down
        }
        return (assetOrientation, isPortrait)
    }
}
