//
//  LivePhoto17.swift
//  HWallpaper
//
//  Created by 严楠楠 on 2023/10/23.
//

import UIKit
import MobileCoreServices
import Photos
import ImageIO
import UniformTypeIdentifiers

class LivePhoto17 {
    // MARK: PUBLIC
    typealias LivePhotoResources = (pairedImage: URL, pairedVideo: URL)
    /// Returns the paired image and video for the given PHLivePhoto.
    /// Work is dispatched to the shared concurrent queue; `completion` receives nil on failure.
    public class func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
        LivePhoto17.queue.async {
            LivePhoto17.shared.extractResources(from: livePhoto, completion: completion)
        }
    }
    /// Generates a PHLivePhoto from an image and video.  Also returns the paired image and video.
    /// Work is dispatched to the shared concurrent queue.
    public class func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
        LivePhoto17.queue.async {
            LivePhoto17.shared.generate(from: imageURL, videoURL: videoURL, progress: progress, completion: completion)
        }
    }
    /// Save a Live Photo to the Photo Library by passing the paired image and video.
    /// `completion` receives the success flag from the Photos change request; note it
    /// is invoked on whatever queue Photos uses for its completion handlers.
    public class func saveToLibrary(_ resources: LivePhotoResources, completion: @escaping (Bool) -> Void) {
        let library = PHPhotoLibrary.shared()
        library.performChanges({
            let request = PHAssetCreationRequest.forAsset()
            request.addResource(with: .pairedVideo, fileURL: resources.pairedVideo, options: nil)
            request.addResource(with: .photo, fileURL: resources.pairedImage, options: nil)
        }, completionHandler: { success, error in
            if error != nil {
                print(error as Any)
            }
            completion(success)
        })
    }
    
    // MARK: PRIVATE
    // Project-declared video helper; not referenced in this file — TODO confirm it is still needed.
    private let videoBox = WAVideoBox()
    // Singleton through which the public class funcs funnel their work.
    static let shared = LivePhoto17()
    // Concurrent background queue on which generation/extraction runs.
    private static let queue = DispatchQueue(label: "com.limit-point.LivePhotoQueue", attributes: .concurrent)
    /// Dedicated cache directory for intermediate Live Photo files.
    /// Created on first access; nil when the system caches directory is unavailable.
    lazy var cacheDirectory: URL? = {
        if let cacheDirectoryURL = try? FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false) {
            let fullDirectory = cacheDirectoryURL.appendingPathComponent("com.limit-point.LivePhoto", isDirectory: true)
            // Fixed: the existence check used `absoluteString` (which includes the
            // "file://" scheme) so it always reported "missing"; `path` is the
            // correct file-system representation.
            if !FileManager.default.fileExists(atPath: fullDirectory.path) {
                try? FileManager.default.createDirectory(at: fullDirectory, withIntermediateDirectories: true, attributes: nil)
            }
            return fullDirectory
        }
        return nil
    }()
    
    deinit {
        // Remove all intermediate files when the instance is torn down.
        clearCache()
    }
    
    /// Extracts a key (still) frame from the video to serve as the Live Photo's paired image.
    /// Uses the video's still-image-time metadata when present, otherwise the midpoint.
    /// - Returns: URL of the JPEG written to the cache directory, or nil on any failure.
    private func generateKeyPhoto(from videoURL: URL) -> URL? {
        var percent: Float = 0.5
        let videoAsset = AVURLAsset(url: videoURL)
        if let stillImageTime = videoAsset.stillImageTime() {
            percent = Float(stillImageTime.value) / Float(videoAsset.duration.value)
        }
        guard let imageFrame = videoAsset.getAssetFrame(percent: percent) else { return nil }
        guard let jpegData = imageFrame.jpegData(compressionQuality: 1.0) else { return nil }
        guard let url = cacheDirectory?.appendingPathComponent(UUID().uuidString).appendingPathExtension("jpg") else { return nil }
        do {
            // Fixed: the original used `try?` inside a catch-less `do`, so a failed
            // write still returned `url` pointing at a file that was never written.
            try jpegData.write(to: url)
            return url
        } catch {
            return nil
        }
    }
    /// Deletes the entire cache directory and everything inside it; failures are ignored.
    private func clearCache() {
        if let cacheDirectory = cacheDirectory {
            try? FileManager.default.removeItem(at: cacheDirectory)
        }
    }
    
    /// Builds a PHLivePhoto from an (optional) key image and a video by embedding a
    /// shared asset identifier in both files, then requesting the live photo from Photos.
    /// `completion` is always invoked on the main queue; both values are nil on failure.
    private func generate(from imageURL: URL?, videoURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (PHLivePhoto?, LivePhotoResources?) -> Void) {
        guard let cacheDirectory = cacheDirectory else {
            DispatchQueue.main.async {
                completion(nil, nil)
            }
            return
        }
        
        // The same identifier must be stamped into both the image and the video so
        // Photos recognises them as a pair.
        let assetIdentifier = UUID().uuidString
        // When no key image is supplied, extract one from the video.
        let _keyPhotoURL = imageURL ?? generateKeyPhoto(from: videoURL)
        guard let keyPhotoURL = _keyPhotoURL, let pairedImageURL = addAssetID(assetIdentifier, toImage: keyPhotoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("jpg")) else {
            DispatchQueue.main.async {
                completion(nil, nil)
            }
            return
        }
        addAssetID(assetIdentifier, toVideo: videoURL, saveTo: cacheDirectory.appendingPathComponent(assetIdentifier).appendingPathExtension("mov"), progress: progress) { (_videoURL) in
            if let pairedVideoURL = _videoURL {
                _ = PHLivePhoto.request(withResourceFileURLs: [pairedVideoURL, pairedImageURL], placeholderImage: nil, targetSize: CGSize.zero, contentMode: PHImageContentMode.aspectFit, resultHandler: { (livePhoto: PHLivePhoto?, info: [AnyHashable : Any]) -> Void in
                    // Ignore the low-quality "degraded" interim callback; wait for the final result.
                    if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
                        return
                    }
                    DispatchQueue.main.async {
                        completion(livePhoto, (pairedImageURL, pairedVideoURL))
                    }
                })
            } else {
                DispatchQueue.main.async {
                    completion(nil, nil)
                }
            }
        }
    }
    
    /// Streams each of the live photo's underlying asset resources into memory and
    /// writes them into `directoryURL`, pairing the video component with the key photo.
    /// `completion` fires on the main queue once every resource has finished; nil when
    /// either component is missing.
    private func extractResources(from livePhoto: PHLivePhoto, to directoryURL: URL, completion: @escaping (LivePhotoResources?) -> Void) {
        let assetResources = PHAssetResource.assetResources(for: livePhoto)
        let group = DispatchGroup()
        var keyPhotoURL: URL?
        var videoURL: URL?
        for resource in assetResources {
            // Accumulates the resource's bytes as they stream in.
            let buffer = NSMutableData()
            let options = PHAssetResourceRequestOptions()
            // Allow fetching from iCloud when the resource is not local.
            options.isNetworkAccessAllowed = true
            group.enter()
            PHAssetResourceManager.default().requestData(for: resource, options: options, dataReceivedHandler: { (data) in
                buffer.append(data)
            }) { (error) in
                if error == nil {
                    if resource.type == .pairedVideo {
                        videoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
                    } else {
                        // Any non-video resource is treated as the key photo.
                        keyPhotoURL = self.saveAssetResource(resource, to: directoryURL, resourceData: buffer as Data)
                    }
                } else {
                    print(error as Any)
                }
                group.leave()
            }
        }
        group.notify(queue: DispatchQueue.main) {
            guard let pairedPhotoURL = keyPhotoURL, let pairedVideoURL = videoURL else {
                completion(nil)
                return
            }
            completion((pairedPhotoURL, pairedVideoURL))
        }
    }
    
    /// Extracts the live photo's resources into the cache directory.
    /// Fixed: a nil cache directory previously dropped the request silently,
    /// leaving the caller's completion uncalled forever; it now reports failure.
    private func extractResources(from livePhoto: PHLivePhoto, completion: @escaping (LivePhotoResources?) -> Void) {
        guard let cacheDirectory = cacheDirectory else {
            DispatchQueue.main.async {
                completion(nil)
            }
            return
        }
        extractResources(from: livePhoto, to: cacheDirectory, completion: completion)
    }
    
    /// Writes raw asset-resource bytes to a uniquely named file in `directory`.
    /// The filename extension is derived from the resource's uniform type identifier.
    /// - Returns: The written file's URL, or nil when the type has no known
    ///   extension or the write fails.
    private func saveAssetResource(_ resource: PHAssetResource, to directory: URL, resourceData: Data) -> URL? {
        // UniformTypeIdentifiers (already imported by this file) replaces the
        // deprecated UTTypeCopyPreferredTagWithClass API from MobileCoreServices.
        guard let ext = UTType(resource.uniformTypeIdentifier)?.preferredFilenameExtension else {
            return nil
        }
        
        let fileUrl = directory.appendingPathComponent(UUID().uuidString).appendingPathExtension(ext)
        
        do {
            try resourceData.write(to: fileUrl, options: [.atomic])
        } catch {
            print("Could not save resource \(resource) to filepath \(String(describing: fileUrl))")
            return nil
        }
        
        return fileUrl
    }
    
    /// Re-encodes the image at `imageURL` into `destinationURL` as a JPEG, embedding
    /// the asset identifier in the Apple MakerNote so Photos can pair it with a video.
    /// - Returns: `destinationURL` on success; nil when the image can't be read or written.
    func addAssetID(_ assetIdentifier: String, toImage imageURL: URL, saveTo destinationURL: URL) -> URL? {
        guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, kUTTypeJPEG, 1, nil),
              let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
              let imageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, nil),
                var imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [AnyHashable : Any] else { return nil }
        // MakerNote key "17" is the slot Photos reads the live-photo content identifier from.
        let assetIdentifierKey = "17"
        let assetIdentifierInfo = [assetIdentifierKey : assetIdentifier]
        imageProperties[kCGImagePropertyMakerAppleDictionary] = assetIdentifierInfo
        CGImageDestinationAddImage(imageDestination, imageRef, imageProperties as CFDictionary)
        // Fixed: the finalize result was ignored, so a failed encode still returned
        // a URL pointing at an incomplete/missing file.
        guard CGImageDestinationFinalize(imageDestination) else { return nil }
        return destinationURL
    }
    
    // Held as properties so the readers/writer stay alive for the duration of the
    // asynchronous transcode performed by addAssetID(_:toVideo:saveTo:progress:completion:).
    var audioReader: AVAssetReader?
    var videoReader: AVAssetReader?
    var assetWriter: AVAssetWriter?
    
    /// Transcodes `videoURL` into a QuickTime movie at `destinationURL` carrying the
    /// content-identifier and still-image-time metadata that Photos needs to pair the
    /// video with a key photo.
    /// - Parameters:
    ///   - progress: Reports the fraction of video frames written so far.
    ///   - completion: Called with `destinationURL` on success, nil on failure.
    func addAssetID(_ assetIdentifier: String, toVideo videoURL: URL, saveTo destinationURL: URL, progress: @escaping (CGFloat) -> Void, completion: @escaping (URL?) -> Void) {
        
        var audioWriterInput: AVAssetWriterInput?
        var audioReaderOutput: AVAssetReaderOutput?
        let videoAsset = AVURLAsset(url: videoURL)
        
        // Approximate frame count (project helper); used for progress reporting and
        // the still-image time range.
        let frameCount = videoAsset.countFrames(exact: false)
        guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
            completion(nil)
            return
        }
        do {
            // Create the Asset Writer
            assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
            // Create Video Reader Output (decode frames to BGRA pixel buffers)
            videoReader = try AVAssetReader(asset: videoAsset)
            let videoReaderSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)]
            let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
            videoReader?.add(videoReaderOutput)
            // Create Video Writer Input: HEVC, fixed 1920 height with width scaled to
            // preserve the source aspect ratio.
            let ratio = (videoTrack.naturalSize.height / videoTrack.naturalSize.width)
            let vHeight: CGFloat = 1920
            let vWidth: CGFloat = vHeight / ratio
//            let vWidth: CGFloat = 1080
//            let vHeight: CGFloat = vWidth * ratio
            let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey : AVVideoCodecType.hevc, AVVideoWidthKey :vWidth, AVVideoHeightKey : vHeight])
            videoWriterInput.transform = videoTrack.preferredTransform
            // NOTE(review): real-time mode during an offline transcode is unusual and
            // can drop data if the writer falls behind — confirm this is intentional
            // (the audio input below uses `false`).
            videoWriterInput.expectsMediaDataInRealTime = true
            assetWriter?.add(videoWriterInput)
            // Create Audio Reader Output & Writer Input (passthrough; only when the
            // source actually has an audio track)
            if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
                do {
                    let _audioReader = try AVAssetReader(asset: videoAsset)
                    let _audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
                    _audioReader.add(_audioReaderOutput)
                    audioReader = _audioReader
                    audioReaderOutput = _audioReaderOutput
                    let _audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
                    _audioWriterInput.expectsMediaDataInRealTime = false
                    assetWriter?.add(_audioWriterInput)
                    audioWriterInput = _audioWriterInput
                } catch {
                    print(error)
                }
            }
            // Create necessary identifier metadata and still image time metadata
            let assetIdentifierMetadata = metadataForAssetID(assetIdentifier)
            let stillImageTimeMetadataAdapter = createMetadataAdaptorForStillImageTime()
            assetWriter?.metadata = [assetIdentifierMetadata]
            assetWriter?.add(stillImageTimeMetadataAdapter.assetWriterInput)
            // Start the Asset Writer
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: .zero)
            // Add still image metadata at the midpoint of the video
            let _stillImagePercent: Float = 0.5
            stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(items: [metadataItemForStillImageTime()],timeRange: videoAsset.makeStillImageTimeRange(percent: _stillImagePercent, inFrameCount: frameCount)))
            // For end of writing / progress.
            // NOTE(review): these flags are mutated from two different serial queues
            // without synchronization — confirm the race on didCompleteWriting() is acceptable.
            var writingVideoFinished = false
            var writingAudioFinished = false
            var currentFrameCount = 0
            // Finalizes the writer only once both the video and audio paths are done.
            func didCompleteWriting() {
                guard writingAudioFinished && writingVideoFinished else { return }
                assetWriter?.finishWriting {
                    if self.assetWriter?.status == .completed {
                        completion(destinationURL)
                    } else {
                        completion(nil)
                    }
                }
            }
            // Start writing video
            if videoReader?.startReading() ?? false {
                videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "videoWriterInputQueue")) {
                    while videoWriterInput.isReadyForMoreMediaData {
                        if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer()  {
                            currentFrameCount += 1
                            let percent:CGFloat = CGFloat(currentFrameCount)/CGFloat(frameCount)
                            progress(percent)
                            if !videoWriterInput.append(sampleBuffer) {
                                print("Cannot write: \(String(describing: self.assetWriter?.error?.localizedDescription))")
                                self.videoReader?.cancelReading()
                            }
                        } else {
                            // Reader exhausted: mark the video path finished.
                            videoWriterInput.markAsFinished()
                            writingVideoFinished = true
                            didCompleteWriting()
                        }
                    }
                }
            } else {
                writingVideoFinished = true
                didCompleteWriting()
            }
            // Start writing audio (no-op path when there is no audio reader)
            if audioReader?.startReading() ?? false {
                audioWriterInput?.requestMediaDataWhenReady(on: DispatchQueue(label: "audioWriterInputQueue")) {
                    while audioWriterInput?.isReadyForMoreMediaData ?? false {
                        guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else {
                            audioWriterInput?.markAsFinished()
                            writingAudioFinished = true
                            didCompleteWriting()
                            return
                        }
                        // NOTE(review): the append result is ignored here, unlike the video path.
                        audioWriterInput?.append(sampleBuffer)
                    }
                }
            } else {
                writingAudioFinished = true
                didCompleteWriting()
            }
        } catch {
            print(error)
            completion(nil)
        }
    }
    
    /// Builds the QuickTime "content identifier" metadata item that ties the video
    /// to its paired still image.
    private func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
        let metadataItem = AVMutableMetadataItem()
        metadataItem.key = "com.apple.quicktime.content.identifier" as (NSCopying & NSObjectProtocol)?
        metadataItem.keySpace = AVMetadataKeySpace(rawValue: "mdta")
        metadataItem.value = assetIdentifier as (NSCopying & NSObjectProtocol)?
        metadataItem.dataType = "com.apple.metadata.datatype.UTF-8"
        return metadataItem
    }
    
    /// Creates a metadata adaptor whose writer input is format-hinted for the
    /// QuickTime "still-image-time" boxed metadata track.
    private func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
        let identifier = "mdta/com.apple.quicktime.still-image-time"
        let spec: NSDictionary = [
            kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString: identifier,
            kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString: "com.apple.metadata.datatype.int8",
        ]
        var formatDescription: CMFormatDescription? = nil
        CMMetadataFormatDescriptionCreateWithMetadataSpecifications(allocator: kCFAllocatorDefault, metadataType: kCMMetadataFormatType_Boxed, metadataSpecifications: [spec] as CFArray, formatDescriptionOut: &formatDescription)
        let metadataInput = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil, sourceFormatHint: formatDescription)
        return AVAssetWriterInputMetadataAdaptor(assetWriterInput: metadataInput)
    }
    
    /// The timed metadata item marking which moment of the video is the still image.
    private func metadataItemForStillImageTime() -> AVMetadataItem {
        let metadataItem = AVMutableMetadataItem()
        metadataItem.key = "com.apple.quicktime.still-image-time" as (NSCopying & NSObjectProtocol)?
        metadataItem.keySpace = AVMetadataKeySpace(rawValue: "mdta")
        metadataItem.value = 0 as (NSCopying & NSObjectProtocol)?
        metadataItem.dataType = "com.apple.metadata.datatype.int8"
        return metadataItem
    }
    
    
    
    // This function adds metadata to a video asset
    /// Copies the two live-photo metadata tracks from a bundled donor movie
    /// ("NRNG9365.MOV") onto `inputURL`, stamps the content identifier and
    /// still-image-time, and exports the result with a passthrough preset.
    /// Blocks the calling run loop until the export finishes.
    /// - Parameters:
    ///   - group: Unused here; retained for interface compatibility with callers.
    ///   - complete: Called exactly once with the output URL, or nil on failure.
    static func copyMetadataToVideo(_ inputURL: URL, group: DispatchGroup, assetIdentifier: String,complete: @escaping ((URL?) -> Void)) {
        // Define constants related to the metadata that will be added
        let kKeySpaceQuickTimeMetadata = "mdta"
        let kKeyContentIdentifier = "com.apple.quicktime.content.identifier"
        let kKeyStillImageTime = "com.apple.quicktime.still-image-time"
        guard let cacheDirectory = shared.cacheDirectory else {
            complete(nil)
            return
        }
        // Remove any stale output from a previous run (the original deleted this
        // file in two separate places; deduplicated here).
        let outputURL = cacheDirectory.appendingPathComponent("video_part").appendingPathExtension("MOV")
        if FileManager.default.fileExists(atPath: outputURL.path) {
            do {
                try FileManager.default.removeItem(at: outputURL)
            } catch {
                print("Failed to delete existing output file: \(error)")
                return complete(nil)
            }
        }
        guard let workingLivePhotoURL = Bundle.main.url(forResource: "NRNG9365", withExtension: "MOV")  else {
            print("file is nil")
            return complete(nil)
        }
        // Load video assets from provided URLs
        let asset = AVAsset(url: inputURL)
        let workingLivePhotoAsset = AVAsset(url: workingLivePhotoURL)

        // Ensure the donor asset is readable
        guard workingLivePhotoAsset.isReadable else {
            print("Failed to read working live photo asset.")
            return complete(nil)
        }

        // Create a new composition to mix video and metadata tracks
        let mixComposition = AVMutableComposition()

        // Create tracks in the composition for the video and metadata
        guard let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else {
            print("Couldn't add video track to the mix composition")
            return complete(nil)
        }

        // Create metadata tracks in the composition
        guard let metadataTrack1 = mixComposition.addMutableTrack(withMediaType: .metadata, preferredTrackID: kCMPersistentTrackID_Invalid),
              let metadataTrack2 = mixComposition.addMutableTrack(withMediaType: .metadata, preferredTrackID: kCMPersistentTrackID_Invalid) else {
            print("Couldn't add metadata tracks to the mix composition")
            return complete(nil)
        }

        // Insert the input's video track and the donor's two metadata tracks over
        // the full duration of the input asset.
        do {
            try videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: asset.tracks(withMediaType: .video)[0], at: .zero)
            try metadataTrack1.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: workingLivePhotoAsset.tracks(withMediaType: .metadata)[0], at: .zero)
            try metadataTrack2.insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration), of: workingLivePhotoAsset.tracks(withMediaType: .metadata)[1], at: .zero)
        } catch {
            print("Error inserting time ranges: \(error)")
            return complete(nil)
        }
        
        // Top-level metadata items: content identifier + still image time.
        let contentIdentifierItem = AVMutableMetadataItem()
        contentIdentifierItem.key = kKeyContentIdentifier as (NSCopying & NSObjectProtocol)
        contentIdentifierItem.keySpace = AVMetadataKeySpace(rawValue: kKeySpaceQuickTimeMetadata)
        contentIdentifierItem.value = assetIdentifier as (NSCopying & NSObjectProtocol)

        let stillImageTimeItem = AVMutableMetadataItem()
        stillImageTimeItem.key = kKeyStillImageTime as (NSCopying & NSObjectProtocol)
        stillImageTimeItem.keySpace = AVMetadataKeySpace(rawValue: kKeySpaceQuickTimeMetadata)
        stillImageTimeItem.value = 0 as (NSCopying & NSObjectProtocol)

        // Set up and start the video export process.
        // Fixed: a nil export session previously fell through to CFRunLoopRun() and
        // blocked forever with `complete` never called.
        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough) else {
            print("Failed to create export session.")
            return complete(nil)
        }
        exporter.outputURL = outputURL
        exporter.outputFileType = .mov
        exporter.metadata = [contentIdentifierItem, stillImageTimeItem]

        // Block this run loop until the asynchronous export finishes.
        let runLoop = CFRunLoopGetCurrent()
        exporter.exportAsynchronously {
            switch exporter.status {
            case .completed:
                complete(outputURL)
                print("Export completed!")
            case .failed:
                complete(nil)
                if let error = exporter.error {
                    print("Export failed with error: \(error.localizedDescription)")
                } else {
                    print("Export failed without a specific error.")
                }
            default:
                // Fixed: cancelled/unknown states previously never invoked `complete`.
                complete(nil)
                print("Export resulted in unknown state.")
            }
            CFRunLoopStop(runLoop)
        }
        CFRunLoopRun()
    }
    // Frame-count adjustment point (see loop bound below)
    /// Extracts the first frame of `videoURL` and saves it as a HEIC image carrying
    /// the live-photo asset identifier in its Apple MakerNote metadata.
    /// - Parameters:
    ///   - group: Unused here; retained for interface compatibility with callers.
    ///   - complete: Called exactly once with the HEIC URL, or nil on failure.
    static func extractFramesAndSaveHEIC(_ videoURL: URL, group: DispatchGroup, assetIdentifier: String, complete: ((URL?) -> Void)) {
        let videoAsset = AVAsset(url: videoURL)
        
        // Ensure the video contains video tracks
        guard videoAsset.tracks(withMediaType: .video).count > 0 else {
            print("No video tracks found in asset.")
            return complete(nil)
        }
        
        // Set up the image generator for (near) frame-exact extraction
        let frameRate: Int32 = videoAsset.duration.timescale
        let imageGenerator = AVAssetImageGenerator(asset: videoAsset)
        imageGenerator.requestedTimeToleranceBefore = CMTime(value: 1, timescale: frameRate)
        imageGenerator.requestedTimeToleranceAfter = CMTime(value: 1, timescale: frameRate)
        var images = [CGImage]()
        // Only the first frame is needed for the paired photo; raise the bound to
        // extract more frames.
        for frameIndex in 0..<1 {
            let frameTime = CMTimeMake(value: Int64(frameIndex), timescale: frameRate)
            do {
                let imageRef = try imageGenerator.copyCGImage(at: frameTime, actualTime: nil)
                images.append(imageRef)
            } catch {
                print("Error generating frame at index \(frameIndex): \(error)")
            }
        }
        
        // Metadata for the HEIC image: MakerNote key "17" carries the live-photo
        // content identifier; an EXIF version block is included alongside it.
        let makerNote = NSMutableDictionary()
        makerNote.setObject(assetIdentifier, forKey: "17" as NSCopying)
        let metadata = NSMutableDictionary()
        metadata.setObject(makerNote, forKey: kCGImagePropertyMakerAppleDictionary as String as NSCopying)
        let exifVersion = NSMutableDictionary()
        exifVersion.setObject([2,2,1], forKey: kCGImagePropertyExifVersion as String as NSCopying)
        metadata.setObject(exifVersion, forKey: kCGImagePropertyExifDictionary as String as NSCopying)
        
        // Check if there are any images to save
        guard images.count > 0 else {
            print("No frames were extracted from the video.")
            return complete(nil)
        }
        
        // Save the extracted images as a HEIC file.
        // Fixed: the original called `complete(nil)` twice in this guard.
        guard let cacheDirectory = shared.cacheDirectory else {
            return complete(nil)
        }
        let url = cacheDirectory.appendingPathComponent("photo_part").appendingPathExtension("HEIC")
        if FileManager.default.fileExists(atPath: url.path) {
            try? FileManager.default.removeItem(at: url)
        }
        
        guard let destination = CGImageDestinationCreateWithURL(url as CFURL, AVFileType.heic.rawValue as CFString, images.count, nil) else {
            print("Failed to create image destination.")
            return complete(nil)
        }
        
        for image in images {
            CGImageDestinationAddImage(destination, image, metadata)
        }
        
        if !CGImageDestinationFinalize(destination) {
            complete(nil)
            print("Failed to save HEIC image.")
        } else {
            complete(url)
            print("Saved HEIC image successfully!")
        }
    }
    /// Generates a Live Photo from `videoURL` (metadata-stamped video + HEIC key
    /// photo) and saves it to the photo library.
    /// - Parameters:
    ///   - success: Invoked when the library save succeeds (from Photos' callback
    ///     queue — not necessarily the main queue).
    ///   - failure: Invoked when any step fails.
    static func outputLivePhoto(videoURL: URL, success: @escaping (() -> Void), failure: @escaping (() -> Void)){
        let assetIdentifier = UUID().uuidString
        let group = DispatchGroup()
        group.enter()
        var vURL: URL?
        var imgURL: URL?
        self.copyMetadataToVideo(videoURL, group: group, assetIdentifier: assetIdentifier) { video in
            vURL = video
            if let video = video {
                self.extractFramesAndSaveHEIC(video, group: group, assetIdentifier: assetIdentifier) { img in
                    imgURL = img
                    group.leave()
                }
            } else {
                // Fixed: on failure the group was never left, so group.notify never
                // fired and neither `success` nor `failure` was ever called.
                group.leave()
            }
        }
        group.notify(queue: .main) {
            guard let vURL = vURL, let imgURL = imgURL else{
                failure()
                return
            }
            let photoLibrary = PHPhotoLibrary.shared()

            photoLibrary.performChanges({
                let creationRequest = PHAssetCreationRequest.forAsset()
                creationRequest.addResource(with: .photo, fileURL: imgURL, options: nil)
                creationRequest.addResource(with: .pairedVideo, fileURL: vURL, options: nil)
            },
            completionHandler: { su, error in
                if su {
                    success()
                    print("Live Photo saved successfully!")
                } else {
                    // Fixed: a failure without an NSError previously called neither callback.
                    failure()
                    if let error = error {
                        print("Error saving Live Photo to the library: \(error.localizedDescription)")
                        
                        // Cast the error to NSError to access the userInfo dictionary
                        let nsError = error as NSError
                        for (key, value) in nsError.userInfo {
                            print("\(key): \(value)")
                        }
                    }
                }
            })
        }
    }
    
    /// Trims the video to the first `second` seconds, exporting into the cache directory.
    /// Videos of one second or shorter are passed through unchanged.
    /// - Parameter complete: Called exactly once with the trimmed URL (or the original
    ///   URL when no trim was needed), or nil on failure.
    static func cutVideo(_ videoURL: URL, second: Int64, type: AVFileType, complete: @escaping ((URL?) -> Void)){
        guard let cacheDirectory = LivePhoto17.shared.cacheDirectory else {
            // Fixed: previously returned without ever invoking `complete`.
            return complete(nil)
        }
        let outputURL = cacheDirectory.appendingPathComponent("slipTempVideo").appendingPathExtension("mp4")
        if FileManager.default.fileExists(atPath: outputURL.path) {
            try? FileManager.default.removeItem(at: outputURL)
        }
        
        let videoAsset = AVURLAsset(url: videoURL)
        let duration = videoAsset.duration
        let seconds = Float(duration.value) / Float(duration.timescale)
        if seconds > 1 {
            let composition = AVMutableComposition()
            let track = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
            try? track?.insertTimeRange(CMTimeRange(start: CMTime.zero, duration: CMTime(value: Int64(duration.timescale) * second, timescale: duration.timescale)), of: videoAsset.tracks(withMediaType: .video)[0], at: CMTime.zero)
            
            // Fixed: a nil export session previously meant `complete` was never called,
            // and a failed export still handed back an output URL that was never written.
            guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
                return complete(nil)
            }
            exportSession.outputURL = outputURL
            exportSession.outputFileType = type
            exportSession.exportAsynchronously { [weak exportSession] in
                if exportSession?.status == .completed {
                    complete(outputURL)
                } else {
                    complete(nil)
                }
            }
            return
        }
        complete(videoURL)
    }
    /// Re-times the video to 60 fps and exports only its first second.
    /// Requires iOS 15; on earlier systems completes with nil.
    /// - Parameter complete: Called exactly once with the exported URL, or nil on failure.
    static func videoFrameRateModify(_ sourceVideoPath: URL, complete: @escaping ((URL?) -> Void)){
        let ex = sourceVideoPath.pathExtension
        guard let cacheDirectory = LivePhoto17.shared.cacheDirectory else {
            return complete(nil)
        }
        // NOTE(review): the output keeps the source's extension but the container is
        // always exported as .mp4 — confirm callers tolerate e.g. a ".mov" path
        // holding MP4 data.
        let outputURL = cacheDirectory.appendingPathComponent("fpsVideo").appendingPathExtension(ex)
        if FileManager.default.fileExists(atPath: outputURL.path) {
            try? FileManager.default.removeItem(at: outputURL)
        }
        let videoAsset = AVURLAsset(url: sourceVideoPath)
        if #available(iOS 15.0, *) {
            videoAsset.loadTracks(withMediaType: .video) { listData, error in
                if let videoTrack = listData?.first{
                    Task{
                        guard let duration = try? await videoAsset.load(.duration) else { return complete(nil) }
                        guard let naturalSize = try? await videoTrack.load(.naturalSize) else { return complete(nil) }
                        let timeRange = CMTimeRange(start: .zero, duration: duration)
                        // Build a video composition that re-times every frame.
                        let videoComposition = AVMutableVideoComposition()
                        let instruction = AVMutableVideoCompositionInstruction()
                        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
                        instruction.layerInstructions = [layerInstruction]
                        instruction.timeRange = timeRange
                        videoComposition.instructions = [instruction]

                        // frameDuration is the reciprocal of the target frame rate; e.g.
                        // CMTime(value: 1, timescale: 24) would produce 24 fps.
                        videoComposition.sourceTrackIDForFrameTiming = kCMPersistentTrackID_Invalid
                        videoComposition.frameDuration = CMTime(value: 1, timescale: 60)
                        videoComposition.renderSize = naturalSize
                        
                        let exportSession = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)
                        exportSession?.outputFileType = .mp4
                        exportSession?.outputURL = outputURL
                        exportSession?.shouldOptimizeForNetworkUse = false
                        exportSession?.videoComposition = videoComposition
                        // Re-timed export covers only the first second (60 frames at 60 fps).
                        exportSession?.timeRange = CMTimeRange(start: .zero, end: CMTime(value: 60, timescale: 60))
                        exportSession?.exportAsynchronously(completionHandler: { [weak exportSession] in
                            if exportSession?.status == .completed {
                                complete(outputURL)
                            } else {
                                complete(nil)
                            }
                        })
                    }
                }else{
                    complete(nil)
                }
            }
        } else {
            // Fixed: the pre-iOS 15 fallback previously never invoked `complete`,
            // leaving the caller waiting forever.
            complete(nil)
        }
    }
}

extension LivePhoto17 {
    /// Debug helper: logs the duration and nominal frame rate of the video at
    /// `videoURL`, followed by the URL itself. Does nothing if the file has no
    /// video track.
    static func debugAction(_ videoURL: URL){
        let asset = AVURLAsset(url: videoURL)
        guard let track = asset.tracks(withMediaType: .video).first else { return }
        debugPrint("整个视频时长: =\(asset.duration.seconds) fps = ", track.nominalFrameRate)
        debugPrint(videoURL)
    }
    /// Public entry point for Live Photo generation: logs the source video's
    /// stats, then delegates to strategy 4 (speed-up → re-time → compose).
    static func livePhotoGenerate(_ videoURL: URL, success: @escaping (() -> Void), failure: @escaping (() -> Void)){
        debugAction(videoURL)
        livePhotoGenerateAction4(videoURL, success: success, failure: failure)
    }
    //MARK: 方案一 1、转帧 2、加速 3、裁剪 4、合成 缺点 耗时
    /// Strategy 1: re-time the frame rate of the whole video first, then hand off
    /// to `livePhotoEditVideo` for speed-up, trimming and composition.
    /// Slow, because the frame-rate pass runs over the entire source.
    static func livePhotoGenerateAction1(_ videoURL: URL, success: @escaping (() -> Void), failure: @escaping (() -> Void)){
        let asset = AVURLAsset(url: videoURL)
        debugPrint("整个视频时长: ", asset.duration.seconds)
        let startedAt = Date().timeIntervalSince1970
        LivePhoto17.videoFrameRateModify(videoURL) { fpsURL in
            guard let fpsURL else {
                failure()
                return
            }
            debugPrint("转帧耗时 = ", Date().timeIntervalSince1970 - startedAt)
            self.livePhotoEditVideo(fpsURL, success: success, failure: failure)
        }
    }
    //MARK: 方案二 1、裁剪（4s） 2、转帧  3、加速 4裁剪（1s）5、合成
    /// Strategy 2: for 60fps sources, trim 1s and compose directly; otherwise trim
    /// to at most 4s, re-time the frame rate, then hand off to `livePhotoEditVideo`.
    /// - Parameters:
    ///   - videoURL: Source video file URL.
    ///   - success: Invoked after the Live Photo was generated and saved.
    ///   - failure: Invoked on any failing path (exactly one of the callbacks fires).
    static func livePhotoGenerateAction2(_ videoURL: URL, success: @escaping (() -> Void), failure: @escaping (() -> Void)){
        let videoAsset = AVURLAsset(url: videoURL)
        debugPrint("整个视频时长: ",videoAsset.duration.seconds)
        // BUGFIX: previously returned without invoking `failure`, so callers never
        // got a callback for assets with no video track.
        guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
            failure()
            return
        }
        let fps = videoTrack.nominalFrameRate
        if fps == 60 {
            // Already at the target frame rate: just trim one second and compose.
            self.cutVideo(videoURL, second: 1, type: .mp4) { url in
                guard let url = url else {
                    debugPrint("视频裁剪失败！")
                    failure()
                    return
                }
                LivePhoto17.generate(from: nil, videoURL: url, progress: { (percent) in
                }) { (livePhoto, resources) in
                    if let resources = resources {
                        LivePhoto17.outputLivePhoto(videoURL: resources.pairedVideo, success: success, failure: failure)
                        return
                    }
                    failure()
                }
            }
            return
        }
        let videoSeconds = videoAsset.duration.seconds
        if videoSeconds > 4 {
            // Long source: trim to 4s first so the frame-rate pass is cheaper.
            self.cutVideo(videoURL, second: 4, type: .mp4) { url in
                guard let url = url else {
                    failure()
                    return
                }
                self.videoFrameRateModify(url) { fpsURL in
                    guard let fpsURL = fpsURL else {
                        failure()
                        return
                    }
                    self.livePhotoEditVideo(fpsURL, success: success, failure: failure)
                }
            }
            return
        }
        // Short source: re-time in place, then compose.
        self.videoFrameRateModify(videoURL) { fpsURL in
            guard let fpsURL = fpsURL else {
                failure()
                return
            }
            self.livePhotoEditVideo(fpsURL, success: success, failure: failure)
        }
    }
    //MARK: 方案三 1、转帧（并且把视频切成一秒） 2、合成 失败率还行 就是只能播放一秒的东西
    /// Strategy 3: re-time the frame rate (that pass also cuts the clip to one
    /// second), then pair the result into a Live Photo and save it.
    static func livePhotoGenerateAction3(_ videoURL: URL, success: @escaping (() -> Void), failure: @escaping (() -> Void)){
        let startedAt = Date().timeIntervalSince1970
        LivePhoto17.videoFrameRateModify(videoURL) { fpsURL in
            guard let fpsURL else {
                failure()
                return
            }
            debugPrint("转帧耗时 = ", Date().timeIntervalSince1970 - startedAt)
            LivePhoto17.generate(from: nil, videoURL: fpsURL, progress: { _ in }) { livePhoto, resources in
                guard let resources else {
                    failure()
                    return
                }
                LivePhoto17.outputLivePhoto(videoURL: resources.pairedVideo, success: success, failure: failure)
            }
        }
    }
    //MARK: 方案四 1、加速 2、转帧 3、合成 优点可以播放整个视频的关键帧 缺点失败率太高 需要重写加速方法
    /// Strategy 4 (current default): speed the whole clip up so it fits in about
    /// one second, re-time the frame rate, then compose and save the Live Photo.
    /// - Parameters:
    ///   - videoURL: Source video file URL.
    ///   - success: Invoked after the Live Photo was generated and saved.
    ///   - failure: Invoked on any failing path (exactly one of the callbacks fires).
    static func livePhotoGenerateAction4(_ videoURL: URL, success: @escaping (() -> Void), failure: @escaping (() -> Void)){
        let videoAsset = AVURLAsset(url: videoURL)
        debugPrint("整个视频时长: ",videoAsset.duration.seconds)
        let startedAt = Date().timeIntervalSince1970
        // BUGFIX: previously returned without invoking `failure`, so callers never
        // got a callback for assets with no video track.
        guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
            failure()
            return
        }
        let fps = videoTrack.nominalFrameRate
        debugPrint("fps = \(fps)")
        // Speed factor: compress the whole clip into ~1 second; never slow down
        // clips that are already shorter than one second.
        let scale = max(videoAsset.duration.seconds, 1)
        LivePhoto17.shared.videoBox.clean()
        LivePhoto17.shared.videoBox.appendVideo(byPath: videoURL.path)
        LivePhoto17.shared.videoBox.gearBox(withScale: CGFloat(scale))
        guard let gearURL = LivePhoto17.shared.cacheDirectory?.appendingPathComponent("gearVideo").appendingPathExtension("mp4") else {
            failure()
            return
        }
        // Remove any stale output from a previous run before exporting.
        if FileManager.default.fileExists(atPath: gearURL.path) {
            try? FileManager.default.removeItem(at: gearURL)
        }
        LivePhoto17.shared.videoBox.asyncFinishEdit(byFilePath: gearURL.path) { error in
            guard error == nil else {
                failure()
                debugPrint("视频倍速导出失败！")
                return
            }
            debugPrint("加速耗时 = ", Date().timeIntervalSince1970 - startedAt)
            self.videoFrameRateModify(gearURL) { url in
                guard let url = url else {
                    debugPrint("视频裁剪失败！")
                    failure()
                    return
                }
                LivePhoto17.generate(from: nil, videoURL: url, progress: { (percent) in
                }) { (livePhoto, resources) in
                    if let resources = resources {
                        LivePhoto17.outputLivePhoto(videoURL: resources.pairedVideo, success: success, failure: failure)
                        return
                    }
                    failure()
                }
            }
        }
    }
    /// Shared tail of strategies 1–2: speed the clip up so its effective frame
    /// rate reaches 60fps, trim one second, then pair it into a Live Photo.
    /// - Parameters:
    ///   - videoURL: URL of the (already re-timed) intermediate video.
    ///   - success: Invoked after the Live Photo was generated and saved.
    ///   - failure: Invoked on any failing path (exactly one of the callbacks fires).
    private static func livePhotoEditVideo(_ videoURL: URL, success: @escaping (() -> Void), failure: @escaping (() -> Void)){
        let videoAsset = AVURLAsset(url: videoURL)
        // BUGFIX: previously returned silently (no callback) when the asset had no
        // video track, leaving the caller waiting forever.
        guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
            failure()
            return
        }
        let fps = videoTrack.nominalFrameRate
        debugPrint("fps = \(fps)")
        // BUGFIX: guard against a zero/negative frame rate; `60.0 / 0` would yield
        // an infinite speed factor and break the export downstream.
        guard fps > 0 else {
            failure()
            return
        }
        // Speed factor that brings the effective frame rate up to 60fps.
        let scale = 60.0 / fps
        
        LivePhoto17.shared.videoBox.clean()
        LivePhoto17.shared.videoBox.appendVideo(byPath: videoURL.path)
        LivePhoto17.shared.videoBox.gearBox(withScale: CGFloat(scale))
        guard let gearURL = LivePhoto17.shared.cacheDirectory?.appendingPathComponent("gearVideo").appendingPathExtension("mp4") else {
            failure()
            return
        }
        // Remove any stale output from a previous run before exporting.
        if FileManager.default.fileExists(atPath: gearURL.path) {
            try? FileManager.default.removeItem(at: gearURL)
        }
        LivePhoto17.shared.videoBox.asyncFinishEdit(byFilePath: gearURL.path) { error in
            guard error == nil else {
                failure()
                debugPrint("视频倍速导出失败！")
                return
            }
            self.cutVideo(gearURL, second: 1, type: .mp4) { url in
                guard let url = url else {
                    debugPrint("视频裁剪失败！")
                    failure()
                    return
                }
                LivePhoto17.generate(from: nil, videoURL: url, progress: { (percent) in
                }) { (livePhoto, resources) in
                    if let resources = resources {
                        LivePhoto17.outputLivePhoto(videoURL: resources.pairedVideo, success: success, failure: failure)
                        return
                    }
                    failure()
                }
            }
        }
    }
}

fileprivate extension AVAsset {
    /// Number of video frames in the asset.
    ///
    /// With `exact == false` this is the estimate `duration * nominalFrameRate`;
    /// with `exact == true` every sample buffer is read and counted, which is
    /// slower but precise. Returns 0 when the asset cannot be read or has no
    /// video track.
    func countFrames(exact:Bool) -> Int {
        guard let reader = try? AVAssetReader(asset: self),
              let track = self.tracks(withMediaType: .video).first else {
            return 0
        }
        
        guard exact else {
            // Fast path: estimate from duration and the track's nominal rate.
            return Int(CMTimeGetSeconds(self.duration) * Float64(track.nominalFrameRate))
        }
        
        let output = AVAssetReaderTrackOutput(track: track, outputSettings: nil)
        reader.add(output)
        reader.startReading()
        
        // Walk every sample buffer and count them.
        var frameTotal = 0
        while output.copyNextSampleBuffer() != nil {
            frameTotal += 1
        }
        
        reader.cancelReading()
        return frameTotal
    }
    
    /// Reads the QuickTime "still-image-time" marker from the asset's metadata
    /// track, i.e. the timestamp Photos shows as the still frame of a Live Photo.
    /// - Returns: The marker's start time, or `nil` when the asset has no
    ///   metadata track or no such marker.
    func stillImageTime() -> CMTime?  {
        
        var stillTime:CMTime? = nil
        
        if let videoReader = try? AVAssetReader(asset: self)  {
            
            if let metadataTrack = self.tracks(withMediaType: .metadata).first {
                
                let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
                
                videoReader.add(videoReaderOutput)
                
                videoReader.startReading()
                
                let keyStillImageTime = "com.apple.quicktime.still-image-time"
                let keySpaceQuickTimeMetadata = "mdta"
                
                var found = false
                
                while found == false {
                    if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
                        if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
                            let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
                            for item in group?.items ?? [] {
                                // BUGFIX: `item.keySpace!` force-unwrapped and crashed on
                                // metadata items without a key space; optional-chain instead.
                                if item.key as? String == keyStillImageTime && item.keySpace?.rawValue == keySpaceQuickTimeMetadata {
                                    stillTime = group?.timeRange.start
                                    found = true
                                    break
                                }
                            }
                        }
                    }
                    else {
                        // No more samples: marker not present.
                        break;
                    }
                }
                
                videoReader.cancelReading()
                
            }
        }
        
        return stillTime
    }
    
    /// Builds a one-frame time range positioned at `percent` of the asset's
    /// duration, used as the still-image time when writing Live Photo metadata.
    /// - Parameters:
    ///   - percent: Fractional position in [0, 1] along the duration.
    ///   - inFrameCount: Known frame count; pass 0 to have it counted exactly.
    func makeStillImageTimeRange(percent:Float, inFrameCount:Int = 0) -> CMTimeRange {
        
        var time = self.duration
        
        var frameCount = inFrameCount
        
        if frameCount == 0 {
            frameCount = self.countFrames(exact: true)
        }
        
        // BUGFIX: a zero frame count (empty/unreadable video) previously divided
        // by zero, and Int64(Float.infinity) traps at runtime. Fall back to a
        // zero-length frame duration instead of crashing.
        let frameDuration: Int64 = frameCount > 0 ? Int64(Float(time.value) / Float(frameCount)) : 0
        
        time.value = Int64(Float(time.value) * percent)
        
        return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
    }
    
    /// Synchronously extracts the frame at `percent` of the asset's duration.
    /// - Parameter percent: Fractional position in [0, 1] along the duration.
    /// - Returns: The frame as a `UIImage`, or `nil` if generation fails.
    func getAssetFrame(percent:Float) -> UIImage?
    {
        let generator = AVAssetImageGenerator(asset: self)
        generator.appliesPreferredTrackTransform = true
        
        // Tight tolerances so the returned frame closely matches the request.
        let tolerance = CMTimeMake(value: 1, timescale: 100)
        generator.requestedTimeToleranceAfter = tolerance
        generator.requestedTimeToleranceBefore = tolerance
        
        var targetTime = self.duration
        targetTime.value = Int64(Float(targetTime.value) * percent)
        
        do {
            var actualTime = CMTime.zero
            let cgImage = try generator.copyCGImage(at: targetTime, actualTime: &actualTime)
            return UIImage(cgImage: cgImage)
        }
        catch let error as NSError
        {
            print("Image generation failed with error \(error)")
            return nil
        }
    }
}
