//
//  CompressVideo.swift
//  TestDemo
//
//  Created by ayin on 2019/2/19.
//  Copyright © 2019 aYin. All rights reserved.
//

import AVFoundation
import UIKit

/// Re-encodes a video file at a lower bitrate using AVAssetReader / AVAssetWriter.
class CompressVideo: NSObject {
    /// The asset to compress (stored by `init(asset:)`).
    var asset: AVAsset?
    /// Optional composition applied to the video track while reading.
    var videoComposition: AVVideoComposition?
    /// Optional mix applied to the audio tracks while reading.
    var audioMix: AVAudioMix?
    /// Container format for the output file.
    var outputFileType: AVFileType = .mp4
    var outputVideoUrl: URL = URL(fileURLWithPath: NSTemporaryDirectory())
//    var videoCompressSettings:[String : Any] = [:]
//    var audioCompressSettings:[String : Any] = [:]

    /// Portion of the asset to transcode; the default covers the whole asset.
    var timeRange: CMTimeRange = CMTimeRangeMake(start: CMTime.zero, duration: CMTime.positiveInfinity)

    convenience init(asset: AVAsset) {
        self.init()
        // FIX: the original discarded the parameter, so the instance never
        // knew which asset it was supposed to compress.
        self.asset = asset
    }

    /// Not implemented; kept so existing callers still compile.
    func compressVideo(withProgress progress: ((Float) -> Void), completion: ((Float) -> Void)) {

    }

    // 自定义压缩 — custom compression through a reader/writer pipeline.
    /// Transcodes `videoUrl` into `outputVideoUrl` with the settings from
    /// `videoCompressSettings()` / `audioCompressSettings()`.
    /// - Parameters:
    ///   - videoUrl: source media file.
    ///   - outputVideoUrl: destination; AVAssetWriter fails if the file already exists.
    ///   - progress: invoked on the internal queue with values in 0...1.
    ///   - completion: result callback. NOTE(review): `CompressVideoState` is
    ///     declared elsewhere in the project; its cases are not visible here,
    ///     so the invocation sites below are marked TODO.
    func customCompressVideo2(_ videoUrl: URL, outputVideoUrl: URL, _ progress: @escaping ((_ progress: Float) -> Void), _ completion: @escaping ((_ compressResult: CompressVideoState, _ url: URL?) -> Void)) {
        let avAsset = AVAsset(url: videoUrl)
        var lastSamplePresentationTime = CMTime()

        // FIX: the original left `duration` at 0, so the computed progress was
        // always 1; derive it from the asset instead. (The default timeRange
        // spans the whole asset, so the asset duration is the right scale.)
        let duration: TimeInterval = CMTimeGetSeconds(avAsset.duration)

        guard let reader = try? AVAssetReader(asset: avAsset),
              // FIX: honor the `outputFileType` property instead of hardcoding
              // `.mp4` (default is still .mp4, so existing behavior is kept).
              let writer = try? AVAssetWriter(outputURL: outputVideoUrl, fileType: outputFileType) else {
            // TODO(review): report failure through `completion` — the
            // CompressVideoState failure case is not visible in this file.
            return
        }
        reader.timeRange = timeRange

        // video
        let videoTracks = avAsset.tracks(withMediaType: .video)
        let videoOutput = AVAssetReaderVideoCompositionOutput(videoTracks: videoTracks, videoSettings: nil)
        // FIX: AVAssetReaderVideoCompositionOutput requires a non-nil
        // videoComposition before startReading(); the original left it nil,
        // which makes startReading() fail.
        videoOutput.videoComposition = self.videoComposition ?? AVVideoComposition(propertiesOf: avAsset)
        videoOutput.alwaysCopiesSampleData = false
        if reader.canAdd(videoOutput) {
            reader.add(videoOutput)
        }

        let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: self.videoCompressSettings())
        videoInput.expectsMediaDataInRealTime = false
        if writer.canAdd(videoInput) {
            writer.add(videoInput)
        }
        // NOTE: the original built an AVAssetWriterInputPixelBufferAdaptor with
        // width/height 0 (videoComposition was nil) and never used it — removed
        // as dead code; samples are appended directly to `videoInput`.

        // audio — only wired up when the asset actually has audio tracks,
        // otherwise the audio pump would never finish.
        let audioTracks = avAsset.tracks(withMediaType: .audio)
        var audioOutput: AVAssetReaderAudioMixOutput?
        var audioInput: AVAssetWriterInput?
        if !audioTracks.isEmpty {
            let output = AVAssetReaderAudioMixOutput(audioTracks: audioTracks, audioSettings: nil)
            output.audioMix = self.audioMix
            output.alwaysCopiesSampleData = false
            if reader.canAdd(output) {
                reader.add(output)
            }
            audioOutput = output

            let input = AVAssetWriterInput(mediaType: .audio, outputSettings: self.audioCompressSettings())
            input.expectsMediaDataInRealTime = false
            if writer.canAdd(input) {
                writer.add(input)
            }
            audioInput = input
        }

        // 开始读写 — FIX: check the start results instead of ignoring them.
        guard reader.startReading(), writer.startWriting() else {
            reader.cancelReading()
            writer.cancelWriting()
            // TODO(review): report failure through `completion`.
            return
        }
        writer.startSession(atSourceTime: timeRange.start)

        var videoCompleted = false
        var audioCompleted = audioInput == nil   // no audio ⇒ already "done"
        let inputQueue = DispatchQueue(label: "com.testDemo.inputQueue")

        // Runs on `inputQueue` (both flags are only mutated there). Tears the
        // session down once both pumps have finished. FIX: the original never
        // finished the writer, so the output file was always incomplete.
        func finishIfDone() {
            guard videoCompleted, audioCompleted else { return }
            if reader.status == .failed || writer.status == .failed {
                reader.cancelReading()
                writer.cancelWriting()
                // TODO(review): report failure through `completion`.
            } else {
                writer.finishWriting {
                    // TODO(review): report success + `outputVideoUrl`
                    // through `completion`.
                }
            }
        }

        videoInput.requestMediaDataWhenReady(on: inputQueue) {
            while videoInput.isReadyForMoreMediaData {
                guard reader.status == .reading, writer.status == .writing,
                      let sampleBuffer = videoOutput.copyNextSampleBuffer() else {
                    // FIX: the original called markAsFinished() but never left
                    // the loop, and ignored reader/writer error states.
                    videoInput.markAsFinished()
                    videoCompleted = true
                    finishIfDone()
                    break
                }
                lastSamplePresentationTime = CMTimeSubtract(CMSampleBufferGetPresentationTimeStamp(sampleBuffer), self.timeRange.start)
                // FIX: the original computed progress into a local that
                // shadowed the `progress` parameter and discarded it; report
                // it to the caller, clamped to 0...1.
                let fraction = duration > 0 ? Float(CMTimeGetSeconds(lastSamplePresentationTime) / duration) : 1
                progress(min(max(fraction, 0), 1))
                if !videoInput.append(sampleBuffer) {
                    // Append failed — writer.status/.error carry the reason.
                    videoInput.markAsFinished()
                    videoCompleted = true
                    finishIfDone()
                    break
                }
            }
        }

        // FIX: the original added the audio input but never pumped it, so the
        // writer could never complete.
        if let audioInput = audioInput, let audioOutput = audioOutput {
            audioInput.requestMediaDataWhenReady(on: inputQueue) {
                while audioInput.isReadyForMoreMediaData {
                    guard reader.status == .reading, writer.status == .writing,
                          let sampleBuffer = audioOutput.copyNextSampleBuffer() else {
                        audioInput.markAsFinished()
                        audioCompleted = true
                        finishIfDone()
                        break
                    }
                    if !audioInput.append(sampleBuffer) {
                        audioInput.markAsFinished()
                        audioCompleted = true
                        finishIfDone()
                        break
                    }
                }
            }
        }
    }

    // 指定音视频的压缩码率，profile，帧率等关键参数信息
    /// Video writer settings: H.264 High profile, 540×960, ~1 Mbps, 25 fps.
    func videoCompressSettings() -> [String : Any] {
        let compressionProperties: [String : Any] = [AVVideoAverageBitRateKey : 1000 * 1000,
                                                     AVVideoExpectedSourceFrameRateKey : 25,
                                                     AVVideoProfileLevelKey : AVVideoProfileLevelH264HighAutoLevel]
        let videoCompressSettings: [String : Any] = [AVVideoCodecKey : AVVideoCodecH264,
                                                     AVVideoWidthKey : 540,
                                                     AVVideoHeightKey : 960,
                                                     AVVideoCompressionPropertiesKey : compressionProperties]
        return videoCompressSettings
    }

    /// Audio writer settings: AAC, 96 kbps, 44.1 kHz, stereo.
    func audioCompressSettings() -> [String: Any] {
        var stereoChannelLayout = AudioChannelLayout()
        stereoChannelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo
        // NOTE: the original also set mChannelBitmap = .bit_Left, which both
        // contradicts and is ignored by the stereo layout tag — removed.
        stereoChannelLayout.mNumberChannelDescriptions = 0
        // FIX: the original copied 100 bytes out of the struct, far past its
        // actual size — an out-of-bounds read. Copy exactly the struct's size.
        let channelLayoutAsData = Data(bytes: &stereoChannelLayout, count: MemoryLayout<AudioChannelLayout>.size)
        let audioCompressSettings: [String : Any] = [AVFormatIDKey : kAudioFormatMPEG4AAC,
                                                     AVEncoderBitRateKey : 96000,
                                                     AVSampleRateKey : 44100,
                                                     AVNumberOfChannelsKey : 2,
                                                     AVChannelLayoutKey : channelLayoutAsData]
        return audioCompressSettings
    }
}
