////
////  VideoDecoder.swift
////  VideoPhotoCapture
////
////  Created by 杨伟诺 on 2020/10/11.
////
//
//import UIKit
//import VideoToolbox
//
//// Delegate protocol for receiving decoded frames from `VideoDecoder`.
//// NOTE(review): this declaration is commented out (dead code), like the rest
//// of the file.
//@objc protocol VideoDecoderDelegate{
//    
//    // Invoked with each successfully decoded pixel buffer; `VideoDecoder`
//    // dispatches this on its private `callbackQueue`, not the main queue.
//    func videoDecodeCallbackWith( imageBuffer: CVPixelBuffer)
//}
//
//// NOTE(review): This entire class is commented out — it is dead code and does
//// NOT compile/work as written. The inline NOTE(review) comments below mark
//// the concrete defects that must be fixed before any attempt to resurrect it.
//// Headline problems: the decode session is declared with the *compression*
//// session type; sps/pps are single-byte scalars that get memcpy()'d into;
//// and out-parameters are passed via `unsafeBitCast` (several of which trap at
//// runtime because the source and destination types have different sizes).
//class VideoDecoder: NSObject {
//
//    // Video parameters (width/height, etc.) supplied via init(config:).
//    // Implicitly unwrapped — using the decoder without a config will crash.
//    var videoConfig: VideoConfig!
//    
//    // Serial queue on which NALU parsing/decoding runs.
//    var decodeQueue = DispatchQueue.init(label: "h264 decodeQueue")
//    
//    // Serial queue on which delegate callbacks are delivered.
//    var callbackQueue = DispatchQueue.init(label: "h264 callbackQueue")
//    
//    // NOTE(review): wrong type — decoding needs a VTDecompressionSession, not
//    // a VTCompressionSession. The unsafeBitCasts further down exist only to
//    // paper over this mismatch.
//    var decodeSession: VTCompressionSession?
//    
//    weak var delegate: VideoDecoderDelegate?
//    
//    // NOTE(review): sps/pps must be byte buffers ([UInt8] or
//    // UnsafeMutablePointer<UInt8>), not single UInt8 scalars — the code below
//    // memcpy()s entire parameter sets "into" them, corrupting memory.
//    private var sps: UInt8 = 0
//    private var pps: UInt8 = 0
//    private var spsSize: Int = 0
//    private var ppsSize: Int = 0
//    // Format description built from the SPS/PPS parameter sets.
//    private var decodeDesc: CMVideoFormatDescription?
//    
//    convenience init( config: VideoConfig){
//        self.init()
//        videoConfig = config
//    }
//    
//    // Lazily creates the decompression session from the stored SPS/PPS.
//    // Returns true if a session already exists or was created successfully.
//    func initDecoder() -> Bool{
//        
//        if decodeSession != nil {
//            return true
//        }
//        
//        // NOTE(review): these arrays hold single *bytes*, not the parameter
//        // sets; CMVideoFormatDescriptionCreateFromH264ParameterSets needs
//        // pointers to the full SPS and PPS byte sequences.
//        let parameterSetPointers:[UInt8] = [sps, pps]
//        let parameterSetSizes:[Int] = [spsSize, ppsSize]
//        let naluHeaderLenght = 4
//        /**
//         Build the decoder format description from the SPS/PPS parameter sets.
//         param kCFAllocatorDefault   allocator
//         param 2                     number of parameter sets
//         param parameterSetPointers  pointers to the parameter sets
//         param parameterSetSizes     sizes of the parameter sets
//         param naluHeaderLen         length of the NALU start code (4)
//         param _decodeDesc           decoder format description (out)
//         return status
//         */
//        // NOTE(review): MemoryLayout.size(ofValue: parameterSetPointers) is
//        // the size of the Array *struct* (one word), not the data size, and
//        // initialize(to:) cannot store an [UInt8] into a UInt8 pointer — this
//        // line does not type-check. The allocation also leaks (never freed).
//        let p = UnsafeMutablePointer<UInt8>.allocate(capacity: MemoryLayout.size(ofValue: parameterSetPointers))
//        p.initialize(to: parameterSetPointers)
////        var ppp: UnsafePointer<UInt8> = UnsafePointer<UInt8>(parameterSetPointers)
////        let pppp: UnsafePointer<UnsafePointer<UInt8>> = UnsafePointer<UnsafePointer<UInt8>>(&ppp)
//        
////        CMVideoFormatDescriptionCreateFromH264ParameterSets(allocator: <#T##CFAllocator?#>, parameterSetCount: <#T##Int#>, parameterSetPointers: <#T##UnsafePointer<UnsafePointer<UInt8>>#>, parameterSetSizes: <#T##UnsafePointer<Int>#>, nalUnitHeaderLength: <#T##Int32#>, formatDescriptionOut: <#T##UnsafeMutablePointer<CMFormatDescription?>#>)
//        
//        // NOTE(review): `UnsafePointer(p)` is a pointer to bytes, not the
//        // required UnsafePointer<UnsafePointer<UInt8>> (array of buffer
//        // pointers), and `UnsafePointer(parameterSetSizes)` creates a pointer
//        // whose lifetime ends at this call — use withUnsafePointer/
//        // withUnsafeBufferPointer scoping instead.
//        var status = CMVideoFormatDescriptionCreateFromH264ParameterSets(allocator: kCFAllocatorDefault, parameterSetCount: 2, parameterSetPointers: UnsafePointer(p), parameterSetSizes: UnsafePointer(parameterSetSizes), nalUnitHeaderLength: Int32(naluHeaderLenght), formatDescriptionOut: &decodeDesc)
//        
//        if status != 0 {
//            print("DecodeSession create H264ParameterSets(sps, pps) failed")
//            return false
//        }
//        
//        /*
//         Decode parameters:
//        * kCVPixelBufferPixelFormatTypeKey: the camera's output pixel format.
//         Values verified to work for kCVPixelBufferPixelFormatTypeKey:
//            kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, i.e. 420v
//            kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, i.e. 420f
//            kCVPixelFormatType_32BGRA — iOS converts YUV to BGRA internally
//         YUV420 is generally used for SD video and YUV422 for HD, so this limitation is surprising; however, under identical conditions YUV420 costs less compute and transfer bandwidth than YUV422.
//         
//        * kCVPixelBufferWidthKey/kCVPixelBufferHeightKey: source resolution, width*height
//         * kCVPixelBufferOpenGLCompatibilityKey : allows decoded images to be drawn directly in an OpenGL context instead of copying data between the bus and the CPU. Sometimes called a zero-copy path, because no decoded image is copied during drawing.
//         
//         */
//        let destinationPixBufferAttrs = [kCVPixelBufferPixelFormatTypeKey:NSNumber.init(value: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange),
//                                         kCVPixelBufferWidthKey:NSNumber.init(value: videoConfig.width),
//                                         kCVPixelBufferHeightKey:NSNumber.init(value: videoConfig.height),
//                                         kCVPixelBufferOpenGLCompatibilityKey:NSNumber.init(value: true)]
//        
//        // Configure the decode output callback.
//        /**
//         VTDecompressionOutputCallbackRecord is a simple struct carrying a pointer (decompressionOutputCallback) to the method called when a frame finishes decompressing, plus the instance on which that callback can be found (decompressionOutputRefCon). The VTDecompressionOutputCallback takes seven parameters:
//                param 1: the callback's reference
//                param 2: the frame's reference
//                param 3: a status flag (may contain undefined codes)
//                param 4: flags indicating synchronous/asynchronous decoding, or whether the decoder intends to drop the frame
//                param 5: the actual image buffer
//                param 6: presentation timestamp
//                param 7: presentation duration
//         */
//        var callbackRecord: VTDecompressionOutputCallbackRecord = VTDecompressionOutputCallbackRecord()
//        callbackRecord.decompressionOutputCallback = videoDecompressionOutputCallback
//        callbackRecord.decompressionOutputRefCon = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque())
//        
////        let callbackRecordP: UnsafePointer = UnsafePointer()
//        
//        // NOTE(review): three broken casts here. The attrs dictionary should
//        // be bridged with `as CFDictionary`, not unsafeBitCast; the callback
//        // record must be passed as `&callbackRecord` (bitcasting a struct to a
//        // pointer traps when sizes differ); and bitcasting the nil
//        // `decodeSession` means the out-parameter never populates the stored
//        // property — pass `&decodeSession` (of VTDecompressionSession? type).
//        status = VTDecompressionSessionCreate(allocator: kCFAllocatorDefault, formatDescription: decodeDesc!, decoderSpecification: nil, imageBufferAttributes: unsafeBitCast(destinationPixBufferAttrs, to: CFDictionary.self), outputCallback: unsafeBitCast(callbackRecord, to: UnsafePointer.self), decompressionSessionOut: unsafeBitCast(decodeSession, to: UnsafeMutablePointer.self))
//
//        if status != 0 {
//            print("DecodeSession create failed")
//            return false
//        }
//        
//        
//        // NOTE(review): decodeSession is still nil at this point (see above),
//        // and `decodeSession as VTSession` on an Optional does not compile.
//        status = VTSessionSetProperty(decodeSession as VTSession, key: kVTDecompressionPropertyKey_RealTime, value: kCFBooleanTrue)
//        return true
//    }
//    
//    // Wraps one AVCC-format NALU in a CMSampleBuffer and submits it to the
//    // decompression session. Returns the decoded pixel buffer, or nil.
//    // NOTE(review): `outputPixelBuffer` is never assigned before being
//    // returned, so this function can only ever return nil as written.
//    func decodeWith(frame: UnsafeMutablePointer<UInt8>, size: UInt32) -> CVPixelBuffer?{
//        
//        var outputPixelBuffer:CVPixelBuffer?
//        var blockBuffer: CMBlockBuffer?
//        let flag0: CMBlockBufferFlags = 0
//        
//        // NOTE(review): kCFAllocatorNull means the block is NOT copied —
//        // `frame` must stay alive for the lifetime of the sample buffer, which
//        // the async decode path below does not guarantee.
//        var status = CMBlockBufferCreateWithMemoryBlock(allocator: kCFAllocatorDefault, memoryBlock: frame, blockLength: Int(size), blockAllocator: kCFAllocatorNull, customBlockSource: nil, offsetToData: 0, dataLength: Int(size), flags: flag0, blockBufferOut: UnsafeMutablePointer(&blockBuffer))
//        
//        if status != kCMBlockBufferNoErr{
//            // NOTE(review): empty diagnostic — should name the failure.
//            print("")
//            return outputPixelBuffer
//        }
//        
//        var sampleBuffer: CMSampleBuffer?
//        let sampleSizeArray:[UInt32] = [size]
//        
//        // NOTE(review): bitcasting a Swift Array yields a pointer to the array
//        // object, not its elements, and bitcasting the nil `sampleBuffer`
//        // means the out-parameter never fills it — the nil check below will
//        // therefore always fire. Use `&sampleBuffer` and a scoped
//        // withUnsafePointer for the size array.
//        status = CMSampleBufferCreateReady(allocator: kCFAllocatorDefault, dataBuffer: blockBuffer, formatDescription: decodeDesc, sampleCount: 1, sampleTimingEntryCount: 0, sampleTimingArray: nil, sampleSizeEntryCount: 1, sampleSizeArray: unsafeBitCast(sampleSizeArray, to: UnsafePointer.self), sampleBufferOut: unsafeBitCast(sampleBuffer, to: UnsafeMutablePointer.self))
//        
//        if status != 0 || sampleBuffer == nil {
//            print("decode create sampleBuffer failed")
//            return nil
//        }
//        
//        let flag1:VTDecodeFrameFlags = VTDecodeFrameFlags._1xRealTimePlayback
//        let infoFlag: VTDecodeInfoFlags = VTDecodeInfoFlags.asynchronous
//        
//        // NOTE(review): `decodeSession! as! VTDecompressionSession` is the
//        // type-mismatch workaround flagged on the property; bitcasting the
//        // nil `outputPixelBuffer` as frameRefcon and a local enum value as
//        // the infoFlagsOut pointer are both invalid — pass nil/&infoFlag.
//        status = VTDecompressionSessionDecodeFrame(decodeSession! as! VTDecompressionSession, sampleBuffer: sampleBuffer!, flags: flag1, frameRefcon: unsafeBitCast(outputPixelBuffer, to: UnsafeMutableRawPointer.self), infoFlagsOut: unsafeBitCast(infoFlag, to: UnsafeMutablePointer.self))
//        
//        if status == kVTInvalidSessionErr{
//            print("decode  InvalidSessionErr")
//        }else if status == kVTVideoDecoderBadDataErr{
//            print("decode  BadData")
//        }else if status != 0{
//            print("decode  failed")
//        }
//        return outputPixelBuffer
//    }
//    
//    
//    // Rewrites the 4-byte Annex-B start code as a big-endian length prefix
//    // (AVCC format) and dispatches the NALU by type.
//    func decodeNaluDataWith(frame: UnsafeMutablePointer<UInt8>, size: UInt32){
//        // Data layout: the first 4 bytes of `frame` are the NALU start code,
//        // i.e. 00 00 00 01;
//        // the 5th byte is the NALU type — in decimal, 7 = SPS, 8 = PPS,
//        // 5 = IDR (I-frame) data.
//        let type = frame[4] & 0x1F
//        // Convert the start code into the 4-byte big-endian NALU length.
//        var naluSize: UInt32 = size - 4
//        // NOTE(review): `naluSize.bitWidth` is 32 (bits), not the intended
//        // 4-byte capacity.
//        var rawP:UnsafeMutableRawPointer = UnsafeMutableRawPointer(&naluSize)
//        let pNaluSize: UnsafeMutablePointer<UInt8> = rawP.bindMemory(to: UInt8.self, capacity: naluSize.bitWidth)
//        var pixelBuffer: CVPixelBuffer?
//        // NOTE(review): unsafeBitCast of a pointer (8 bytes) to UInt8 (1 byte)
//        // traps at runtime — the intent was the pointed-to byte, i.e.
//        // `(pNaluSize + 3).pointee`, or simply CFSwapInt32HostToBig + memcpy.
//        frame[0] = unsafeBitCast((pNaluSize + 3), to: UInt8.self)
//        frame[1] = unsafeBitCast((pNaluSize + 2), to: UInt8.self)
//        frame[2] = unsafeBitCast((pNaluSize + 1), to: UInt8.self)
//        frame[3] = unsafeBitCast(pNaluSize, to: UInt8.self)
//        
//        switch type {
//        case 0x05:// key frame (IDR)
//            if self.initDecoder(){
//                pixelBuffer = self.decodeWith(frame: frame, size: size)
//            }
//            break
//            
//        case 0x06:// supplemental enhancement information (SEI)
//            break
//            
//        case 0x07://sps
//            spsSize = Int(naluSize)
//            // NOTE(review): bitcasting the malloc'd pointer to UInt8 traps
//            // (size mismatch), and `unsafeBitCast(frame[4], ...)` turns the
//            // byte *value* into a bogus address — the source should be
//            // `frame + 4`, copied into a real byte buffer (see sps/pps note).
//            sps = unsafeBitCast(malloc(spsSize), to: UInt8.self)
//            memcpy(unsafeBitCast(sps, to: UnsafeMutableRawPointer.self), unsafeBitCast(frame[4], to: UnsafeRawPointer.self), spsSize)
//            break
//            
//        case 0x08://pps
//            ppsSize = Int(naluSize)
//            // NOTE(review): same defects as the SPS branch above.
//            pps = unsafeBitCast(malloc(ppsSize), to: UInt8.self)
//            memcpy(unsafeBitCast(pps, to: UnsafeMutableRawPointer.self), unsafeBitCast(frame[4], to: UnsafeRawPointer.self), ppsSize)
//            break
//        default:// other frame types (1-5)
//            if self.initDecoder(){
//                pixelBuffer = self.decodeWith(frame: frame, size: size)
//            }
//            break
//        }
//    }
//    
//    // Entry point: enqueues one Annex-B NALU (wrapped in NSData) for decoding.
//    // NOTE(review): `UnsafeMutablePointer<NSData>` is an odd parameter type,
//    // `bytes` is cast to a *mutable* pointer that decodeNaluDataWith then
//    // writes through, and the NSData's storage is used inside an async block
//    // with no guarantee it outlives the dispatch — lifetime hazard.
//    func decodeNaluData(frame: UnsafeMutablePointer<NSData>){
//
//        decodeQueue.async {
//            self.decodeNaluDataWith(frame: unsafeBitCast(frame.pointee.bytes, to: UnsafeMutablePointer<UInt8>.self), size: UInt32(frame.pointee.length))
//        }
//    }
//
//    // C-style completion callback handed to VTDecompressionSessionCreate via
//    // the callback record; forwards each decoded buffer to the delegate on
//    // `callbackQueue`.
//    private var videoDecompressionOutputCallback: VTDecompressionOutputCallback = {(
//                                                    decompressionOutputRef:UnsafeMutableRawPointer?,
//                                                    sourceFrameRef:UnsafeMutableRawPointer?,
//                                                    status:OSStatus ,
//                                                    infoFlags:VTDecodeInfoFlags,
//                                                    imageBuffer:CVImageBuffer?,
//                                                    presentationTimeStamp:CMTime,
//                                                    presentationDuration:CMTime) in
//        
//        if status != 0{
//            print("decode callback error")
//            return
//        }
//        
//        guard let outputPixelBuffer = imageBuffer else {print("decode callback error");return}
//        // NOTE(review): the refCon was produced with
//        // Unmanaged.passUnretained(self).toOpaque(); the matching recovery is
//        // Unmanaged<VideoDecoder>.fromOpaque(...).takeUnretainedValue(), not
//        // unsafeBitCast of an Optional raw pointer.
//        let decoder = unsafeBitCast(decompressionOutputRef, to: VideoDecoder.self)
//        decoder.callbackQueue.async {
//            decoder.delegate?.videoDecodeCallbackWith(imageBuffer: outputPixelBuffer)
//        }
//        
//    }
//}
