////
////  VideoEncoder.swift
////  VideoPhotoCapture
////
////  Created by 杨伟诺 on 2020/10/10.
////
//
// NOTE(review): This entire file is commented out — dead code. It implements an
// H.264 hardware encoder on top of VideoToolbox's VTCompressionSession: raw
// sample buffers go in through encodeVideoSampleBuffer(_:) and Annex-B NALUs
// (start-code-prefixed SPS/PPS plus frame payloads) come back through
// VideoEncoderDelegate. If this code is ever revived, fix the bugs flagged by
// the NOTE(review) comments below first; otherwise consider deleting the file.
//
//import UIKit
//import AVFoundation
//import VideoToolbox
//
//// Delegate that receives encoded output; calls are dispatched on the
//// encoder's private `callbackQueue`.
//@objc protocol VideoEncoderDelegate{
//    
//    func videoEncodeCallbackWith(h264Data: Data)
//    
//    func videoEncodeCallbackWith(sps: Data, pps: Data)
//}
//
//class VideoEncoder: NSObject {
//    
//    var encodeQueue = DispatchQueue.init(label: "h264 encodeQueue")
//    
//    var callbackQueue = DispatchQueue.init(label: "h264 callbackQueue")
//    
//    var encodeSession: VTCompressionSession?
//    
//    // Monotonic frame counter, also used as the synthetic presentation timestamp.
//    var frameID: CLong = 0
//    
//    // Set once SPS/PPS have been delivered to the delegate.
//    var hasSpsPps: Bool = false
//
//    var videoConfig: VideoConfig!
//    
//    weak var delegate: VideoEncoderDelegate?
//    
//    convenience init(config: VideoConfig){
//        self.init()
//        videoConfig = config
//        // Create the compression session. `passUnretained(self)` is handed to the
//        // C callback as refcon — safe only while `self` outlives the session.
//        var status = VTCompressionSessionCreate(allocator: kCFAllocatorDefault, width: Int32(config.width), height: Int32(config.height), codecType: kCMVideoCodecType_H264, encoderSpecification: nil, imageBufferAttributes: nil, compressedDataAllocator: nil, outputCallback: videoEncodeCallback, refcon: UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()), compressionSessionOut: &encodeSession)
//        
//        if status != 0{
//            print("VTCompressionSession create failed")
//            return
//        }
//        
//        // NOTE(review): the `unsafeBitCast`s of Int/Array to CFNumber/CFArray below
//        // are undefined behavior — a CFNumber is an object reference, not a raw Int.
//        // Use `config.bitrate as CFNumber`, `config.fps as CFNumber`, and a bridged
//        // array instead. Also, none of the `status` results below are checked.
//        // Whether encoding must run in real time.
//        status = VTSessionSetProperty(encodeSession as VTSession, key: kVTCompressionPropertyKey_RealTime, value: kCFBooleanTrue)
//        
//        // Profile/level of the encoded bitstream. Live streaming generally uses
//        // baseline, which avoids the latency introduced by B-frames.
//        status = VTSessionSetProperty(encodeSession as VTSession, key: kVTCompressionPropertyKey_ProfileLevel, value: kVTProfileLevel_H264_Baseline_AutoLevel)
//        
//        // Average bit rate (actual rate may exceed it; the default of zero lets the
//        // encoder choose; only effective for rate-controlled/timed encoding).
//        status = VTSessionSetProperty(encodeSession as VTSession, key: kVTCompressionPropertyKey_AverageBitRate, value: unsafeBitCast(config.bitrate, to: CFNumber.self))
//        
//        // Hard data-rate limit (only applies over timed windows) * to be confirmed.
//        // NOTE(review): DataRateLimits expects [bytes, seconds] pairs — presumably
//        // something like [config.bitrate / 8, 1] was intended; verify against the
//        // VideoToolbox documentation before reviving this.
//        let limits = unsafeBitCast([config.bitrate / 4, config.bitrate / 4], to: CFArray.self)
//        status = VTSessionSetProperty(encodeSession as VTSession, key: kVTCompressionPropertyKey_DataRateLimits,value: limits)
//        
//        // Keyframe interval (GOP size); too large a GOP blurs the image.
//        status = VTSessionSetProperty(encodeSession as VTSession, key: kVTCompressionPropertyKey_MaxKeyFrameInterval, value: unsafeBitCast(config.fps * 2, to: CFNumber.self))
//        
//        // Expected frame rate.
//        status = VTSessionSetProperty(encodeSession as VTSession, key: kVTCompressionPropertyKey_ExpectedFrameRate, value: unsafeBitCast(config.fps, to: CFNumber.self))
//        
//        // Prepare to encode.
//        guard encodeSession != nil else{return}
//        status = VTCompressionSessionPrepareToEncodeFrames(encodeSession!);
//    }
//    
//    // Submits one captured frame for asynchronous encoding on `encodeQueue`.
//    // NOTE(review): force-unwraps `encodeSession!` and `imageBuffer!` — this will
//    // crash if session creation failed in init or the sample buffer carries no
//    // image buffer. Guard both before encoding.
//    func encodeVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer){
//        
//        encodeQueue.async { [self] in
//            
//            let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
//            frameID += 1
//            // Synthetic PTS: frame counter over a 1 kHz timescale.
//            let timeStamp: CMTime = CMTimeMake(value: Int64(frameID), timescale: 1000)
//            let duration: CMTime = CMTime.invalid
//            var flags: VTEncodeInfoFlags = VTEncodeInfoFlags(rawValue: 0)
//            
//            VTCompressionSessionEncodeFrame(encodeSession!, imageBuffer: imageBuffer!, presentationTimeStamp: timeStamp, duration: duration, frameProperties: nil, sourceFrameRefcon: nil, infoFlagsOut: &flags)
//        }
//    }
//    
//    // C-style output callback: converts each compressed (AVCC-framed) sample
//    // buffer into Annex-B NALUs and forwards them to the delegate.
//    private var videoEncodeCallback: VTCompressionOutputCallback = {(outputCallbackRef:UnsafeMutableRawPointer?,
//                                                            sourceFrameRef:UnsafeMutableRawPointer?,
//                                                            status: OSStatus,
//                                                            infoFlags: VTEncodeInfoFlags,
//                                                            sampleBuffer: CMSampleBuffer?) in
//            
//        if status != 0{
//            print("encode error")
//            return
//        }
//
//        guard let buffer = sampleBuffer else {
//            return
//        }
//
//        if !CMSampleBufferDataIsReady(buffer){
//            print("data not ready")
//            return
//        }
//        // NOTE(review): prefer Unmanaged<VideoEncoder>.fromOpaque(...).takeUnretainedValue()
//        // to recover the refcon instead of unsafeBitCast; `var` should also be `let`.
//        var encoder = unsafeBitCast(outputCallbackRef, to: VideoEncoder.self)
//        var keyFrame = false
//        let attachArray = CMSampleBufferGetSampleAttachmentsArray(buffer, createIfNecessary: true)
//        // A sample WITHOUT the NotSync attachment is a sync (key) frame.
//        // NOTE(review): `attachArray` above is unused — the attachments array is
//        // fetched a second time on the next line.
//        keyFrame = !CFDictionaryContainsKey(unsafeBitCast(CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(buffer, createIfNecessary: true), 0), to: CFDictionary.self), unsafeBitCast(kCMSampleAttachmentKey_NotSync, to: UnsafeRawPointer.self))
//        
//        // NOTE(review): BUG — this is four 0x00 bytes (00 00 00 00). A valid
//        // Annex-B start code is 00 00 00 01; the last byte must be 0x01.
//        let naluStart:[UInt8] = [UInt8](repeating: 0x00, count: 4)
//        
//        // NOTE(review): BUG — condition should be `!encoder.hasSpsPps`. As written,
//        // the flag starts false and is only ever set inside this branch, so the
//        // SPS/PPS are never extracted or delivered to the delegate.
//        if keyFrame == true && encoder.hasSpsPps{
//            var spsSize: size_t = 0
//            var spsCount: size_t = 0
//            var ppsSize: size_t = 0
//            var ppsCount: size_t = 0
//            var spsData: __uint8_t = 0
//            var ppsData: __uint8_t = 0
//            
//            let formatDesc = CMSampleBufferGetFormatDescription(buffer)
//            // NOTE(review): BUG — `spsData`/`ppsData` are single bytes bit-cast into
//            // pointers, and `UnsafeMutablePointer<Int>(bitPattern: spsSize)` with
//            // spsSize == 0 is nil, so neither the parameter-set pointers nor their
//            // sizes are ever written back. Declare `var sps: UnsafePointer<UInt8>?`,
//            // `var spsSize = 0`, etc., and pass them with `&` as the out-parameters.
//            let status1 = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(formatDesc!, parameterSetIndex: 0, parameterSetPointerOut: unsafeBitCast(spsData, to: UnsafeMutablePointer.self), parameterSetSizeOut: UnsafeMutablePointer<Int>(bitPattern: spsSize), parameterSetCountOut: UnsafeMutablePointer<Int>(bitPattern: spsCount), nalUnitHeaderLengthOut: UnsafeMutablePointer<Int32>(bitPattern: 0))
//            
//            let status2 = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(formatDesc!, parameterSetIndex: 1, parameterSetPointerOut: unsafeBitCast(ppsData, to: UnsafeMutablePointer.self), parameterSetSizeOut: UnsafeMutablePointer<Int>(bitPattern: ppsSize), parameterSetCountOut: UnsafeMutablePointer<Int>(bitPattern: ppsCount), nalUnitHeaderLengthOut: UnsafeMutablePointer<Int32>(bitPattern: 0))
//            
//            if status1 == 0 && status2 == 0 {
//                
//                encoder.hasSpsPps = true
//                
//                //sps data
//                let sps = NSMutableData.init(capacity: 4 + spsSize)
//                sps?.append(naluStart, length: 4)
//                sps?.append(&spsData, length: spsSize)
//                
//                //pps data
//                // NOTE(review): capacity should be `4 + ppsSize`, not `spsSize`
//                // (harmless since capacity is only a hint, but wrong).
//                let pps = NSMutableData.init(capacity: 4 + spsSize)
//                pps?.append(naluStart, length: 4)
//                pps?.append(&ppsData, length: ppsSize)
//                
//                encoder.callbackQueue.async {
//                    guard let spsD = sps,  let ppsD = pps else {return}
//                    encoder.delegate?.videoEncodeCallbackWith(sps: Data.init(referencing: spsD), pps: Data.init(referencing: ppsD))
//                }
//            }
//            else
//            {
//                print("get sps/pps failed")
//            }
//        }
//        
//        var lengthOffset: size_t = 0
//        var totalLength: size_t = 0
//        var dataPiont: UnsafeMutablePointer<Int8>?
//        
//        let blockBuffer = CMSampleBufferGetDataBuffer(buffer)
//        let error = CMBlockBufferGetDataPointer(blockBuffer!, atOffset: 0, lengthAtOffsetOut: &lengthOffset, totalLengthOut: &totalLength, dataPointerOut: &dataPiont)
//        
//        if error != kCMBlockBufferNoErr{
//            print("get datapoint failed")
//            return
//        }
//        
//        // Walk the AVCC stream: each NALU is a 4-byte big-endian length followed
//        // by its payload; re-emit each one with an Annex-B start-code prefix.
//        // (Typos retained from the original: dataPiont / naluLenght / lenghtInfoSize.)
//        var offset: size_t = 0
//        
//        let lenghtInfoSize = 4
//        guard dataPiont != nil else {return}
//        while offset < totalLength - lenghtInfoSize {
//            var naluLenght: __uint32_t = 0
//            // Read the NALU payload length.
//            // NOTE(review): BUG — must read from `dataPiont! + offset`; reading from
//            // the buffer start every iteration re-reads the FIRST NALU's length and
//            // mis-frames every subsequent NALU in multi-NALU samples.
//            memcpy(&naluLenght, dataPiont, lenghtInfoSize)
//            // Convert big-endian to host byte order.
//            naluLenght = CFSwapInt32BigToHost(naluLenght)
//            let data = NSMutableData.init(capacity: 4 + Int(naluLenght))
//            data?.append(naluStart, length: 4)
//            data?.append(dataPiont! + offset + lenghtInfoSize, length: Int(naluLenght))
//            
//            encoder.callbackQueue.async {
//                guard let d = data else {return}
//                encoder.delegate?.videoEncodeCallbackWith(h264Data: d as Data)
//            }
//            
//            offset += lenghtInfoSize + Int(naluLenght)
//        }
//    }
//    
//    
//    // Flush any pending frames, then invalidate and release the session.
//    deinit {
//        if encodeSession != nil {
//            VTCompressionSessionCompleteFrames(encodeSession!, untilPresentationTimeStamp: CMTime.invalid)
//            VTCompressionSessionInvalidate(encodeSession!)
//            encodeSession = nil
//        }
//    }
//}
//
//
//
