//
//  ZLVideoEncoder.m
//  Video
//
//  Created by os on 2021/3/29.
//

#import "ZLVideoEncoder.h"
#import <VideoToolbox/VideoToolbox.h>

@interface ZLVideoEncoder ()

/// Serial queue on which frames are submitted to the compression session.
@property (nonatomic, strong) dispatch_queue_t encodeQueue;

/// Serial queue on which encoded output is delivered to the delegate.
@property (nonatomic, strong) dispatch_queue_t callbackQueue;

/// VideoToolbox compression (encode) session; owned by this object, released in dealloc.
@property (nonatomic) VTCompressionSessionRef encodeSession;

@end

@implementation ZLVideoEncoder {
    long frameID; // Monotonically increasing frame counter, used as the presentation timestamp value
    BOOL hasSpsPps; // Whether the SPS/PPS parameter sets have already been extracted and delivered
}

/// Annex-B start code, 4 bytes (0x00 0x00 0x00 0x01), prepended to every NALU.
const Byte startCode[] = "\x00\x00\x00\x01";

/// Encode-complete callback installed on the VTCompressionSession.
/// Converts the AVCC output (NALUs prefixed with a 4-byte big-endian length)
/// into Annex-B format (0x00000001 start codes) and forwards each NALU to the
/// delegate on the encoder's callback queue. SPS/PPS are extracted once, on
/// the first keyframe, so they can be written at the head of the H.264 stream.
void VideoEncodeCallback(void * CM_NULLABLE outputCallbackRefCon,
                         void * CM_NULLABLE sourceFrameRefCon,
                         OSStatus status,
                         VTEncodeInfoFlags infoFlags,
                         CM_NULLABLE CMSampleBufferRef sampleBuffer ) {
    
    if (status != noErr || sampleBuffer == NULL) {
        NSLog(@"VideoEncodeCallBack: data is not ready");
        return;
    }
    
    // A dropped frame carries no payload; nothing to forward.
    if (infoFlags & kVTEncodeInfo_FrameDropped) {
        NSLog(@"VideoEncodeCallback: frame was dropped by the encoder");
        return;
    }
    
    ZLVideoEncoder *encoder = (__bridge ZLVideoEncoder *)outputCallbackRefCon;
    
    // Keyframe detection: a sample is a sync frame (IDR) when the
    // kCMSampleAttachmentKey_NotSync attachment is absent — note the negation.
    BOOL keyFrame = NO;
    CFArrayRef attachArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
    if (attachArray != NULL && CFArrayGetCount(attachArray) > 0) {
        keyFrame = !CFDictionaryContainsKey(CFArrayGetValueAtIndex(attachArray, 0), kCMSampleAttachmentKey_NotSync);
    }
    
    // Extract SPS & PPS once; they only need to be stored at the head of the h264 file.
    if (keyFrame && !encoder->hasSpsPps) {
        size_t spsSize, spsCount;
        size_t ppsSize, ppsCount;
        const uint8_t *spsData, *ppsData;
        
        // Parameter sets live on the sample's format description.
        CMFormatDescriptionRef formatDesc = CMSampleBufferGetFormatDescription(sampleBuffer);
        OSStatus status1 = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(formatDesc, 0, &spsData, &spsSize, &spsCount, 0);
        OSStatus status2 = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(formatDesc, 1, &ppsData, &ppsSize, &ppsCount, 0);
        
        // FIX: the original used bitwise `&` (status1 == noErr & status2 == noErr);
        // logical && is the intended operator.
        if (status1 == noErr && status2 == noErr) {
            NSLog(@"VideoEncodeCallback： get sps, pps success");
            encoder->hasSpsPps = true;
            
            // SPS with an Annex-B start code prepended.
            NSMutableData *sps = [NSMutableData dataWithCapacity:4 + spsSize];
            [sps appendBytes:startCode length:4];
            [sps appendBytes:spsData length:spsSize];
            
            // PPS with an Annex-B start code prepended.
            NSMutableData *pps = [NSMutableData dataWithCapacity:4 + ppsSize];
            [pps appendBytes:startCode length:4];
            [pps appendBytes:ppsData length:ppsSize];
            
            dispatch_async(encoder.callbackQueue, ^{
                // Deliver SPS/PPS to the delegate.
                [encoder.delegate videoEncodeCallbackSps:sps pps:pps];
            });
            
        } else {
            NSLog(@"VideoEncodeCallback： get sps/pps failed spsStatus = %d, ppsStatus = %d", (int)status1, (int)status2);
        }
    }
    
    // Obtain a raw pointer to the encoded payload.
    size_t lengthAtOffset, totalLength;
    char *dataPoint;
    
    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    OSStatus error = CMBlockBufferGetDataPointer(blockBuffer, 0, &lengthAtOffset, &totalLength, &dataPoint);
    if (error != kCMBlockBufferNoErr) {
        NSLog(@"VideoEncodeCallback: get datapoint failed, status = %d", (int)error);
        return;
    }
    
    // Walk the buffer NALU by NALU. The first four bytes of each NALU are not a
    // 0x00000001 start code but a big-endian length field (AVCC layout).
    size_t offset = 0;
    const int lengthInfoSize = 4;
    
    // FIX: `offset < totalLength - lengthInfoSize` underflows (size_t is
    // unsigned) when totalLength < 4; compare with addition instead.
    while (offset + lengthInfoSize < totalLength) {
        uint32_t naluLength = 0;
        // Read the 4-byte length prefix and convert big-endian -> host order.
        memcpy(&naluLength, dataPoint + offset, lengthInfoSize);
        naluLength = CFSwapInt32BigToHost(naluLength);
        
        // Guard against a corrupt length field running past the buffer end.
        if (offset + lengthInfoSize + naluLength > totalLength) {
            NSLog(@"VideoEncodeCallback: invalid NALU length %u at offset %zu", naluLength, offset);
            break;
        }
        
        // Re-package the NALU with an Annex-B start code.
        NSMutableData *data = [NSMutableData dataWithCapacity:4 + naluLength];
        [data appendBytes:startCode length:4];
        [data appendBytes:(dataPoint + offset + lengthInfoSize) length:naluLength];
        
        // Hand the NALU to the delegate on the callback queue.
        dispatch_async(encoder.callbackQueue, ^{
            [encoder.delegate videoEncodeCallback:data];
        });
        
        // Advance to the next length-prefixed NALU.
        offset += lengthInfoSize + naluLength;
    }
}

/// 1. Designated initializer: creates and configures the VTCompressionSession.
/// Even if session creation fails, returns a live (but non-functional) instance,
/// matching the original contract; check the log for the failure status.
/// @param config Encoding parameters (width/height, bitrate, fps).
- (instancetype)initWithConfig:(ZLVideoConfig *)config {
    
    if (self = [super init]) {
        _config = config;
        _encodeQueue = dispatch_queue_create("h264 hard encode queue", DISPATCH_QUEUE_SERIAL);
        _callbackQueue = dispatch_queue_create("h264 hard encode callback queue", DISPATCH_QUEUE_SERIAL);
        
        // Create the compression session; encoded frames arrive in VideoEncodeCallback,
        // which receives `self` back through the refCon pointer.
        OSStatus status = VTCompressionSessionCreate(kCFAllocatorDefault, (int32_t)_config.width, (int32_t)_config.height, kCMVideoCodecType_H264, NULL, NULL, NULL, VideoEncodeCallback, (__bridge void * _Nullable)(self), &_encodeSession);
        if (status != noErr) {
            NSLog(@"VTCompressionSessionCreate failed status = %d", (int)status);
            return self;
        }
        
        // Real-time encoding (live capture) instead of offline quality-first mode.
        status = VTSessionSetProperty(_encodeSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
        NSLog(@"VTSessionSetProperty: set RealTime return: %d", (int)status);
        
        // Baseline profile produces no B-frames, reducing latency for live streaming.
        status = VTSessionSetProperty(_encodeSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);
        NSLog(@"VTSessionSetProperty: set profile return: %d", (int)status);
        
        // Long-term average bitrate target, in bits per second. Instantaneous rate
        // may exceed it; only effective when frames carry timing information.
        CFNumberRef bit = (__bridge CFNumberRef)@(_config.bitrate);
        status = VTSessionSetProperty(_encodeSession, kVTCompressionPropertyKey_AverageBitRate, bit);
        NSLog(@"VTSessionSetProperty: set AverageBitRate return: %d", (int)status);
        
        // FIX: DataRateLimits takes (bytes, seconds) pairs, not two bitrates.
        // Cap the hard limit at 1.5x the average bitrate — converted to BYTES —
        // over any 1-second window. (The original passed bitrate/4 and bitrate*4,
        // which VideoToolbox would read as "bitrate/4 bytes per bitrate*4 seconds".)
        CFArrayRef limits = (__bridge CFArrayRef)@[@(_config.bitrate * 1.5 / 8), @1];
        status = VTSessionSetProperty(_encodeSession, kVTCompressionPropertyKey_DataRateLimits, limits);
        NSLog(@"VTSessionSetProperty: set DataRateLimits return: %d", (int)status);
        
        // Maximum keyframe interval (GOP size). Two seconds of frames per GOP;
        // overly long GOPs degrade seek/recovery quality.
        CFNumberRef maxKeyFrameInterval = (__bridge CFNumberRef)@(_config.fps * 2);
        status = VTSessionSetProperty(_encodeSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, maxKeyFrameInterval);
        NSLog(@"VTSessionSetProperty: set MaxKeyFrameInterval return: %d", (int)status);
        
        // Expected frame rate — a hint for rate control, not a hard limit.
        CFNumberRef expectedFrameRate = (__bridge CFNumberRef)@(_config.fps);
        status = VTSessionSetProperty(_encodeSession, kVTCompressionPropertyKey_ExpectedFrameRate, expectedFrameRate);
        NSLog(@"VTSessionSetProperty: set ExpectedFrameRate return: %d", (int)status);
        
        // Allocate encoder resources up front rather than lazily on the first frame.
        status = VTCompressionSessionPrepareToEncodeFrames(_encodeSession);
        NSLog(@"VTSessionSetProperty: set PrepareToEncodeFrames return: %d", (int)status);
        
    }
    return self;
    
}

/// 2. Submits a captured sample buffer for asynchronous H.264 encoding.
/// Encoded output is delivered through VideoEncodeCallback.
/// @param sampleBuffer A raw frame from the capture pipeline; retained for the
///                     duration of the async hop and released on the encode queue.
- (void)encodeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Keep the buffer alive across the async dispatch; balanced by CFRelease below.
    CFRetain(sampleBuffer);
    dispatch_async(_encodeQueue, ^{
        // Pixel data for this frame.
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        // FIX: guard against a NULL image buffer or a session that failed to
        // initialize — the original passed them straight to EncodeFrame.
        if (imageBuffer == NULL || self.encodeSession == NULL) {
            NSLog(@"VTCompression: missing image buffer or session, frame skipped");
            CFRelease(sampleBuffer);
            return;
        }
        // Presentation timestamp: frame counter over a 1000 timescale.
        // Explicit self-> makes the block's capture of self visible.
        self->frameID++;
        CMTime timeStamp = CMTimeMake(self->frameID, 1000);
        // Frame duration unknown; let the encoder infer it.
        CMTime duration = kCMTimeInvalid;
        VTEncodeInfoFlags flags;
        OSStatus status = VTCompressionSessionEncodeFrame(self.encodeSession, imageBuffer, timeStamp, duration, NULL, NULL, &flags);
        if (status != noErr) {
            NSLog(@"VTCompression: encode failed: status = %d", (int)status);
        }
        CFRelease(sampleBuffer);
    });
}

/// Tears down the compression session: flush in-flight frames, invalidate,
/// then release the CF object (ARC does not manage VTCompressionSessionRef).
- (void)dealloc {
    if (!_encodeSession) {
        return;
    }
    // Force out any frames still queued inside the encoder before invalidating.
    VTCompressionSessionCompleteFrames(_encodeSession, kCMTimeInvalid);
    VTCompressionSessionInvalidate(_encodeSession);
    CFRelease(_encodeSession);
    _encodeSession = NULL;
}

@end
