//
//  H264Encoder.m
//  SQVideoCapture
//
//  Created by ToothBond on 2017/5/4.
//  Copyright © 2017年 rensq. All rights reserved.
//

#import "H264Encoder.h"
#import <VideoToolbox/VideoToolbox.h>

@interface H264Encoder ()

// Underlying VideoToolbox compression session; created in
// -prepareEncodeWithWidth:height: and torn down in -endEncode.
// NOTE(review): CF object held under `assign` — ARC does not manage it,
// ownership is manual (CFRelease happens in -endEncode).
@property(nonatomic,assign)VTCompressionSessionRef compressionSession;
// Index of the frame being submitted; used as the PTS numerator in -encodeFrame:.
@property(nonatomic,assign)int  frameIndex;

// Handle for appending encoded Annex-B NALUs to Documents/123.h264.
@property(nonatomic,strong)NSFileHandle *fileHandle;

@end

@implementation H264Encoder

/// Creates the output file and configures a VideoToolbox H.264 session.
/// @param width  Width of the incoming video frames, in pixels.
/// @param height Height of the incoming video frames, in pixels.
- (void)prepareEncodeWithWidth:(int)width height:(int)height
{
    // 0. Create the NSFileHandle for the output file (Documents/123.h264),
    //    replacing any file left over from a previous run.
    NSString *filePath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject] stringByAppendingPathComponent:@"123.h264"];
    NSLog(@"filePath = %@",filePath);

    [[NSFileManager defaultManager] removeItemAtPath:filePath error:NULL];
    [[NSFileManager defaultManager] createFileAtPath:filePath contents:nil attributes:nil];

    self.fileHandle = [NSFileHandle fileHandleForWritingAtPath:filePath];

    self.frameIndex = 0;

    // Tear down any session left over from a previous -prepare… call so
    // repeated preparation does not leak the old VTCompressionSessionRef.
    if (_compressionSession) {
        VTCompressionSessionInvalidate(_compressionSession);
        CFRelease(_compressionSession);
        _compressionSession = NULL;
    }

    // 1. Create the compression session.
    //    - width/height: video dimensions
    //    - kCMVideoCodecType_H264: encoding standard
    //    - didCompressionCallback: called once per encoded frame
    //    - (__bridge void *)self: context so the C callback can reach this object
    //    - &_compressionSession: receives the created session
    OSStatus status = VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL, NULL, NULL, didCompressionCallback, (__bridge void * _Nullable)(self), &_compressionSession);
    if (status != noErr) {
        NSLog(@"VTCompressionSessionCreate failed: %d", (int)status);
        return;
    }

    // 2. Configure the session.
    // 2.1 Real-time output (low-latency, suitable for live capture).
    VTSessionSetProperty(self.compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
    // 2.2 Expected frame rate — must match the PTS timescale used in -encodeFrame:.
    VTSessionSetProperty(self.compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef _Nonnull)(@24));

    // 2.3 Bitrate: average in bits/second, plus a hard cap of [bytes, seconds].
    VTSessionSetProperty(self.compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef _Nonnull)(@1500000));
    VTSessionSetProperty(self.compressionSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFTypeRef _Nonnull)(@[@(1500000/8),@1]));

    // 2.4 GOP size: at most 20 frames between keyframes.
    VTSessionSetProperty(self.compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, (__bridge CFTypeRef _Nonnull)(@20));

    // 3. Allocate encoder resources up front.
    VTCompressionSessionPrepareToEncodeFrames(self.compressionSession);
}

/// Submits one captured frame to the compression session.
/// The encoded result arrives asynchronously in didCompressionCallback().
/// @param sampleBuffer A raw (uncompressed) sample buffer from capture.
- (void)encodeFrame:(CMSampleBufferRef)sampleBuffer
{
    // 1. Extract the raw pixel buffer from the captured sample buffer.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        NSLog(@"encodeFrame: sample buffer carries no image buffer, skipping");
        return;
    }

    // 2. Submit the frame. PTS = frameIndex / 24 so timestamps advance one
    //    frame period per frame at the configured 24 fps.
    //    BUG FIX: frameIndex was never incremented, so every frame was
    //    submitted with the same presentation timestamp (0/24).
    CMTime PTS = CMTimeMake(self.frameIndex, 24);
    self.frameIndex += 1;
    OSStatus status = VTCompressionSessionEncodeFrame(self.compressionSession, imageBuffer, PTS, kCMTimeInvalid, NULL, NULL, NULL);
    if (status != noErr) {
        NSLog(@"VTCompressionSessionEncodeFrame failed: %d", (int)status);
        return;
    }
    NSLog(@"开始编码一帧数据");
}

#pragma mark - Receive encoded output
/// VideoToolbox output callback, invoked once per encoded frame.
/// For keyframes it first writes SPS/PPS, then writes every NALU in the
/// sample, all in Annex-B format (4-byte start codes) via -writeData:.
void didCompressionCallback(void * CM_NULLABLE outputCallbackRefCon,
                                    void * CM_NULLABLE sourceFrameRefCon,
                                    OSStatus status,
                                    VTEncodeInfoFlags infoFlags,
                            CM_NULLABLE CMSampleBufferRef sampleBuffer ){
    // Bail out on encoder errors, dropped frames, or unready data.
    if (status != noErr || sampleBuffer == NULL) {
        NSLog(@"didCompressionCallback: encode failed, status = %d", (int)status);
        return;
    }
    if (infoFlags & kVTEncodeInfo_FrameDropped) {
        return;
    }
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        return;
    }

    // 0. Recover the encoder object from the callback context.
    H264Encoder *encoder = (__bridge H264Encoder *)(outputCallbackRefCon);

    // 1. Keyframe detection.
    //    BUG FIX: a sample is a keyframe when kCMSampleAttachmentKey_NotSync
    //    is ABSENT from its attachments. The original code omitted the
    //    negation, so delta frames were treated as keyframes (and real
    //    keyframes were not), writing SPS/PPS in the wrong places.
    Boolean isKeyFrame = false;
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
    if (attachments != NULL && CFArrayGetCount(attachments) > 0) {
        CFDictionaryRef dict = CFArrayGetValueAtIndex(attachments, 0);
        isKeyFrame = !CFDictionaryContainsKey(dict, kCMSampleAttachmentKey_NotSync);
    }

    // 2. For keyframes, emit SPS (parameter set index 0) and PPS (index 1)
    //    first so a decoder can start playback from this point.
    if (isKeyFrame) {
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);

        const uint8_t *spsOut;
        size_t spsSize, spsCount;
        CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &spsOut, &spsSize, &spsCount, NULL);

        const uint8_t *ppsOut;
        size_t ppsSize, ppsCount;
        CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &ppsOut, &ppsSize, &ppsCount, NULL);

        [encoder writeData:[NSData dataWithBytes:spsOut length:spsSize]];
        [encoder writeData:[NSData dataWithBytes:ppsOut length:ppsSize]];
    }

    // 3. Walk the AVCC-framed payload: each NALU is prefixed by a 4-byte
    //    big-endian length field. One frame may contain several NALUs (slices).
    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t totalLength = 0;
    char *dataPointer = NULL;
    OSStatus blockStatus = CMBlockBufferGetDataPointer(blockBuffer, 0, NULL, &totalLength, &dataPointer);
    if (blockStatus != kCMBlockBufferNoErr || dataPointer == NULL) {
        return;
    }

    static const size_t AVCCHeaderLength = 4;
    size_t bufferOffset = 0;
    // Additive guard avoids size_t underflow when totalLength < header size.
    while (bufferOffset + AVCCHeaderLength < totalLength) {
        // Read the 4-byte length prefix. H.264 length fields are big-endian,
        // so convert to host byte order before use.
        uint32_t NALULength = 0;
        memcpy(&NALULength, dataPointer + bufferOffset, AVCCHeaderLength);
        NALULength = CFSwapInt32BigToHost(NALULength);

        // Write this NALU; -writeData: prepends the Annex-B start code.
        NSData *data = [NSData dataWithBytes:dataPointer + bufferOffset + AVCCHeaderLength length:NALULength];
        [encoder writeData:data];

        // Advance past the length prefix plus the NALU payload.
        bufferOffset += NALULength + AVCCHeaderLength;
    }
}

/// Appends one NALU to the output file, preceded by the 4-byte
/// Annex-B start code 0x00 0x00 0x00 0x01.
/// @param data The NALU payload (no length prefix, no start code).
- (void)writeData:(NSData *)data {
    // The start code is identical for every write; build it from a
    // fixed byte array rather than a C string literal.
    static const uint8_t kStartCode[4] = {0x00, 0x00, 0x00, 0x01};
    NSData *startCodeData = [NSData dataWithBytes:kStartCode length:sizeof(kStartCode)];

    // Emit the start code, then the NALU payload.
    [self.fileHandle writeData:startCodeData];
    [self.fileHandle writeData:data];
}

/// Finishes the stream and releases all encoding resources.
/// Safe to call more than once.
- (void)endEncode {
    if (_compressionSession) {
        // Flush frames still queued inside the encoder before tearing down,
        // otherwise the tail of the stream is silently lost.
        VTCompressionSessionCompleteFrames(_compressionSession, kCMTimeInvalid);
        VTCompressionSessionInvalidate(_compressionSession);
        CFRelease(_compressionSession);
        // Null out to avoid a dangling pointer and a double CFRelease if
        // -endEncode is called again.
        _compressionSession = NULL;
    }
    // Close the output file so buffered data reaches disk.
    [self.fileHandle closeFile];
    self.fileHandle = nil;
}

@end
