//
//  H264Encoder.m
//  VedioEncoder
//
//  Created by mac on 2017/6/21.
//  Copyright © 2017年 mac. All rights reserved.
//

#import "H264Encoder.h"

static long long frameCount;

@interface H264Encoder ()
{
    VTCompressionSessionRef encoderSession;   // VideoToolbox compression session (created in -initConfiguration)
    dispatch_queue_t encoderQueue;            // serial queue that serializes encode submissions

    NSData *_spsData;                         // cached H.264 sequence parameter set (taken from keyframes)
    NSData *_ppsData;                         // cached H.264 picture parameter set

    // __weak: a delegate must not be retained by the object it observes.
    // The previous implicit strong reference created a retain cycle whenever
    // the delegate (directly or indirectly) owned this encoder.
    __weak id<H264EncoderDelegate> _delegate;
}
@end

@implementation H264Encoder
/// Shared singleton encoder, created lazily and exactly once.
+ (H264Encoder *)defaultEncoder
{
    static H264Encoder *sharedEncoder = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        sharedEncoder = [[self alloc] init];
    });
    return sharedEncoder;
}

/// Designated initializer: sets up the serial queue used to serialize
/// encode submissions.
- (instancetype)init
{
    if ((self = [super init]) != nil) {
        encoderQueue = dispatch_queue_create("encoderVideo", DISPATCH_QUEUE_SERIAL);
    }
    return self;
}
/// Stores the delegate that will receive SPS/PPS and encoded NAL units.
- (void)setDelegate:(id<H264EncoderDelegate>)delegate
{
    self->_delegate = delegate;
}
/// Creates and configures the VideoToolbox H.264 compression session.
/// Must be called before -encoderSampleBuffer:.  Output frames are
/// delivered asynchronously to encodeOutputCallback.
- (void)initConfiguration
{
    frameCount = 0;

    // Encoded frame dimensions in pixels.
    // NOTE(review): hard-coded to an iPhone portrait point size — confirm
    // these match the actual capture resolution.
    const int32_t kEncodeWidth  = 375;
    const int32_t kEncodeHeight = 667;

    OSStatus status = VTCompressionSessionCreate(kCFAllocatorDefault,
                                                 kEncodeWidth,
                                                 kEncodeHeight,
                                                 kCMVideoCodecType_H264,
                                                 NULL, NULL, NULL,
                                                 encodeOutputCallback,
                                                 (__bridge void *)self,
                                                 &encoderSession);
    if (status != noErr)
    {
        NSLog(@"create Compression error");
        return; // no session — configuring it would be meaningless
    }

    // Real-time encoding: favor latency over compression efficiency.
    status = VTSessionSetProperty(encoderSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
    NSLog(@"set realtime return %d",(int)status);

    // Baseline profile: widest decoder compatibility (no B-frames).
    status = VTSessionSetProperty(encoderSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);
    NSLog(@"set profile return %d",(int)status);

    // 设置关键帧间隔(GOPsize): one keyframe every 10 frames.
    int frameInterval = 10;
    CFNumberRef frameIntervalRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &frameInterval);
    VTSessionSetProperty(encoderSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, frameIntervalRef);
    CFRelease(frameIntervalRef); // VTSessionSetProperty retains; original leaked this

    // 设置期望帧率 (a hint, not a hard limit).
    int fps = 30;
    CFNumberRef fpsRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &fps);
    VTSessionSetProperty(encoderSession, kVTCompressionPropertyKey_ExpectedFrameRate, fpsRef);
    CFRelease(fpsRef);

    // 设置码率，上限，单位是bps (average bit rate, bits per second).
    int bitRate = kEncodeWidth * kEncodeHeight * 3 * 4 * 8;
    CFNumberRef bitRateRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bitRate);
    VTSessionSetProperty(encoderSession, kVTCompressionPropertyKey_AverageBitRate, bitRateRef);
    CFRelease(bitRateRef);

    // 设置码率，均值，单位是byte.  kVTCompressionPropertyKey_DataRateLimits
    // requires a CFArray of alternating [bytes, seconds] pairs — passing a
    // bare CFNumber (as the original did) is silently rejected.
    int bitRateLimit = kEncodeWidth * kEncodeHeight * 3 * 4;
    VTSessionSetProperty(encoderSession,
                         kVTCompressionPropertyKey_DataRateLimits,
                         (__bridge CFArrayRef)@[@(bitRateLimit), @1]);

    status = VTCompressionSessionPrepareToEncodeFrames(encoderSession);
    NSLog(@"prepare to encode return %d",(int)status);
}

/// Submits one captured sample buffer to the encoder, synchronously on the
/// serial encoder queue.  Encoded output arrives via encodeOutputCallback.
- (void)encoderSampleBuffer:(CMSampleBufferRef)buffer
{
    dispatch_sync(encoderQueue, ^{
        // Guard: calling VTCompressionSessionEncodeFrame with a NULL session
        // (before -initConfiguration, or after a failed create) is invalid.
        if (encoderSession == NULL)
        {
            NSLog(@"encoderSampleBuffer: no compression session");
            return;
        }
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(buffer);
        // Presentation timestamp: frame index on a 1000 Hz timescale.
        // NOTE(review): this places frame N at N milliseconds, i.e. assumes
        // ~1000 fps — confirm the timescale matches the real capture rate.
        CMTime pts = CMTimeMake(frameCount, 1000);
        CMTime duration = CMSampleBufferGetDuration(buffer);
        VTEncodeInfoFlags flags;
        OSStatus status = VTCompressionSessionEncodeFrame(encoderSession, imageBuffer, pts, duration, NULL, NULL, &flags);
        if (status != noErr)
        {
            NSLog(@"encode frame with status %d",(int)status);
        }
        frameCount++;
    });
}

/// VideoToolbox output callback, invoked once per encoded frame.
/// Extracts SPS/PPS from keyframes and forwards every NAL unit (the buffer
/// is in AVCC format: 4-byte big-endian length prefix per NAL) to the
/// delegate.
void encodeOutputCallback(void *userData, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags,
                          CMSampleBufferRef sampleBuffer )
{
    if(status != noErr)
    {
        NSLog(@"h264编码失败");
        return; // original fell through and dereferenced the failed buffer
    }
    // Dropped frames arrive with a NULL / not-ready sample buffer.
    if(sampleBuffer == NULL || !CMSampleBufferDataIsReady(sampleBuffer))
    {
        return;
    }
    H264Encoder *this = (__bridge H264Encoder *)userData;
    // A sample without the NotSync attachment is a sync (key) frame.
    bool isKeyFrame = !CFDictionaryContainsKey((CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0)), kCMSampleAttachmentKey_NotSync);
    if(isKeyFrame)
    {
        // Keyframes carry SPS/PPS in the format description; cache and
        // forward them so the receiver can initialize its decoder.
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
        size_t sparamterSetSize,sparamterSetCount;
        const uint8_t *sparamterSet;
        OSStatus statusCodes = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparamterSet, &sparamterSetSize, &sparamterSetCount, 0);
        if(statusCodes == noErr)
        {
            size_t pparamterSetSize,pparamterSetCount;
            const uint8_t *pparamterSet;
            OSStatus statusCodep = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparamterSet, &pparamterSetSize, &pparamterSetCount, 0);
            if(statusCodep == noErr)
            {
                this->_spsData = [NSData dataWithBytes:sparamterSet length:sparamterSetSize];
                this->_ppsData = [NSData dataWithBytes:pparamterSet length:pparamterSetSize];
                // Notify only when BOTH parameter sets were fetched; the
                // original also fired on PPS failure, delivering stale or
                // nil data to the delegate.
                if(this->_delegate)
                {
                    [this->_delegate getSpsPps:this->_spsData pps:this->_ppsData];
                }
            }
        }
    }

    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    if(dataBuffer == NULL)
    {
        return;
    }
    size_t length,totalLength;
    char *dataPointer;
    OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
    if(statusCodeRet == noErr)
    {
        size_t bufferOffset = 0;
        static const size_t AVCCHeaderLength = 4; // AVCC 4-byte NAL length prefix
        // Rewritten from `bufferOffset < totalLength - AVCCHeaderLength`,
        // which underflows (size_t arithmetic) when totalLength < 4 and then
        // reads far out of bounds.
        while (bufferOffset + AVCCHeaderLength < totalLength)
        {
            uint32_t NALUnitLength = 0;
            memcpy(&NALUnitLength, dataPointer+bufferOffset, AVCCHeaderLength);
            NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
            // Never read past the end of the block buffer on a corrupt length.
            if (NALUnitLength > totalLength - bufferOffset - AVCCHeaderLength)
            {
                break;
            }
            NSData *data = [[NSData alloc]initWithBytes:(dataPointer+bufferOffset+AVCCHeaderLength) length:NALUnitLength];
            if(this->_delegate)
            {
                [this->_delegate getEncodeData:data isKeyFrame:isKeyFrame frameCount:frameCount];
            }
            bufferOffset += AVCCHeaderLength + NALUnitLength;
        }
    }
}
@end
