//
//  ViewController.m
//  VideoHardDecode
//
//  Created by ToothBond on 2017/5/11.
//  Copyright © 2017年 rensq. All rights reserved.
//

#import "ViewController.h"
#import <VideoToolbox/VideoToolbox.h>
#import "AAPLEAGLLayer.h"

//Annex-B NALU start code. Note the string literal appends a trailing NUL (5 bytes
//total), but every comparison in this file reads exactly the first 4 bytes.
const char pStartCode[] = "\x00\x00\x00\x01";

@interface ViewController ()
{
    //Staging buffer for raw bytes read from the file stream each tick.
    long inputMaxSize; //buffer capacity: max bytes held at once
    long inputSize;  //bytes currently buffered
    uint8_t *inputBuffer; //heap buffer holding the raw stream bytes
    
    
    //Most recently sliced NALU packet (4-byte start code included).
    long packetSize;
    uint8_t *packetBuffer;
    
    //Cached sequence parameter set (start code stripped).
    long spsSize;
    uint8_t *pSPS;
    
    //Cached picture parameter set (start code stripped).
    long ppsSize;
    uint8_t *pPPS;
    
}

//Drives one read/decode step per (throttled) screen refresh.
@property(nonatomic,weak)CADisplayLink *displayLink;
//Reads the raw .h264 elementary stream from the app bundle.
@property(nonatomic,strong)NSInputStream *inputStream;
//Background queue the read/decode work is dispatched to.
@property(nonatomic,strong)dispatch_queue_t queue;
//VideoToolbox hardware decode session (CF type under `assign`: manually managed).
@property(nonatomic,assign)VTDecompressionSessionRef decompresionSession;
//Video format built from SPS/PPS (CF type under `assign`: manually managed).
@property(nonatomic,assign)CMVideoFormatDescriptionRef formatDescription;
//OpenGL-backed layer that renders the decoded CVPixelBuffers.
@property(nonatomic,weak)AAPLEAGLLayer *openGLLayer;

@end

@implementation ViewController

/// Sets up the decode pipeline: a paused display link as the pacing clock,
/// an input stream over the bundled .h264 file, a background queue for the
/// decode work, and the OpenGL render layer.
- (void)viewDidLoad {
    [super viewDidLoad];
    
    //1. Display link: one tick drives one NALU read+decode. Target ~30fps.
    CADisplayLink *displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(updatePicFrame)];
    if (@available(iOS 10.0, *)) {
        displayLink.preferredFramesPerSecond = 30;
    } else {
        //frameInterval is deprecated in iOS 10; 2 = every other 60Hz vsync (~30fps).
        displayLink.frameInterval = 2;
    }
    self.displayLink = displayLink;
    [displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
    [displayLink setPaused:YES];
    
    //2. Input stream over the raw H.264 elementary stream in the bundle.
    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"123.h264" ofType:nil];
    self.inputStream = [NSInputStream inputStreamWithFileAtPath:filePath];
    
    //3. Background queue so decoding does not run inline on the main run loop.
    self.queue = dispatch_get_global_queue(0, 0);
    
    //4. Render layer for the decoded frames, behind any existing subviews.
    AAPLEAGLLayer *layer = [[AAPLEAGLLayer alloc] initWithFrame:self.view.bounds];
    self.openGLLayer = layer;
    [self.view.layer insertSublayer:layer atIndex:0];
}
/// Starts (or restarts) playback: allocates the staging buffer, opens the
/// stream, and unpauses the display link that drives the decode loop.
- (IBAction)play:(id)sender {
    
    //1. (Re)initialize the staging buffer. Free any buffer left over from a
    //   previous tap — the original leaked one allocation per replay.
    inputMaxSize = 720 * 1280;
    inputSize = 0;
    if (inputBuffer) {
        free(inputBuffer);
    }
    inputBuffer = malloc(inputMaxSize);
    
    //2. Open the stream...
    [self.inputStream open];
    
    //...and start ticking.
    [self.displayLink setPaused:NO];
}


/// Display-link tick: reads one NALU, converts it from Annex-B to AVCC
/// framing, and dispatches it by NALU type (cache SPS/PPS, decode frames).
/// NOTE(review): dispatch_sync blocks the main thread for the whole decode
/// of each tick — consider dispatch_async with serialized state access.
- (void)updatePicFrame
{
    dispatch_sync(self.queue, ^{
        //1. Pull the next Annex-B NALU out of the stream.
        [self readPacket];
        
        //2. Nothing produced -> end of stream: stop the loop and close up.
        if (packetSize == 0 && packetBuffer == NULL) {
            [self.displayLink setPaused:YES];
            [self.inputStream close];
            NSLog(@"数据已经读完了");
            return;
        }
        
        NSLog(@"读取到数据");
        //3. VideoToolbox expects AVCC framing: overwrite the 4-byte start code
        //   with a big-endian length field (payload size, header excluded).
        uint32_t nalSize = (uint32_t)(packetSize - 4);
        uint32_t *pNAL = (uint32_t *)packetBuffer;
        *pNAL = CFSwapInt32BigToHost(nalSize);
        
        //4. NALU type = low 5 bits of the first payload byte.
        //   0x07 = SPS, 0x08 = PPS, 0x05 = IDR slice; everything else decodes directly.
        int nalType = packetBuffer[4] & 0x1F;
        switch (nalType) {
            case 0x07:
                //Cache the SPS. free(NULL) is a no-op, so this also covers the
                //first SPS; the original leaked the old copy on every new SPS.
                free(pSPS);
                spsSize = packetSize - 4;
                pSPS = malloc(spsSize);
                memcpy(pSPS, packetBuffer + 4, spsSize);
                break;
            case 0x08:
                //Cache the PPS, freeing any previous copy (same leak fix).
                free(pPPS);
                ppsSize = packetSize - 4;
                pPPS = malloc(ppsSize);
                memcpy(pPPS, packetBuffer + 4, ppsSize);
                break;
            case 0x05:
                //IDR frame: (re)build the session from the cached SPS/PPS,
                //then decode the keyframe itself.
                [self initDecompressSession];
                [self decodeFrame];
                break;
                
            default:
                [self decodeFrame];
                break;
        }
        
    });
}

#pragma mark - 从文件中读取一个NALU数据
//AVFrame (编码前的数据)/AVPacket（编码后的数据）
#pragma mark - 从文件中读取一个NALU数据
/// Slices the next NALU (start code included) out of the staging buffer into
/// packetBuffer/packetSize, topping the buffer up from the stream first.
/// Leaves packetSize == 0 / packetBuffer == NULL when no complete NALU is
/// available (end of stream, or the final NALU with no trailing start code).
- (void)readPacket
{
    //1. Drop the previous packet before producing a new one.
    if (packetSize || packetBuffer) {
        packetSize = 0;
        free(packetBuffer);
        packetBuffer = NULL;
    }
    
    //2. Top up the staging buffer from the stream.
    if (inputSize < inputMaxSize && self.inputStream.hasBytesAvailable) {
        NSInteger bytesRead = [self.inputStream read:inputBuffer + inputSize
                                           maxLength:inputMaxSize - inputSize];
        //read:maxLength: returns -1 on error and 0 at end of stream; the
        //original added the raw result, so an error corrupted inputSize.
        if (bytesRead > 0) {
            inputSize += bytesRead;
        }
    }
    
    //3. Extract one NALU: the data from the leading start code up to (but not
    //   including) the next start code. Require at least 4 buffered bytes so
    //   memcmp never reads uninitialized memory.
    if (inputSize >= 4 && memcmp(inputBuffer, pStartCode, 4) == 0) {
        uint8_t *pStart = inputBuffer + 4;
        uint8_t *pEnd = inputBuffer + inputSize;
        
        while (pStart != pEnd) {
            //memcmp looks at the 4 bytes ending at *pStart, so the scan starts
            //just past the leading start code and never reads past pEnd.
            if (memcmp(pStart - 3, pStartCode, 4) == 0) {
                packetSize = pStart - 3 - inputBuffer;
                //Copy the NALU out of the staging buffer.
                packetBuffer = malloc(packetSize);
                memcpy(packetBuffer, inputBuffer, packetSize);
                
                //Compact the staging buffer: shift the remainder to the front.
                memmove(inputBuffer, inputBuffer + packetSize, inputSize - packetSize);
                inputSize -= packetSize;
                
                break;
            } else {
                pStart++;
            }
        }
    }
}

#pragma mark - 初始化VTDecompressionSessionRef
/// (Re)creates the VTDecompressionSession from the cached SPS/PPS.
/// Called on every IDR frame; the original leaked the previous session and
/// format description each time, and passed an uninitialized refCon field.
- (void)initDecompressSession
{
    //0. Tear down any previous session/format before recreating.
    if (_decompresionSession) {
        VTDecompressionSessionInvalidate(_decompresionSession);
        CFRelease(_decompresionSession);
        _decompresionSession = NULL;
    }
    if (_formatDescription) {
        CFRelease(_formatDescription);
        _formatDescription = NULL;
    }
    
    //1. Build the CMVideoFormatDescription from SPS + PPS, with 4-byte AVCC
    //   NAL length headers (matches the start-code rewrite in updatePicFrame).
    const uint8_t *pParamsSet[2] = {pSPS,pPPS};
    const size_t pParamSizes[2] = {spsSize,ppsSize};
    
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(NULL, 2, pParamsSet, pParamSizes, 4, &_formatDescription);
    if (status != noErr) {
        NSLog(@"CMVideoFormatDescriptionCreateFromH264ParameterSets failed: %d", (int)status);
        return;
    }
    
    //2. Create the session; request NV12 (bi-planar 4:2:0 full-range) output.
    NSDictionary *attrs = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
    VTDecompressionOutputCallbackRecord callbackRecord;
    callbackRecord.decompressionOutputCallback = decodeCallback;
    //Explicitly NULL: the callback uses sourceFrameRefCon (set per frame in
    //decodeFrame), and the original left this field as garbage stack data.
    callbackRecord.decompressionOutputRefCon = NULL;

    status = VTDecompressionSessionCreate(NULL, self.formatDescription, NULL, (__bridge CFDictionaryRef _Nullable)(attrs), &callbackRecord, &(_decompresionSession));
    if (status != noErr) {
        NSLog(@"VTDecompressionSessionCreate failed: %d", (int)status);
    }
}

/// VTDecompressionSession output callback. Forwards each successfully decoded
/// frame to the render layer; sourceFrameRefCon carries the ViewController
/// (bridged, no ownership transfer — set per frame in decodeFrame).
void decodeCallback(
              void * CM_NULLABLE decompressionOutputRefCon,
              void * CM_NULLABLE sourceFrameRefCon,
              OSStatus status,
              VTDecodeInfoFlags infoFlags,
              CM_NULLABLE CVImageBufferRef imageBuffer,
              CMTime presentationTimeStamp,
              CMTime presentationDuration ){
    //On failure (or a dropped frame) imageBuffer is NULL — the original pushed
    //it to the layer unconditionally. Bail out instead.
    if (status != noErr || imageBuffer == NULL) {
        NSLog(@"decode callback failed, status: %d", (int)status);
        return;
    }
    NSLog(@"解码出一帧数据");
    //Hand the decoded CVImageBuffer to the OpenGL layer for display.
    //NOTE(review): assumes AAPLEAGLLayer's pixelBuffer setter retains the
    //buffer, since it is only guaranteed valid for the callback's duration.
    ViewController *vc = (__bridge ViewController *)sourceFrameRefCon;
    vc.openGLLayer.pixelBuffer = imageBuffer;
}

#pragma mark - 解码数据
/// Decodes the current AVCC packet (packetBuffer/packetSize) synchronously.
/// Frames are delivered through decodeCallback with self as sourceFrameRefCon.
- (void)decodeFrame
{
    //1. Wrap the packet in a CMBlockBuffer. kCFAllocatorNull means the bytes
    //   are NOT copied, so packetBuffer must outlive the decode — safe here
    //   because the decode below runs synchronously (no async flag).
    CMBlockBufferRef blockBuffer = NULL;
    OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL, (void *)packetBuffer, packetSize, kCFAllocatorNull, NULL, 0, packetSize, 0, &blockBuffer);
    if (status != kCMBlockBufferNoErr || blockBuffer == NULL) {
        return;
    }
    
    //2. Build a ready CMSampleBuffer holding exactly one sample. The original
    //   passed numSamples=0 and numSampleSizeEntries=0 alongside a non-NULL
    //   size array, which violates the documented contract (1 sample, 1 entry).
    size_t sizeArray[] = {packetSize};
    CMSampleBufferRef sampleBuffer = NULL;
    status = CMSampleBufferCreateReady(NULL, blockBuffer, self.formatDescription, 1, 0, NULL, 1, sizeArray, &sampleBuffer);
    if (status != noErr || sampleBuffer == NULL) {
        CFRelease(blockBuffer);
        return;
    }
    
    //3. Synchronous decode; self rides along as sourceFrameRefCon so the C
    //   callback can reach the render layer.
    VTDecompressionSessionDecodeFrame(self.decompresionSession, sampleBuffer, 0, (__bridge void * _Nullable)(self), NULL);
    
    //4. Balance the Create calls — the original leaked both objects per frame.
    CFRelease(sampleBuffer);
    CFRelease(blockBuffer);
}

@end
