//
//  FFmpegDecoder.m
//  FFmpegDemo
//
//  Created by WZ_MAC on 2017/7/28.
//  Copyright © 2017年 wz. All rights reserved.
//


#import "FFmpegDecoder.h"

@interface FFmpegDecoder()
{
    
    // Demuxer context for the opened file / RTSP stream; owned by this object.
    AVFormatContext     *_formatCtx;
    // Decoder context of the selected video stream. Points into
    // _formatCtx->streams[_videoStream]->codec (deprecated field) — not separately owned.
    AVCodecContext      *_pCodecCtx;
    // Index of the first video stream found during open (-1 if none was found).
    int                 _videoStream;
    
    // SPS parameter set extracted from the codec extradata, including its 4-byte start code.
    NSData *_spsData;
    // PPS parameter set extracted from the codec extradata, including its 4-byte start code.
    NSData *_ppsData;
    // Set to YES (typically from the main thread) to break the blocking
    // packet-read loop in -startReadAVPacketAndDecodeNalu.
    BOOL   _stopDecode;
}


@end


// Error domain used for NSError objects produced by -openFile:error:.
static NSString * const FFmpegDecoderErrorDomain = @"FFmpegDecoderErrorDomain";

@implementation FFmpegDecoder

#pragma mark - Lifecycle

- (void)dealloc {
    // Fix: release the FFmpeg contexts — previously both leaked on dealloc.
    if (_pCodecCtx) {
        avcodec_close(_pCodecCtx);
        // _pCodecCtx points into _formatCtx's stream array; avformat_close_input
        // below frees the backing storage, so only NULL the pointer here.
        _pCodecCtx = NULL;
    }
    if (_formatCtx) {
        avformat_close_input(&_formatCtx);
    }
    NSLog(@"%@销毁了！",NSStringFromClass([self class]));
}

#pragma mark - Factory

/// Creates a decoder and opens the stream at `path`.
/// @param path  File path or RTSP URL to open. Must not be nil.
/// @param error Out-parameter populated on failure. May be NULL.
/// @return A ready decoder, or nil when the stream could not be opened.
///         (Fix: the original ignored the result of -openFile:error: and
///         returned a half-initialized decoder on failure.)
+ (instancetype)createDecoderWithPath: (NSString *)path error: (NSError **)error {
    
    FFmpegDecoder *decoder = [[FFmpegDecoder alloc] init];
    if (![decoder openFile:path error:error]) {
        return nil;
    }
    return decoder;
}

#pragma mark - Open

/// Opens the file / RTSP URL, finds the first video stream and opens its
/// decoder. Returns NO and fills `*error` on any failure (the original
/// declared the NSError out-parameter but never wrote to it).
- (BOOL)openFile: (NSString *)path error: (NSError **)error {
    
    NSAssert(path, @"path cannot be nil!");
    
    // Initialize networking (required for RTSP) and register demuxers/codecs.
    avformat_network_init();
    av_register_all();
    
    // Force the RTSP transport to TCP.
    // Fix: these options must be handed to avformat_open_input — passing them
    // to avformat_find_stream_info (as the original did) is too late for the
    // transport selection to take effect.
    AVDictionary *options = NULL;
    av_dict_set(&options, "rtsp_transport", "tcp", 0);
    // Optional probing limits, kept from the original for reference:
    //av_dict_set(&options, "analyzeduration", "6000000", 0); // max analyze duration in microseconds
    //av_dict_set(&options, "probesize", "122880", 0);        // max bytes probed during stream analysis
    
    AVFormatContext *ctx = NULL;
    const char *filename = [path cStringUsingEncoding:NSUTF8StringEncoding];
    
    int res = avformat_open_input(&ctx, filename, NULL, &options);
    av_dict_free(&options); // fix: unconsumed dictionary entries leaked before
    if (res < 0) {
        // On failure avformat_open_input frees ctx and sets it to NULL,
        // so no explicit avformat_free_context is required here.
        NSLog(@"open file failed:%@",path);
        [self reportError:error
                     code:res
                  message:[NSString stringWithFormat:@"open file failed:%@", path]];
        return NO;
    }
    
    _formatCtx = ctx;
    // Fix: OR the flag in; plain assignment wiped every flag FFmpeg had set.
    _formatCtx->flags |= AVFMT_FLAG_KEEP_SIDE_DATA;
    
    if (avformat_find_stream_info(ctx, NULL) < 0) {
        avformat_close_input(&ctx);
        _formatCtx = NULL;
        NSLog(@"not found stream data:%@",path);
        [self reportError:error
                     code:-1
                  message:[NSString stringWithFormat:@"not found stream data:%@", path]];
        return NO;
    }

    // Locate the first video stream.
    _videoStream = -1;
    for (int i = 0; i < (int)_formatCtx->nb_streams; i++) {
        if (_formatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            _videoStream = i;
            break;
        }
    }
    // Fix: the original indexed streams[-1] when no video stream existed.
    if (_videoStream < 0) {
        avformat_close_input(&_formatCtx);
        [self reportError:error
                     code:-1
                  message:[NSString stringWithFormat:@"no video stream found in:%@", path]];
        return NO;
    }
    _pCodecCtx = _formatCtx->streams[_videoStream]->codec;
    
    AVCodec *pcodec = avcodec_find_decoder(_pCodecCtx->codec_id);
    // Fix: the decoder lookup and avcodec_open2 results were unchecked.
    if (pcodec == NULL || avcodec_open2(_pCodecCtx, pcodec, NULL) < 0) {
        avformat_close_input(&_formatCtx);
        _pCodecCtx = NULL;
        [self reportError:error
                     code:-1
                  message:[NSString stringWithFormat:@"cannot open video decoder for:%@", path]];
        return NO;
    }
    
    /** Prints detailed information about the input format to the console, such as
     * duration, bitrate, streams, container, programs, metadata, side data,
     * codec and time base.
     *
     * @param ic        the context to analyze
     * @param index     index of the stream to dump information about
     * @param url       the URL to print, such as source or destination file
     * @param is_output Select whether the specified context is an input(0) or output(1)
     */
    av_dump_format(ctx, 0, filename, false);
    
    return YES;
}

#pragma mark - Decoding

// Reads AVPackets from the input (FFmpeg maintains the RTSP/TCP connection
// internally) and feeds each video packet's raw H.264 data to self.h264Decoder.
// SPS/PPS live in the codec extradata, so they are extracted and pushed first
// via -findSPSAndPPSInCodec:. This method blocks until EOF/error or until
// -stopReadAVPacketAndDecodeNalu is called — run it on a background thread.
- (void)startReadAVPacketAndDecodeNalu {
    
    // Fix: guard against being called when -openFile:error: failed.
    if (_formatCtx == NULL || _videoStream < 0) {
        return;
    }
    
    _pCodecCtx = _formatCtx->streams[_videoStream]->codec;
    
    // Find SPS & PPS and hand them to the decoder before any frame data.
    [self findSPSAndPPSInCodec:_pCodecCtx];
  
    _stopDecode = NO;
    AVPacket packet;
    
    // _stopDecode is tested first so a stop request skips the blocking read.
    while (_stopDecode == NO && av_read_frame(_formatCtx, &packet) >= 0) {
        if (packet.stream_index == _videoStream) { // only feed the video stream
            [self.h264Decoder decodeNalu:packet.data withSize:packet.size];
        }
        // Fix: packets were never released, leaking memory for every frame read.
        av_packet_unref(&packet);
    }
    
}

// Called from a different thread (the read loop blocks its own thread);
// flipping _stopDecode makes the while-loop in -startReadAVPacketAndDecodeNalu exit.
- (void)stopReadAVPacketAndDecodeNalu {
    _stopDecode = YES;
}

#pragma mark - SPS/PPS extraction

// Locates the SPS and PPS parameter sets in codec->extradata and feeds each,
// including its 4-byte Annex B start code, to self.h264Decoder.
// The extradata is first converted from avcC (MP4) framing to Annex B framing
// in place via the h264_mp4toannexb bitstream filter.
- (void)findSPSAndPPSInCodec:(AVCodecContext *)codec {
    
    // Fix: guard against missing/short extradata (the original dereferenced it blindly).
    if (codec->extradata == NULL || codec->extradata_size < 8) {
        return;
    }
    
    // Run the bitstream filter once with an empty packet purely for its side
    // effect of rewriting codec->extradata with Annex B start codes.
    // NOTE: the filter may log "Packet header is not contained in global
    // extradata…" — this does not affect the extradata rewrite relied on here.
    uint8_t *filtered = NULL;
    int filteredSize = 0;
    AVBitStreamFilterContext *bsfc = av_bitstream_filter_init("h264_mp4toannexb");
    if (bsfc != NULL) {
        av_bitstream_filter_filter(bsfc, codec, NULL, &filtered, &filteredSize, NULL, 0, 0);
        av_bitstream_filter_close(bsfc); // fix: the filter context was leaked
    }
    av_free(filtered); // fix: FFmpeg allocations must be freed with av_free, not free()
    
    // After conversion the extradata is laid out as:
    //   00 00 00 01 <SPS …> 00 00 00 01 <PPS …>
    // Scan for the two 4-byte start codes to find where SPS and PPS begin.
    int startCodeSPSIndex = 0; // index just past the SPS start code
    int startCodePPSIndex = 0; // index just past the PPS start code
    uint8_t *extradata = codec->extradata;
    for (int i = 3; i < codec->extradata_size; i++) {
        if (extradata[i] == 0x01 && extradata[i-1] == 0x00 && extradata[i-2] == 0x00 && extradata[i-3] == 0x00) {
            if (startCodeSPSIndex == 0) startCodeSPSIndex = i + 1;
            if (i > startCodeSPSIndex) {
                startCodePPSIndex = i + 1;
                break;
            }
        }
    }
    
    // Fix: bail out when both start codes were not found — the original would
    // compute a negative length and crash inside dataWithBytes:length:.
    if (startCodeSPSIndex == 0 || startCodePPSIndex == 0) {
        return;
    }
    
    // SPS runs from the start of extradata (including its start code) up to
    // the PPS start code (4 bytes before startCodePPSIndex); PPS is the rest.
    int spsLength = startCodePPSIndex - 4;
    int ppsLength = codec->extradata_size - spsLength;
    
    _spsData = [NSData dataWithBytes:&extradata[0] length:spsLength];
    [self.h264Decoder decodeNalu:(uint8_t *)_spsData.bytes withSize:spsLength];
    
    _ppsData = [NSData dataWithBytes:&extradata[startCodePPSIndex - 4] length:ppsLength];
    [self.h264Decoder decodeNalu:(uint8_t *)_ppsData.bytes withSize:ppsLength];
}

#pragma mark - Private

// Wraps an FFmpeg failure (negative return code) in an NSError so Cocoa
// callers can inspect it. Writes to *error only when a pointer was supplied.
- (void)reportError:(NSError **)error code:(NSInteger)code message:(NSString *)message {
    if (error) {
        *error = [NSError errorWithDomain:FFmpegDecoderErrorDomain
                                     code:code
                                 userInfo:@{NSLocalizedDescriptionKey : message}];
    }
}

@end
