//
//  FFMPEGClinet_Video.m
//  IPCamera
//
//  Created by 宣佚 on 15/6/16.
//  Copyright (c) 2015年 Andon Health Co,.Ltd. All rights reserved.
//

#import "FFMPEGClinet_Video.h"

@interface FFMPEGClinet_Video ()
{
    // NAL start-code markers used when splitting an incoming H.264 byte stream.
    // NOTE(review): never written in this file — presumably set by stream-parsing
    // code elsewhere; confirm before removing.
    NSData *startcodeData;
    NSData *lastStartCode;
    
    //ffmpeg
    AVFrame *frame;                     // decoded YUV frame (decoder output)
    AVFrame *RGBFrame;                  // RGB24 frame backed by `buffer`, fed to CGImage
    AVPicture picture;                  // NOTE(review): unused in this file — verify before removing
    AVCodec *codec;                     // H.264 decoder
    AVCodecContext *codecCtx;           // decoder context (width/height/pix_fmt read after decode)
    AVPacket packet;                    // reused packet wrapping each incoming buffer
    struct SwsContext *img_convert_ctx; // cached YUV->RGB24 scaler context
    
    int numBytes;                       // byte size of the RGB24 buffer below
    uint8_t *buffer;                    // pixel storage for RGBFrame, av_malloc'd lazily
    
    CGSize viewVedioSize;               // NOTE(review): unused in this file (sic "Vedio") — verify
}

@end

@implementation FFMPEGClinet_Video

/// Returns the process-wide shared decoder client.
/// Thread-safe: the instance is created exactly once via dispatch_once.
+ (FFMPEGClinet_Video *)sharedInstance
{
    static FFMPEGClinet_Video *sharedClient = nil;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        sharedClient = [[FFMPEGClinet_Video alloc] init];
    });
    return sharedClient;
}

/// Sets up the FFmpeg H.264 decoder: finds the codec, allocates and opens the
/// codec context, and allocates the working frames.
/// Must be called once before -decodeWithMediaPureData:Size:.
/// On any failure the partially-created state is released and the method
/// returns, leaving the ivars NULL so the decode path can bail out safely.
- (void)initVideoForNetH264Stream
{
    avcodec_register_all();

    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (codec == NULL) {
        NSLog(@"H.264 decoder not found");
        return;
    }

    codecCtx = avcodec_alloc_context3(codec);
    if (codecCtx == NULL) {
        NSLog(@"failed to allocate codec context");
        return;
    }

    int ret = avcodec_open2(codecCtx, codec, nil);
    if (ret != 0){
        NSLog(@"open codec failed :%d",ret);
        // Release the context we just allocated so a retry doesn't leak it.
        avcodec_free_context(&codecCtx);
        return;
    }

    frame = av_frame_alloc();
    RGBFrame = av_frame_alloc();

    if(RGBFrame == NULL || frame == NULL)
    {
        NSLog(@"分配帧失败");
        // Free whichever frame did get allocated (av_frame_free is NULL-safe).
        av_frame_free(&frame);
        av_frame_free(&RGBFrame);
        avcodec_close(codecCtx);
        avcodec_free_context(&codecCtx);
        return ;
    }

    // Initialize the reusable packet once; decode only overwrites data/size.
    av_init_packet(&packet);
}

// The decoder environment is set up above; the method below uses that decoder
// to perform the actual decoding.

// When data arrives, call the method below to produce a UIImage.
// The input must be one complete frame packet — a full I-frame or P-frame,
// i.e. a self-contained unit that can be decoded into a displayable picture.
// Once the decoder environment above is set up, this method performs the decoding.

/// Decodes one complete H.264 frame packet (a full I- or P-frame) into a UIImage.
/// @param buf  Pointer to the raw H.264 frame data (Annex-B framed).
/// @param size Number of valid bytes in buf.
/// @return The decoded image, or nil if decoding produced no displayable picture.
- (UIImage *)decodeWithMediaPureData:(uint8_t *)buf Size:(int) size{

    // Bail out on empty input or if initVideoForNetH264Stream failed/was skipped.
    if (buf == NULL || size <= 0 || codecCtx == NULL || frame == NULL || RGBFrame == NULL) {
        return nil;
    }

    // Wrap the incoming data in the reusable AVPacket.
    av_init_packet(&packet);
    packet.data = buf;
    packet.size = size;

    // Decode the packet into `frame`.
    int got_picture_ptr = 0;
    int ret = avcodec_decode_video2(codecCtx, frame, &got_picture_ptr, &packet);
    if(ret <= 0 ){
        NSLog(@"decode frame failed!");
        return nil;
    }

    // The decoder can consume bytes without emitting a picture (e.g. SPS/PPS or
    // a frame that is still buffered) — without this check we would convert a
    // stale or uninitialized frame.
    if (!got_picture_ptr || codecCtx->width <= 0 || codecCtx->height <= 0) {
        return nil;
    }

    // (Re)allocate the RGB pixel buffer. Growing it when the stream resolution
    // increases prevents a heap overflow in sws_scale; av_free(NULL) is safe.
    int required = avpicture_get_size(PIX_FMT_RGB24, codecCtx->width, codecCtx->height);
    if (buffer == NULL || required > numBytes) {
        av_free(buffer);
        buffer = av_malloc(required);
        if (buffer == NULL) {
            numBytes = 0;
            return nil;
        }
    }
    numBytes = required;
    avpicture_fill((AVPicture *)RGBFrame, buffer, PIX_FMT_RGB24, codecCtx->width, codecCtx->height);

    // Reuse (or lazily create) the YUV -> RGB24 conversion context.
    img_convert_ctx = sws_getCachedContext(img_convert_ctx, codecCtx->width, codecCtx->height, codecCtx->pix_fmt, codecCtx->width, codecCtx->height, PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);

    if(!img_convert_ctx)
    {
        NSLog(@"不能初始化 sws conversion context");
        return nil;
    }
    sws_scale(img_convert_ctx, (const uint8_t* const *)frame->data, frame->linesize, 0, codecCtx->height, RGBFrame->data, RGBFrame->linesize);

    return [self convertImageWithAVFrameRGB:RGBFrame Width:codecCtx->width Height:codecCtx->height];
}

/// Wraps an RGB24 AVFrame's pixel data in a UIImage.
/// @param frameRGB Frame whose data[0] holds packed RGB24 rows (linesize[0] bytes each).
/// @param width    Picture width in pixels.
/// @param height   Picture height in pixels.
/// @return A UIImage owning a copy of the pixels, or nil on invalid input/allocation failure.
-(UIImage *)convertImageWithAVFrameRGB:(AVFrame *) frameRGB Width: (int)width Height:(int)height {
    if (frameRGB == NULL || frameRGB->data[0] == NULL || width <= 0 || height <= 0) {
        return nil;
    }

    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;

    // COPY the pixel bytes. The source buffer is the shared, reused decode
    // buffer: a no-copy provider (CFDataCreateWithBytesNoCopy) would leave the
    // returned image referencing memory that the next decoded frame overwrites,
    // corrupting any image the caller keeps or displays.
    CFDataRef data = CFDataCreate(kCFAllocatorDefault,
                                  (const UInt8 *)frameRGB->data[0],
                                  (CFIndex)frameRGB->linesize[0] * height);
    if (data == NULL) {
        return nil;
    }

    CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    CGImageRef cgImage = CGImageCreate(width,
                                       height,
                                       8,                      // bits per component
                                       24,                     // bits per pixel (packed RGB24)
                                       frameRGB->linesize[0],  // bytes per row (may include padding)
                                       colorSpace,
                                       bitmapInfo,
                                       provider,
                                       NULL,
                                       YES,
                                       kCGRenderingIntentDefault);
    CGColorSpaceRelease(colorSpace);

    UIImage *image =[UIImage imageWithCGImage:cgImage ];

    CGImageRelease(cgImage);
    CGDataProviderRelease(provider);
    CFRelease(data);

    return image;
}

@end
