//
//  VideoFrameCodec.m
//  rtspWithPlayer
//
//  Created by methew.

#import "VideoFrameCodec.h"
#import "Utilities.h"

@class iFrameExtractorAppDelegate;

extern iFrameExtractorAppDelegate * playerDelegate;

// Private methods, declared as a class extension rather than a named
// "private" category so the compiler verifies they are all implemented.
@interface VideoFrameCodec ()
-(void)convertFrameToRGB;
-(UIImage *)imageFromAVPicture:(AVPicture)pict width:(int)width height:(int)height;
// Fix: this was declared as -savePicture:width:height:index:, but the
// implementation below defines -savePPMPicture:... — the old declaration
// referred to a method that does not exist anywhere in the file.
-(void)savePPMPicture:(AVPicture)pFrame width:(int)width height:(int)height index:(int)iFrame;
-(void)setupScaler;
@end

@implementation VideoFrameCodec

@synthesize outputWidth, outputHeight;

// Custom setter: a new output width invalidates the scaler, which must be
// rebuilt to produce frames at the new size.
-(void)setOutputWidth:(int)newValue {
	if (newValue == outputWidth) {
		return; // unchanged — keep the existing scaler
	}
	outputWidth = newValue;
	[self setupScaler];
}

// Custom setter: a new output height invalidates the scaler, which must be
// rebuilt to produce frames at the new size.
-(void)setOutputHeight:(int)newValue {
	if (newValue == outputHeight) {
		return; // unchanged — keep the existing scaler
	}
	outputHeight = newValue;
	[self setupScaler];
}

// Converts the most recently decoded frame to RGB and wraps it in an
// autoreleased UIImage.  Returns nil if nothing has been decoded yet.
-(UIImage *)currentImage {
	if (pFrame->data[0] == NULL) {
		return nil; // no decoded frame available
	}
	[self convertFrameToRGB];
	return [self imageFromAVPicture:picture width:outputWidth height:outputHeight];
}

// Total duration of the opened media, in seconds (the container reports it
// in AV_TIME_BASE ticks).
-(double)duration {
	double durationTicks = (double)pFormatCtx->duration;
	return durationTicks / AV_TIME_BASE;
}

// Width of the source video stream, in pixels (as reported by the decoder).
-(int)sourceWidth {
	int sourceWidth = pCodecCtx->width;
	return sourceWidth;
}

// Height of the source video stream, in pixels (as reported by the decoder).
-(int)sourceHeight {
	int sourceHeight = pCodecCtx->height;
	return sourceHeight;
}

// Designated initializer: opens the movie at moviePath, locates the first
// video stream, and prepares an FFmpeg decoder for it.
// Returns nil (after releasing self) on any failure.
-(id)initWithVideo:(NSString *)moviePath {
	if (!(self=[super init])) return nil;

    AVCodec         *pCodec;
    const char      *path;

    // Register all formats and codecs
    av_register_all();

    // Fix: the previous -cStringUsingEncoding:NSASCIIStringEncoding returned
    // NULL for any non-ASCII path and handed that NULL straight to FFmpeg.
    // -fileSystemRepresentation yields the correct bytes for any path.
    path = [moviePath fileSystemRepresentation];
    if(path == NULL)
        goto initError; // Path not representable

    // Open video file
    if(av_open_input_file(&pFormatCtx, path, NULL, 0, NULL)!=0)
        goto initError; // Couldn't open file

    // Retrieve stream information
    if(av_find_stream_info(pFormatCtx)<0)
        goto initError; // Couldn't find stream information

    // Find the first video stream
    videoStream=-1;
    for(int i=0; i<pFormatCtx->nb_streams; i++)
	{
        if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO)
        {
            videoStream=i;
            break;
        }
	}
    if(videoStream==-1)
        goto initError; // Didn't find a video stream

    // Get a pointer to the codec context for the video stream.
    // Note: this context is owned by pFormatCtx; dealloc closes (not frees) it.
    pCodecCtx=pFormatCtx->streams[videoStream]->codec;

	// Find the decoder for the video stream
	pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
	if(pCodec==NULL)
        goto initError; // Codec not found

	// The bit stream may deliver truncated packets; tell the decoder so it
	// can buffer partial frames instead of erroring out.
	if(pCodec->capabilities & CODEC_CAP_TRUNCATED)
		pCodecCtx->flags|=CODEC_FLAG_TRUNCATED;

    // Open codec
    if(avcodec_open(pCodecCtx, pCodec)<0)
        goto initError; // Could not open codec

    // Allocate the frame decoded (YUV) pictures are written into.
    // Fix: the allocation result was previously never checked.
    pFrame=avcodec_alloc_frame();
    if(pFrame==NULL)
        goto initError; // Out of memory

	// Default the RGB output size to the source size; the property setters
	// build the swscale context as a side effect.
	self.outputWidth = pCodecCtx->width;
	self.outputHeight = pCodecCtx->height;
	return self;

initError:
	[self release];
	return nil;
}


// (Re)creates the RGB output picture and the swscale context that converts
// decoded frames to outputWidth x outputHeight RGB24.  Called by the
// output-size property setters whenever either dimension changes.
-(void)setupScaler {

	// Release old picture and scaler (both calls tolerate NULL/zeroed state).
	avpicture_free(&picture);
	sws_freeContext(img_convert_ctx);
	img_convert_ctx = NULL;

	// Allocate RGB picture
	avpicture_alloc(&picture, PIX_FMT_RGB24, outputWidth, outputHeight);

	// Setup scaler.  (Fix: the flags local had pointless 'static' storage.)
	int sws_flags = SWS_FAST_BILINEAR;
	img_convert_ctx = sws_getContext(pCodecCtx->width,
									 pCodecCtx->height,
									 pCodecCtx->pix_fmt,
									 outputWidth,
									 outputHeight,
									 PIX_FMT_RGB24,
									 sws_flags, NULL, NULL, NULL);
	// Fix: a NULL context was previously passed silently to sws_scale later.
	if (img_convert_ctx == NULL)
		NSLog(@"setupScaler: sws_getContext failed (%dx%d -> %dx%d)",
			  pCodecCtx->width, pCodecCtx->height, outputWidth, outputHeight);
}

// Seeks the video stream to the given position (in seconds), then flushes
// the decoder so no stale reference frames survive the jump.
-(void)seekTime:(double)seconds {
	AVRational timeBase = pFormatCtx->streams[videoStream]->time_base;
	// Convert seconds into stream time-base ticks (ticks = s * den/num).
	int64_t targetTs = (int64_t)((double)timeBase.den / timeBase.num * seconds);
	// Fix: the timestamp is in time-base units, but AVSEEK_FLAG_FRAME declared
	// it to be a frame *index* — wrong units.  With a stream index supplied,
	// flags=0 interprets ts in that stream's time base.  Also widen min_ts:
	// min==ts==max demanded an exact-timestamp landing, which routinely fails;
	// allowing any earlier point lets the demuxer snap to the prior keyframe.
	avformat_seek_file(pFormatCtx, videoStream, INT64_MIN, targetTs, targetTs, 0);
	avcodec_flush_buffers(pCodecCtx);
}

// MRC teardown.  Order matters: scaler and RGB picture first, then the
// decoder state, and only then the demuxer that owns pCodecCtx.
-(void)dealloc {
	// Free scaler
	sws_freeContext(img_convert_ctx);	

	// Free RGB picture
	avpicture_free(&picture);
	
    // Free the YUV frame
    av_free(pFrame);
	
    // Close the codec
    if (pCodecCtx) 
		avcodec_close(pCodecCtx);
	
    // Close the video file (also frees the codec context it owns)
    if (pFormatCtx) 
		av_close_input_file(pFormatCtx);
	
	// NOTE(review): 'currentImage' here must resolve to an ivar declared in
	// the header (the -currentImage method above returns an autoreleased
	// image and never stores it).  Verify the ivar exists and is retained
	// somewhere; otherwise this release is a no-op on nil or an over-release.
	[currentImage release];
	[super dealloc];
}


// Upper bound on how much of the media file is read per call (10 MiB).
// NOTE(review): expansion is unparenthesized; current uses are safe, but it
// should be (10 * 1024 * 1024) to survive future arithmetic contexts.
#define PACKETSIZE 10 * 1024 * 1024

// Fetches one encoded video frame into the 'packetData' ivar and sets
// 'mediaDataLen' to its (padded) usable length.
//
// PLAY_MEDIA_FILE builds: scans a bundled file, starting at the persistent
// ivar 'mediaFileOffset', for MPEG-4 VOP start codes (00 00 01 B6); the
// bytes of exactly one frame are copied into a fresh, zero-padded, 8-byte
// aligned malloc'd buffer.  Otherwise: pulls the next packet from the app
// delegate's network FIFO.
//
// Returns YES when a frame was produced, NO on EOF / open failure / bad data.
-(BOOL)getAframeData
{
	
#if defined(PLAY_MEDIA_FILE)
	NSString * path = [Utilities bundlePath:/*@"sophie.mov"*//*@"videoh264"*/@"fmpegHD"];
	// NOTE(review): should be 'const char *' (and this conversion returns
	// NULL for non-ASCII paths — fopen(NULL, ...) below would be undefined).
	char * cstrPath = [path cStringUsingEncoding:NSASCIIStringEncoding];

#else
	NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
	NSString *direct = [paths objectAtIndex:0];
	direct = [direct stringByAppendingPathComponent:@"mediaDataBeforeDecode"];
	const char * cstrPath = [direct cStringUsingEncoding:NSASCIIStringEncoding];
#endif	

#if defined(PLAY_MEDIA_FILE)
	FILE * fHandle = fopen(cstrPath, "rb");
	if (fHandle) 
	{
		unsigned char * ptrMediaData = NULL;
		
		// Rolling 32-bit window over the byte stream, used to spot the
		// 4-byte start code.  Seeded with 0xffffffff so no false match can
		// occur before 4 real bytes have been shifted in.
		unsigned int head = 0xffffffff;
		int i = 0;
		int packetCount = 0;
		
		int fSize = 0;
		
		// Measure how much file remains past the resume offset.
		fseek(fHandle, 0, SEEK_END);
		fSize = ftell(fHandle);
		fSize -= mediaFileOffset;
		fseek(fHandle, mediaFileOffset, SEEK_SET);
		ptrMediaData = (unsigned char *)malloc(PACKETSIZE);
		memset(ptrMediaData,0,PACKETSIZE);
		
		// Read at most PACKETSIZE bytes in one gulp.
		fSize = (PACKETSIZE > fSize) ? fSize : PACKETSIZE;
		
		mediaDataLen = fread(ptrMediaData, sizeof(unsigned char), fSize, fHandle);
		
		for (i = 0; i < mediaDataLen; i ++) 
		{
			head = head << 8;
			head |= ptrMediaData[i];
			if (head == 0x000001B6) //vop head
			{
				packetCount ++;
			}
			
			// The frame we want lies between the 1st and 2nd start codes
			// (the 1st is at the very start of the window, by construction
			// of mediaFileOffset).
			if (packetCount > 1) //one  packet
			{
				
				// i indexes the final byte (0xB6) of the 2nd start code, so
				// the frame ends 4 bytes earlier: length = i - 4 + 1.
				mediaDataLen = i - 4 + 1;
				mediaFileOffset += mediaDataLen;
				
				// Drop any packet a previous call left behind.
				if (packetData) 
				{
					free(packetData);
					packetData = NULL;
				}
				
				// NOTE(review): always true here (packetData was just freed
				// and nulled above) — the branch guard is redundant.
				if (!packetData) 
				{
					// Round the allocation up so at least 8 zero bytes of
					// padding follow the payload (same scheme as
					// -setMyFrameToDecoder:size:).
					int iCount = mediaDataLen;
					int tmp = iCount%8;
					if (tmp == 0) //align 8
					{
						iCount += 8;//end of packet
					}
					else
					{
						iCount -= tmp;
						iCount += 16;
					}

					packetData = (unsigned char *)malloc(iCount);
					memset(packetData,0,iCount);
					memcpy(packetData,ptrMediaData,mediaDataLen);
					
					// Report the padded length minus the trailing sentinel.
					mediaDataLen = iCount - 8;
					
//					NSLog(@"got media data!");
					
					free(ptrMediaData);
					ptrMediaData = NULL;
					fclose(fHandle);
					fHandle = NULL;
					return YES;
					
				}
				
			}
			
		}
		
		// Fewer than two start codes found: no complete frame available.
//		NSLog(@"bad format!");
		
		free(ptrMediaData);
		ptrMediaData = NULL;
		fclose(fHandle);
		fHandle = NULL;
		return NO;
	}
	else 
	{
		return NO;
	}
#else
	
	// Streaming build: the delegate fills packetData/mediaDataLen from its
	// network FIFO and reports success.
	if(playerDelegate)
	{
		return [playerDelegate getMediaDataToFIFO:&packetData size:&mediaDataLen];
	}
	else 
	{
		return NO;
	}


#endif
	
}


// Feeds one raw, already-demuxed encoded frame to the decoder.
// data/len: the encoded frame bytes.  They are copied into a zero-filled,
// padded allocation (the decoder needs readable padding past the payload),
// decoded into pFrame, and the temporary buffer is freed before returning.
// Returns YES when a complete picture was produced.
-(BOOL)setMyFrameToDecoder:(unsigned char *)data size:(int)len
{
	int frameFinished = 0;
	AVPacket packet;

	// Round the buffer size up so at least 8 zero bytes of padding follow
	// the payload (mirrors the sizing scheme used by -getAframeData).
	int paddedLen = len;
	int rem = paddedLen % 8;
	if (rem == 0) //align 8
	{
		paddedLen += 8; //end of packet
	}
	else
	{
		paddedLen -= rem;
		paddedLen += 16;
	}

	packet.data = (unsigned char *)malloc(paddedLen);
	if (packet.data == NULL)
		return NO; // allocation failed — nothing decoded
	packet.size = paddedLen;

	memset(packet.data, 0, paddedLen);
	memcpy(packet.data, data, len);

	avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, packet.data, packet.size);

	// Fix: the temporary packet buffer used to leak on every single call.
	free(packet.data);
	packet.data = NULL;

	return frameFinished ? YES : NO;
}


// Pulls the next encoded frame (from the media file or the network FIFO,
// via -getAframeData) and decodes it into pFrame.  The packetData buffer is
// consumed and freed here.  Returns YES when a complete picture is ready.
-(BOOL)stepFrame {

	AVPacket packet;
	int frameFinished = 0;

	if ([self getAframeData])
	{
		// Fix: the stack packet was previously used uninitialized, so
		// avcodec_decode_video2 read garbage in fields such as flags and
		// side_data.  av_init_packet gives every field a defined default.
		av_init_packet(&packet);
		packet.data = packetData;
		packet.size = mediaDataLen;
		packet.stream_index = videoStream;
		// 0x8000000000000000 is AV_NOPTS_VALUE — "no timestamp known".
		packet.pts = AV_NOPTS_VALUE;
		packet.dts = AV_NOPTS_VALUE;

#if !defined(RTSP_IPUX)		
//		pCodecCtx->bit_rate = 64000;
//		pCodecCtx->flags = 16384;
//		pCodecCtx->flags2 = 0x16680;//917760;
//		pCodecCtx->bidir_refine = 1;
//		pCodecCtx->reordered_opaque = 0x8000000000000000;
#endif//RTSP_IPUX

		// Decode video frame
		avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

		// The packet buffer is single-use; release it immediately.
		free(packetData);
		packetData = NULL;
		mediaDataLen = 0;
	}
	return frameFinished != 0;
}

// Converts the decoded YUV frame (pFrame) into the RGB24 'picture' buffer
// via the pre-built swscale context (which also handles the resize).
-(void)convertFrameToRGB {
	// Fix: srcSliceH is the number of *source* rows to consume.  The old
	// code passed max(pCodecCtx->height, outputHeight), which told swscale
	// to read rows past the end of the source frame whenever the output was
	// taller than the source (out-of-bounds read).  The scaler itself
	// produces the outputHeight-tall result.
	sws_scale (img_convert_ctx, pFrame->data, pFrame->linesize,
			   0, pCodecCtx->height,
			   picture.data, picture.linesize);
}

// Wraps an RGB24 AVPicture in an autoreleased UIImage without copying the
// pixel data (kCFAllocatorNull = "don't free, don't own").
// NOTE(review): because the bytes are NOT copied, the returned UIImage is
// only valid while pict's buffer stays alive — -setupScaler reallocates the
// 'picture' ivar and would leave a previously returned image dangling.
// Confirm callers consume the image before the next resize.
-(UIImage *)imageFromAVPicture:(AVPicture)pict width:(int)width height:(int)height {
	CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
	// linesize[0] is the stride in bytes of each pixel row (may exceed width*3).
	CFDataRef data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, pict.data[0], pict.linesize[0]*height,kCFAllocatorNull);
	CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
	CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
	// 8 bits per component, 24 bits per pixel: packed RGB, no alpha.
	CGImageRef cgImage = CGImageCreate(width, 
									   height, 
									   8, 
									   24, 
									   pict.linesize[0], 
									   colorSpace, 
									   bitmapInfo, 
									   provider, 
									   NULL, 
									   NO, 
									   kCGRenderingIntentDefault);
	CGColorSpaceRelease(colorSpace);
	UIImage *image = [UIImage imageWithCGImage:cgImage];
	// The UIImage retains the CGImage; release our CF ownership of the rest.
	CGImageRelease(cgImage);
	CGDataProviderRelease(provider);
	CFRelease(data);
	
	return image;
}

// Rewinds the file-playback cursor to the start of the media file and
// discards any pending packet buffer.
-(void)resetMediaFile
{
	mediaFileOffset = 0;
	mediaDataLen = 0;
	if (packetData != NULL)
	{
		free(packetData);
		packetData = NULL;
	}
}

// Debug helper: dumps an RGB24 picture to Documents/image%04d.ppm as a
// binary PPM (P6).  Silently returns if the file cannot be opened.
-(void)savePPMPicture:(AVPicture)pict width:(int)width height:(int)height index:(int)iFrame {
	NSString *fileName = [Utilities documentsPath:[NSString stringWithFormat:@"image%04d.ppm",iFrame]];
//    NSLog(@"write image file: %@",fileName);
	FILE *outFile = fopen([fileName cStringUsingEncoding:NSASCIIStringEncoding], "wb");
	if (outFile == NULL)
		return;

	// PPM header: magic, dimensions, maximum channel value.
	fprintf(outFile, "P6\n%d %d\n255\n", width, height);

	// Rows are linesize[0] bytes apart; only width*3 bytes of each are pixels.
	for (int row = 0; row < height; row++)
		fwrite(pict.data[0] + row * pict.linesize[0], 1, width * 3, outFile);

	fclose(outFile);
}



@end
