/* Minimal video player: decodes a file with the legacy FFmpeg API and either
 * renders it through an SDL 1.2 YUV overlay on a worker thread, or dumps the
 * raw planar YUV to disk, depending on the compile-time switches below. */
#include"common.h"
/* Compile-time feature switches. */
#define USE_SDL_LIB 1 /* 1: render via SDL overlay + event loop; 0: decode-only path */
#define USE_YUV_SAVE 0 /* 1: append every decoded frame to "test.yuv" as planar IYUV */
#define SHOW_FRAME_PTS 0 /* 1: log per-frame pts/delay diagnostics */
/* Demuxer/decoder state shared by all functions in this file. */
static AVFormatContext*fmt_ctx;
static AVCodecContext *dec_ctx;
static int video_stream_index = -1;
static int64_t last_pts = AV_NOPTS_VALUE; /* pts of the previously displayed frame, for pacing */
#if USE_YUV_SAVE
static FILE*yuvfp=NULL; /* raw YUV dump file, opened in TestFilterVideo */
#endif
#if USE_SDL_LIB
static SDL_Surface*screen = NULL; /* main SDL window surface */
static SDL_Overlay*image = NULL; /* IYUV overlay the decoded planes are copied into */
static SDL_Thread *thread = NULL; /* decode thread running SDL_VIDEO_Thread */
static char*yuvbuffer=NULL; /* contiguous Y+U+V buffer backing the overlay planes */
static SDL_Rect yuvrect ={0}; /* destination rectangle (full frame) */
int SDL_VIDEO_Thread(void*data);
#endif
static int running = 1; /* cleared by SDL_QUIT or end-of-stream; stops both loops */
int open_input_file(const char*filename);
int display_frame(const AVFrame *frame, AVRational time_base);
int read_every_frame();
int TestFilterVideo(const char*filename){
    avcodec_register_all();
	av_register_all();
    if (open_input_file(filename)< 0)
		goto end;
#if USE_YUV_SAVE
	yuvfp =fopen("test.yuv","wb");
#endif
#if USE_SDL_LIB
	SDL_Init(SDL_INIT_VIDEO);
	screen = SDL_SetVideoMode(dec_ctx->width, dec_ctx->height, 24, SDL_SWSURFACE);
	image =  SDL_CreateYUVOverlay(dec_ctx->width, dec_ctx->height, SDL_IYUV_OVERLAY, screen);
    yuvbuffer=(char*)malloc(dec_ctx->width*dec_ctx->height*3/2);
    yuvrect.w=dec_ctx->width;
    yuvrect.h=dec_ctx->height;
    image->pixels[0] = yuvbuffer;
	image->pixels[1] = yuvbuffer+dec_ctx->width*dec_ctx->height;
	image->pixels[2] = yuvbuffer+dec_ctx->width*dec_ctx->height*5/4;
	thread=SDL_CreateThread(SDL_VIDEO_Thread, NULL);
	while (running) {
		SDL_Event event = { 0 };
		SDL_PollEvent(&event);
		if (event.type == SDL_QUIT) {
			printf("Recv Quit Single\n");
			running = 0;
		}
	}
#else
	read_every_frame();
#endif
end:
#if USE_YUV_SAVE
    if (yuvfp !=NULL) {
        fclose(yuvfp);
		yuvfp =NULL;
    }
#endif
#if USE_SDL_LIB
	if(image)SDL_FreeYUVOverlay(image);
    if(yuvbuffer)free(yuvbuffer);
	SDL_Quit();
#endif
    if(dec_ctx) avcodec_close(dec_ctx);
    avformat_close_input(&fmt_ctx);
    return 0;
}
/**
 * Open @filename, locate the best video stream and open its decoder.
 *
 * On success fills the file-scope fmt_ctx, dec_ctx and video_stream_index
 * and returns 0; on failure logs the error and returns a negative AVERROR
 * (the caller is responsible for cleanup via avformat_close_input).
 */
int open_input_file(const char*filename){
    AVCodec *dec;
    int ret;
    if ((ret = avformat_open_input(&fmt_ctx, filename, NULL, NULL)) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot open input file\n");
        return ret;
    }

    if ((ret = avformat_find_stream_info(fmt_ctx, NULL)) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot find stream information\n");
        return ret;
    }

    /* select the video stream (also resolves a suitable decoder into dec) */
    ret = av_find_best_stream(fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0);
    if (ret < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot find a video stream in the input file\n");
        return ret;
    }
    video_stream_index = ret;
    dec_ctx = fmt_ctx->streams[video_stream_index]->codec;

    /* init the video decoder */
    if ((ret = avcodec_open2(dec_ctx, dec, NULL)) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot open video decoder\n");
        return ret;
    }

    /* A timebase is a rational num/den — print numerator first to match
     * the "timebase:%d/%d" label (the original printed den/num, inverted). */
    printf("codec:%s(%s) width:%d height:%d timebase:%d/%d\n",
        avcodec_get_name(dec_ctx->codec_id),
        av_get_pix_fmt_name(dec_ctx->pix_fmt),
        dec_ctx->width, dec_ctx->height,
        dec_ctx->pkt_timebase.num, dec_ctx->pkt_timebase.den);
    return 0;
}
int read_every_frame() {
	AVPacket packet;
	AVFrame *frame = av_frame_alloc();
	int got_frame;
	int ret;
	while (running) {
		if ((ret = av_read_frame(fmt_ctx, &packet)) < 0)
			break;
		if (packet.stream_index == video_stream_index) {
			//avcodec_get_frame_defaults(frame);
			got_frame = 0;
			ret = avcodec_decode_video2(dec_ctx, frame, &got_frame, &packet);
			if (ret < 0) {
				av_log(NULL, AV_LOG_ERROR, "Error decoding video\n");
				break;
			}
#if SHOW_FRAME_PTS
			printf("got_frame:%d packet_pts:%I64d frame_pts:%I64d\n", got_frame, packet.pts, frame->pts);
#endif
			if (got_frame) {
				frame->pts = av_frame_get_best_effort_timestamp(frame);
				/* pull filtered frames from the filtergraph */
				display_frame(frame, dec_ctx->pkt_timebase);
			}
		}
		av_packet_unref(&packet);
	}
	av_frame_free(&frame);
	return 0;
}
/**
 * Pace and present one decoded frame.
 *
 * Sleeps for the pts delta to the previous frame (converted to microseconds),
 * copies the three planes line-by-line (stripping linesize padding) into the
 * contiguous overlay buffer, and displays the overlay. With USE_YUV_SAVE the
 * tightly-packed planes are also appended to the dump file.
 *
 * @param frame      decoded video frame (planar YUV expected by both sinks)
 * @param time_base  timebase frame->pts is expressed in
 * @return 0 always
 */
int display_frame(const AVFrame *frame, AVRational time_base){
#if USE_YUV_SAVE
    int i, j; /* only needed by the dump loop below */
#endif
    if (frame->pts != AV_NOPTS_VALUE) {
        if (last_pts != AV_NOPTS_VALUE) {
            /* sleep roughly the right amount of time;
             * usleep is in microseconds, just like AV_TIME_BASE. */
#if (_MSC_VER == 1200)
            /* VC6 cannot handle the AV_TIME_BASE_Q compound literal. */
            AVRational cq = {1, AV_TIME_BASE};
            int64_t delay = av_rescale_q(frame->pts - last_pts, time_base, cq);
#else
            int64_t delay = av_rescale_q(frame->pts - last_pts, time_base, AV_TIME_BASE_Q);
#endif
#if SHOW_FRAME_PTS
            do {
                double pts = av_q2d(time_base) * frame->pts;
                printf("frame->pts:%I64d delay:%I64d timestamp:%f ms\n", frame->pts, delay, pts * 1000);
            } while (0);
#endif
#if USE_SDL_LIB
            do {
                int plane, row;
                char *pos = yuvbuffer;
                for (plane = 0; plane < 3; plane++) {
                    /* chroma planes are half height/width in 4:2:0 */
                    int shift = plane > 0 ? 1 : 0;
                    const uint8_t *src = frame->data[plane];
                    for (row = 0; row < frame->height >> shift; row++) {
                        memcpy(pos, src, (size_t)(frame->width >> shift));
                        src += frame->linesize[plane];
                        pos += frame->width >> shift;
                    }
                }
            } while (0);
            SDL_DisplayYUVOverlay(image, &yuvrect);
            /* Clamp: a timestamp discontinuity makes delay negative, and
             * SDL_Delay takes an unsigned — a negative value would convert
             * to a multi-day sleep and freeze playback. */
            if (delay > 0)
                SDL_Delay((Uint32)(delay / 1000));
#endif
        }
        last_pts = frame->pts;
    }
#if USE_YUV_SAVE
    if (yuvfp == NULL) {
        return 0;
    }
    for (i = 0; i < 3; i++) {
        int shift = i > 0 ? 1 : 0;
        const uint8_t *yuv = frame->data[i];
        for (j = 0; j < frame->height >> shift; j++) {
            fwrite(yuv, sizeof(char), frame->width >> shift, yuvfp);
            yuv += frame->linesize[i];
        }
    }
#endif
    return 0;
}
#if USE_SDL_LIB
/*
 * Decode-thread entry point: runs the demux/decode loop to completion, then
 * clears `running` so the main-thread event loop exits too.
 *
 * Defined without `static` to match the external-linkage prototype near the
 * top of the file — declaring external linkage and then defining the same
 * identifier `static` is undefined behavior (C11 6.2.2p7).
 */
int SDL_VIDEO_Thread(void*data) {
	(void)data; /* unused: the thread works on file-scope state */
	read_every_frame();
	running = 0;
	return 0;
}
#endif
