#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <time.h>

#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>

#include <linux/videodev.h>
#include <linux/videodev2.h>

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>

#include <SDL.h>
#include <SDL_thread.h>

#include "common.h"

/*****************************************************************************/
/******************************** PROTOTYPES *********************************/
/*****************************************************************************/

/*****************************************************************************/
/******************************** PROTOTYPES *********************************/
/*****************************************************************************/

int run_screen();
int driver_nego();
int show_cap();
int set_format();
int allocate_buffers();
void img_convert(AVPicture * target , int targetFmt, AVPicture * source ,int sourceFmt,int w, int h);
int get_AV_format(unsigned int v4l2_format);


/*****************************************************************************/
/*************************** GLOBAL VARIABLES ********************************/
/*****************************************************************************/

int file_desc;          /* fd of the open capture device (set by driver_nego) */
int i_from_main;        /* scratch int handed to VIDIOC_STREAMOFF */
static char my_video_dev[256] = "/dev/video1";  /* capture device node path */
struct v4l2_format fmt; /* capture format negotiated with the driver */
int 	used_format;       /* libav pixel format matching fmt.fmt.pix.pixelformat */

/*****************************************************************************/
/***************************    Methods		  ********************************/
/*****************************************************************************/

/*
 * VIDIOC_QUERYCAP
 * Get driver capabilities
 * Ret	: -1 - fail
 *	  otherwise - success
 */
int show_cap()
{
	struct v4l2_capability gCap;
	unsigned int flags;
	int i;

	i = ioctl(file_desc, VIDIOC_QUERYCAP, &gCap);
	if( i ) {
		perror("VIDIOC_QUERYCAP");
		return -1;
	}

	flags = gCap.capabilities;

	if( (flags & (V4L2_CAP_VIDEO_CAPTURE|V4L2_CAP_STREAMING)) != (V4L2_CAP_VIDEO_CAPTURE|V4L2_CAP_STREAMING) ) {
		ERROR("This driver doesn't support video streaming\n");
		return -1;
	}
	return 0;
}

/*
 * Convert 'source' (pixel format sourceFmt) into 'target' (targetFmt),
 * both w x h, using swscale with bicubic filtering.
 *
 * BUG FIX: the old code created the SwsContext once and cached it
 * forever, so a later call with a different size or format silently
 * reused a stale context.  sws_getCachedContext() reuses the context
 * when the parameters match and rebuilds it when they change.
 */
void img_convert(AVPicture * target , int targetFmt, AVPicture * source ,int sourceFmt,int w, int h)
{
	static struct SwsContext *img_convert_ctx = NULL;

	img_convert_ctx = sws_getCachedContext(img_convert_ctx,
					       w, h, sourceFmt,
					       w, h, targetFmt,
					       SWS_BICUBIC, NULL, NULL, NULL);
	if (img_convert_ctx == NULL) {
		ERROR("could not get a scaling context\n");
		return;
	}

	sws_scale(img_convert_ctx, (const uint8_t * const *)source->data,
		  source->linesize, 0, h, target->data, target->linesize);
}

/*
 * VIDIOC_S_FMT, VIDIOC_G_FMT
 * Set desired format, dimension (maximum) and interlaced frame
 * Out	:   output_width - output frame width
 *	  		output_height - output frame height
 *	  		output_bytes - size of output frame
 * Ret	: -1 - fail
 *	  otherwise - success
 */
int set_format()
{
	int ret;

	fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
	fmt.fmt.pix.bytesperline = 0;

	fmt.fmt.pix.width = 320;
	fmt.fmt.pix.height = 240;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;

	ret = ioctl(file_desc, VIDIOC_S_FMT, &fmt);
	if( ret ) {
		perror("VIDIOC_S_FMT");
		return -1;
	}
	used_format = get_AV_format(fmt.fmt.pix.pixelformat);
	return 0;
}

/*
 * Map a V4L2 pixel format code to the matching libav pixel format.
 * Anything unrecognized falls back to planar YUV 4:2:0.
 */
int get_AV_format(unsigned int v4l2_format)
{
	if (v4l2_format == V4L2_PIX_FMT_RGB24)
		return PIX_FMT_RGB24;

	/* V4L2_PIX_FMT_YUV420 and everything else */
	return PIX_FMT_YUV420P;
}

/*
 * Display video from v4l2 driver
 */
int run_screen()
{
	  AVFrame         			*pFrame;
	  int            			numBytes;
	  uint8_t         			*buffer;
  	  int 				        packet_offset;
  	  int 					    frame_finito;
	  int 						width, height;
	  SDL_Rect        			rect;
	  AVPicture 				pict;
	  AVPicture 				*frm;
//	  int 						frame_index;
//	  time_t 					secs;
	  SDL_Overlay     			*bmp;
	  SDL_Surface     			*screen;
	  frame_packet	  			*packet;


	  width = fmt.fmt.pix.width;
	  height = fmt.fmt.pix.height;


	// Register all formats and codecs
	av_register_all();

	if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
	{
		ERROR( "Could not initialize SDL - %s\n", SDL_GetError());
		return -1;
	}


	pFrame=avcodec_alloc_frame();
	if(pFrame==NULL)
		return -1;
	// Determine required buffer size and allocate buffer
	numBytes=avpicture_get_size(used_format, width, height);
	buffer=(uint8_t *)av_mallocz(numBytes*sizeof(uint8_t));

	// Assign appropriate parts of buffer to image planes in pFrameRGB
	// Note that pFrameRGB is an AVFrame, but AVFrame is a superset
	// of AVPicture
	avpicture_fill((AVPicture *)pFrame, buffer, used_format, width, height);

	#ifndef __DARWIN__
			screen = SDL_SetVideoMode(width, height, 0, 0);
	#else
			screen = SDL_SetVideoMode(width, height, 24, 0);
	#endif

	if(!screen)
	{
		ERROR( "SDL: could not set video mode - exiting\n");
		return -1;
	}
	bmp = SDL_CreateYUVOverlay(width, height,	SDL_YV12_OVERLAY, screen);

	ioctl(file_desc, VIDIOC_STREAMON, &packet_offset);
	//		return NULL;
//	secs = time(NULL);
//	frame_index = 0;
	while(1)
	{
//		frame_index++;
//		if( frame_index % 50 == 0)
//			fprintf(stderr, "frames per second: %f\n", ((float)frame_index)/(time(NULL) - secs));
		packet_offset = 0;
		frame_finito = 0;
		//buffer = read_last_frame();
		while(!frame_finito)
		{

			if(!ioctl(file_desc, VIDIOC_DQBUF, &packet))
			{
				//CopyBufferToFrame(pFrame, packet->length, packet->buffer, packet_offset);
				memcpy(buffer + packet_offset, packet->buffer, packet->length);
				packet_offset += packet->length;
				frame_finito = packet->frame_finito;
				ioctl(file_desc, VIDIOC_QBUF, packet);
				if(packet_offset > numBytes )
				{
					ERROR("recieving more packets than required for a frame\n");
					frame_finito = 1;
				}
			}
			else
			{
				SDL_Delay(100);
			}
		}

        SDL_LockYUVOverlay(bmp);

		pict.data[0] = bmp->pixels[0];
		pict.data[1] = bmp->pixels[2];
		pict.data[2] = bmp->pixels[1];

		pict.linesize[0] = bmp->pitches[0];
		pict.linesize[1] = bmp->pitches[2];
		pict.linesize[2] = bmp->pitches[1];

		frm = (AVPicture *)pFrame;

		img_convert(&pict, PIX_FMT_YUV420P, frm, used_format, width,height);

		SDL_UnlockYUVOverlay(bmp);

		rect.x = 0;
		rect.y = 0;
		rect.w = width;
		rect.h = height;

		SDL_DisplayYUVOverlay(bmp, &rect);
	}
}




// Stop streaming and release the video frame grabber device.
int close_video_FG()
{
	int rc = ioctl(file_desc, VIDIOC_STREAMOFF, &i_from_main);
	if (rc)
		perror("VIDIOC_STREAMOFF");
	close(file_desc);
	return 0;
}

/*
 * Negotiate with the driver: open the device, verify its capabilities
 * and set the capture format.
 * Ret	: -1 - fail
 *	  0 - success
 * (BUG FIX: the old comment documented "0 - fail", the opposite of
 * what the code returns and every caller checks.)
 */
int driver_nego()
{
	/* O_RDONLY instead of the magic 0 (same value, explicit intent) */
	file_desc = open(my_video_dev, O_RDONLY);
	if( file_desc == -1 )
	{
		ERROR( "Unable to open char device error:%s\n", strerror(errno));
		return -1;
	}

	DEBUG("Opened device: %s\n",my_video_dev);	/* typo fix: "Openned" */

	if( show_cap() || set_format() )
	{
		return -1;
	}

	return 0;
}

/*
 * Ask the driver for its capture buffers and queue one empty
 * frame_packet for each buffer it reports.
 * Ret	: -1 - fail
 *	  0 - success
 */
int allocate_buffers()
{
	frame_packet	  			*packet;
    struct v4l2_requestbuffers 	req_bufs;
    unsigned int i;

	/* BUG FIX: req_bufs was handed to the driver uninitialized (stack
	 * garbage).  Zero it first.
	 * NOTE(review): a stock V4L2 driver also expects .count, .type and
	 * .memory filled in before VIDIOC_REQBUFS — confirm whether this
	 * custom driver needs them. */
	memset(&req_bufs, 0, sizeof(req_bufs));

	if(ioctl(file_desc, VIDIOC_REQBUFS, &req_bufs))
	{
		ERROR( "Could not allocate buffers");
		return -1;
	}
	for( i=0; i<req_bufs.count; i++)
	{
		packet = (frame_packet *)av_mallocz(sizeof(frame_packet));
		if( packet == NULL)
		{
			ERROR("malloc failed\n");
			return -1;
		}
		if(ioctl(file_desc, VIDIOC_QBUF, packet))
		{
			ERROR("could not queue buffers\n");
			av_free(packet);	/* BUG FIX: packet leaked on this path */
			return -1;
		}
	}
	return 0;
}



/*
 * Entry point: negotiate with the driver, read back the granted format,
 * queue the capture buffers and run the display loop.
 * Ret	: 0 - success, -1 - fail
 */
int main(void)
{
	/* BUG FIX: driver_nego() takes no arguments — it opens the device
	 * itself and stores the fd in the global 'file_desc'.  The old call
	 * passed &file_desc, which only compiled because of the
	 * non-prototype declaration. */
	if( driver_nego() )
		return -1;

	/* Read back what the driver actually granted in set_format(). */
	if(ioctl(file_desc, VIDIOC_G_FMT,&fmt))
	{
		ERROR("Unable to get driver format\n");
		goto fmt_error;
	}

	if(allocate_buffers())
	{
		goto fmt_error;
	}

	run_screen();
	close_video_FG();
	return 0;

	fmt_error:
		close_video_FG();
	return -1;
}
