/*
 *  feeder.c - the process that will feed the web-cam driver frames
 *
 *  Until now we could have used cat for input and output.  But now
 *  we need to do ioctl's, which require writing our own process.
 */

/* 
 * device specifics, such as ioctl numbers and the
 * major device file. 
 */
#include "web_cam_dev.h"
#include <errno.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>		/* open */
#include <unistd.h>		/* exit */
#include <sys/ioctl.h>		/* ioctl */
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>

/*
 * Frame geometry used by FeedFrame.  frame_width is the row size in
 * BYTES (for RGB24 that is pixel width * 3 — see the ioctl_set_fr_size
 * call in main, which passes width * 3); frame_height is the row count.
 */
static int frame_width = 0;
static int frame_height = 0;


/* 
 * Functions for the ioctl calls 
 */

/*
 * Hand one frame buffer to the web-cam driver via IOCTL_SET_MSG.
 *
 * file_desc - open descriptor for the device file
 * message   - pixel data for the driver; its size was fixed earlier by
 *             the IOCTL_SET_FR_SIZE call
 *
 * Exits the process on failure.
 */
void ioctl_set_msg(int file_desc, char *message)
{
	int ret_val;

	ret_val = ioctl(file_desc, IOCTL_SET_MSG, message);

	if (ret_val < 0) {
		/* errors belong on stderr, and the errno text says why */
		fprintf(stderr, "ioctl_set_msg failed:%d (%s)\n",
			ret_val, strerror(errno));
		exit(-1);
	}
}

/*
 * Fetch a message from the driver via IOCTL_GET_MSG and print it.
 *
 * file_desc - open descriptor for the device file
 *
 * Exits the process on failure.
 */
void ioctl_get_msg(int file_desc)
{
	int ret_val;
	char message[100];

	/* 
	 * Warning - this is dangerous because we don't tell
	 * the kernel how far it's allowed to write, so it
	 * might overflow the buffer. In a real production
	 * program, we would have used two ioctls - one to tell
	 * the kernel the buffer length and another to give
	 * it the buffer to fill
	 */
	ret_val = ioctl(file_desc, IOCTL_GET_MSG, message);

	if (ret_val < 0) {
		/* errors belong on stderr, and the errno text says why */
		fprintf(stderr, "ioctl_get_msg failed:%d (%s)\n",
			ret_val, strerror(errno));
		exit(-1);
	}

	printf("get_msg message:%s\n", message);
}

void ioctl_set_fr_size(int file_desc, int width, int height)
{
	int ret_val;
	int * size = (int *)malloc(sizeof(int) * 2);
	size[0] = width;
	size[1] = height;

	ret_val = ioctl(file_desc, IOCTL_SET_FR_SIZE, (char *) size);

	if (ret_val < 0) {
		printf("ioctl_set_fr_size failed:%d\n", ret_val);
		exit(-1);
	}
}

/*
 * Copy one decoded RGB24 frame into a contiguous buffer and feed it to
 * the driver.
 *
 * file_desc - open descriptor for the device file
 * pFrame    - decoded frame; data[0] holds packed RGB24 rows, each row
 *             linesize[0] bytes apart (libav may pad rows)
 *
 * frame_width (bytes per row) and frame_height must have been set
 * before the first call; the frame is silently dropped otherwise.
 */
void FeedFrame(int file_desc, AVFrame *pFrame)
{
	char *message;
	char *dst;
	int row;

	/* Without a valid size we would malloc(0) and feed garbage. */
	if (frame_width <= 0 || frame_height <= 0) {
		fprintf(stderr, "FeedFrame: frame size not set\n");
		return;
	}

	message = malloc((size_t)frame_width * (size_t)frame_height);
	if (message == NULL) {
		fprintf(stderr, "FeedFrame: out of memory\n");
		return;
	}

	/*
	 * Copy row by row.  The SOURCE stride is pFrame->linesize[0], not
	 * frame_width: libav may pad each row, and using frame_width as
	 * the source stride would skew every row after the first.
	 */
	dst = message;
	for (row = 0; row < frame_height; ++row) {
		memcpy(dst, pFrame->data[0] + row * pFrame->linesize[0],
		       frame_width);
		dst += frame_width;
	}

	ioctl_set_msg(file_desc, message);
	free(message);
}

/* 
 * Main - Call the ioctl functions 
 */
int main(int argc, char ** argv)
{
	AVFormatContext *pFormatCtx = NULL;
	int i, videoStream;
	AVCodecContext *pCodecCtx;
	AVCodec *pCodec;
	AVFrame *pFrame;
	AVFrame *pFrameRGB;
	struct SwsContext * pSwsCtx;
	AVPacket packet;
	int frameFinished;
	int numBytes;
	uint8_t *buffer;
	int file_desc;
	char devLocation[256] = {0};

	strcpy(devLocation, "/dev/");
	strcat(devLocation, DEVICE_FILE_NAME);
	file_desc = open(devLocation, 0);
	if (file_desc < 0) {
		perror("The following error occured: ");
		printf("Can't open device file: %s\n", DEVICE_FILE_NAME);
		return -1;
	}

	if (argc < 2) {
		printf("Please provide a movie file\n");
		return -1;
	}
	// Register all formats and codecs
	av_register_all();

	// Open video file
	if (avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) != 0)
		return -1; // Couldn't open file

	// Retrieve stream information
	if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
		return -1; // Couldn't find stream information

	// Dump information about file onto standard error
	av_dump_format(pFormatCtx, 0, argv[1], 0);

	// Find the first video stream
	videoStream = -1;
	for (i = 0; i < pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
			videoStream = i;
			break;
		}
	if (videoStream == -1)
		return -1; // Didn't find a video stream

	// Get a pointer to the codec context for the video stream
	pCodecCtx = pFormatCtx->streams[videoStream]->codec;

	// Find the decoder for the video stream
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL) {
		fprintf(stderr, "Unsupported codec!\n");
		return -1; // Codec not found
	}
	// Open codec
	if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
		return -1; // Could not open codec

	// Allocate video frame
	pFrame = avcodec_alloc_frame();

	// Allocate an AVFrame structure
	pFrameRGB = avcodec_alloc_frame();
	if (pFrameRGB == NULL)
		return -1;

	// Determine required buffer size and allocate buffer
	numBytes = avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
			pCodecCtx->height);
	printf("numBytes for Frame: %d\n", numBytes);
	buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));

	// Assign appropriate parts of buffer to image planes in pFrameRGB
	// Note that pFrameRGB is an AVFrame, but AVFrame is a superset
	// of AVPicture
	avpicture_fill((AVPicture *) pFrameRGB, buffer, PIX_FMT_RGB24,
			pCodecCtx->width, pCodecCtx->height);

	pSwsCtx = sws_getContext(pCodecCtx->width,
			pCodecCtx->height, pCodecCtx->pix_fmt,
			pCodecCtx->width, pCodecCtx->height,
			PIX_FMT_RGB24, SWS_FAST_BILINEAR, NULL, NULL, NULL);

	if (pSwsCtx == NULL) {
		fprintf(stderr, "Cannot initialize the sws context\n");
		return -1;
	}

	printf("\nWidth is: %d\n", pCodecCtx->width);
	printf("\nHeight is: %d\n", pCodecCtx->height);
	ioctl_set_fr_size(file_desc, pCodecCtx->width * 3, pCodecCtx->height);

	// Read frames and save first five frames to disk
	while (av_read_frame(pFormatCtx, &packet) >= 0) {
		// Is this a packet from the video stream?
		if (packet.stream_index == videoStream) {
			// Decode video frame
			avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

			// Did we get a video frame?
			if (frameFinished) {

				// Convert the image from its native format to RGB
				sws_scale(pSwsCtx,
							(const uint8_t * const *) pFrame->data,
							pFrame->linesize, 0, pCodecCtx->height,
							pFrameRGB->data,
							pFrameRGB->linesize);

				// Feed the frame to web-cam driver
				FeedFrame(file_desc, pFrameRGB);
			}
		}

		// Free the packet that was allocated by av_read_frame
		av_free_packet(&packet);
	}

	// Free the RGB image
	av_free(buffer);
	av_free(pFrameRGB);

	// Free the YUV frame
	av_free(pFrame);

	// Close the codec
	avcodec_close(pCodecCtx);

	// Close the video file
	avformat_close_input(&pFormatCtx);

	close(file_desc);
	return 0;
}
