#include "Wrappers.h"

#include <cstdio>
#include <cstring>

#include <exception>
#include <iostream>
#include <stdexcept>

// Allocate and initialize format context
// Allocate and initialize format context.
//
// Picks an output format matching ai_shortName / ai_fileExtension /
// ai_mimeType, falling back to avi-mpeg4 when no match is found.
// Stores ai_fileName in the context. The caller owns the returned context.
//
// Throws std::runtime_error (derives from std::exception, so existing
// catch sites still work) on allocation or format-lookup failure.
AVFormatContext* CreateFormatContext(
	const char *ai_fileName,
	const char *ai_shortName,
	const char *ai_fileExtension,
	const char *ai_mimeType)
{
	AVFormatContext* formatCtx = av_alloc_format_context();
	if (!formatCtx)
		// std::exception(const char*) is an MSVC extension; use the
		// portable std::runtime_error instead.
		throw std::runtime_error("Memory error");

	formatCtx->oformat = guess_stream_format(ai_shortName, ai_fileExtension, ai_mimeType);
	if (!formatCtx->oformat)
	{
		std::cerr << "Could not find suitable output format, using avi-mpeg4" << std::endl;
		formatCtx->oformat = guess_format("avi", NULL, NULL);
	}
	if (!formatCtx->oformat)
	{
		// Release the context before throwing so it does not leak.
		av_free(formatCtx);
		throw std::runtime_error("Could not find suitable output format");
	}

	// snprintf instead of sprintf: AVFormatContext::filename is a
	// fixed-size buffer, so guard against overly long file names.
	snprintf(formatCtx->filename, sizeof(formatCtx->filename), "%s", ai_fileName);

	return formatCtx;
}

// Create a new video stream
// Create a new video stream.
//
// Adds a video stream to ai_formatCtx and initializes its codec context
// with the given frame size, bit rate, frame rate, GOP size, B-frame
// count and pixel format. The codec is the muxer's default video codec.
// Returns the new stream, or NULL if the stream could not be allocated.
AVStream *CreateVideoStream(
	AVFormatContext* ai_formatCtx,
	CvSize ai_size,
	int ai_bitRate,
	int ai_frameRate,
	int ai_gopSize,
	int ai_bFrames,
	const PixelFormat& ai_pixFmt)
{
	// Create the new video stream
	AVStream *st = av_new_stream(ai_formatCtx, 0);
	if (!st) 
	{
		std::cerr << "Could not alloc stream" << std::endl;
		return NULL;
	}

	// Initialize the codec context
	AVCodecContext *c = st->codec;
	c->codec_id		  = ai_formatCtx->oformat->video_codec;
	c->codec_type	  = CODEC_TYPE_VIDEO;
	c->bit_rate		  = ai_bitRate;
	c->width		  = ai_size.width;
	c->height		  = ai_size.height;
	// time base: this is the fundamental unit of time (in seconds) in terms
	// of which frame timestamps are represented. for fixed-fps content,
	// timebase should be 1/framerate and timestamp increments should be
	// identically 1.
	c->time_base.num  = 1;
	c->time_base.den  = ai_frameRate;
	c->gop_size		  = ai_gopSize;
	c->pix_fmt		  = ai_pixFmt;

	if (c->codec_id == CODEC_ID_MPEG2VIDEO) 
		c->max_b_frames = ai_bFrames; // we also add B frames
	if (c->codec_id == CODEC_ID_MPEG1VIDEO)
	{
		// needed to avoid using macroblocks in which some coeffs overflow
		// this doesnt happen with normal video, it just happens here as the
		// motion of the chroma plane doesnt match the luma plane
		c->mb_decision=2;
	}

	// Some formats want stream headers to be separate.
	// Honor the muxer's own AVFMT_GLOBALHEADER flag rather than a
	// hard-coded name list (mp4/mov/3gp), which missed other formats
	// that need global headers (e.g. matroska, flv).
	if (ai_formatCtx->oformat->flags & AVFMT_GLOBALHEADER)
		c->flags |= CODEC_FLAG_GLOBAL_HEADER;

	return st;
}

// Allocate memory for a new frame
// Allocate memory for a new frame: the AVFrame header plus a pixel
// buffer large enough for ai_pixFmt at ai_width x ai_height.
// Returns NULL on allocation failure.
// Release the result with ReleaseFrame(frame, true).
AVFrame *CreateFrame(int ai_pixFmt, int ai_width, int ai_height)
{
	AVFrame *picture = avcodec_alloc_frame();
	if (!picture)
		return NULL;

	uint8_t *pictureBuf = (uint8_t*)av_malloc(avpicture_get_size(ai_pixFmt, ai_width, ai_height));
	if (!pictureBuf)
	{
		// BUG FIX: the original fell through to avpicture_fill() with a
		// NULL buffer and returned the frame anyway; it also passed the
		// pointer itself to av_freep(), which expects the pointer's
		// ADDRESS (so it was freeing picture->data[0], not the frame).
		av_freep(&picture);
		return NULL;
	}

	avpicture_fill((AVPicture *)picture, pictureBuf, ai_pixFmt, ai_width, ai_height);

	return picture;
}

// Release memory from a AVFrame
// Release memory held by an AVFrame (e.g. one returned by CreateFrame).
// When ai_releaseData is true, the pixel buffer (data[0]) is freed as
// well as the frame header itself. A NULL frame is ignored.
void ReleaseFrame(AVFrame *ai_picture, bool ai_releaseData)
{
	if (!ai_picture)
		return;

	if (ai_releaseData && ai_picture->data[0])
		av_free(ai_picture->data[0]);

	av_free(ai_picture);
}

// Check that an IplImage is suitable for encoding: non-null, 3-channel
// 8-bit unsigned, with even width and height, and whose buffer size
// matches a packed BGR24 picture of the same dimensions.
bool ValidateImage(IplImage *ai_image)
{
	if (ai_image == NULL)
		return false;

	const bool formatOk =
		(ai_image->nChannels == 3) &&
		(ai_image->depth == IPL_DEPTH_8U) &&
		(ai_image->width % 2 == 0) &&
		(ai_image->height % 2 == 0);
	if (!formatOk)
		return false;

	return ai_image->imageSize ==
		avpicture_get_size(PIX_FMT_BGR24, ai_image->width, ai_image->height);
}