#pragma once

#ifdef _DEBUG
#pragma comment (lib, "Strmbasd.lib")
#else
#pragma comment (lib, "Strmbase.lib")
#endif

#define MemoryRendererName		L"UniqueMemoryRendererName"

#include "SmallMediaLibrary.h"
#include "dshow.h"
#include "Streams.h"
#include <initguid.h>
#include "combase.h"
#include <vector>
#include "renbase.h"
#include <vcclr.h>

using namespace System;
using namespace Threading;

namespace SmallMediaLibrary {
	ref class VideoPlayer;
}
using namespace SmallMediaLibrary;

/*
Managed rectangle description (origin plus extent), convertible to a native
Win32 RECT for interop with the DirectShow renderer.
*/
public ref struct Region
{
	int X, Y, Width, Height;

	// Capture the position and size of the region.
	Region(int x, int y, int width, int height)
		: X(x), Y(y), Width(width), Height(height)
	{
	}

	// Express this region in the native left/top/right/bottom RECT form.
	RECT ToRect()
	{
		RECT native;
		native.left   = X;
		native.top    = Y;
		native.right  = X + Width;
		native.bottom = Y + Height;
		return native;
	}
};

/*
The Pixel and FPixel structs are helpers for the sampling methods. Pixel represents a 32-bit ARGB colour value, which is the
native working format. All source data is converted to this before being transformed, then written out in the appropriate
target format.

FPixel is a helper equivalent to Pixel, but capable of storing large (non-normalised) values, making it easier to average
sets of pixels.
*/

/*
One 32-bit colour sample with one byte per channel, stored in R,G,B,A member
order. The conversion helpers reorder the channel bytes for output targets;
the struct layout itself never changes.
*/
public struct Pixel
{
	unsigned char R;
	unsigned char G;
	unsigned char B;
	unsigned char A;

	// Already in the working byte order, so a plain copy suffices.
	Pixel ToRGBA()
	{
		return *this;
	}

	// Rotate the alpha channel to the front: the bytes written out are
	// A,R,G,B even though the returned struct's members keep their R,G,B,A
	// names (this is a byte-order shuffle, not a channel rename).
	Pixel ToARGB()
	{
		Pixel shuffled = { A, R, G, B };
		return shuffled;
	}
};

/*
Accumulator companion to Pixel: holds un-normalised floating-point channel
sums so a set of pixels can be totalled and then averaged without
overflowing the 8-bit channels.
*/
public struct FPixel
{
	float R;
	float G;
	float B;
	float A;

	// Start with empty (zero) channel totals.
	FPixel()
	{
		R = G = B = A = 0;	
	}

	// Accumulate one pixel's channels into the running totals.
	FPixel operator+(Pixel a)
	{
		FPixel p;
		p.R = R + a.R;
		p.G = G + a.G;
		p.B = B + a.B;
		p.A = A + a.A;
		return p;
	}

	// Average the accumulated totals back down to an 8-bit pixel.
	// Cast through unsigned char: the previous (char) cast invoked
	// undefined behaviour whenever an averaged channel was >= 128, because
	// converting a float to a signed char that cannot represent the value
	// is UB. Values are truncated, not rounded; assumes R/v etc. stay
	// within 0..255 (true when v is the number of accumulated pixels).
	Pixel operator/(int v)
	{
		Pixel p;
		p.R = (unsigned char)(R / v);
		p.G = (unsigned char)(G / v);
		p.B = (unsigned char)(B / v);
		p.A = (unsigned char)(A / v);
		return p;
	}
};

/* 
VideoFrame represents one unique target buffer that the renderer may write samples into. The host application
creates and configures these objects, then passes them to the VideoPlayer to receive frame data.
*/
public ref class VideoFrame
{
public:
	VideoFrame();
	// width/height size the target buffer; bodies are defined elsewhere —
	// presumably this allocates Buffer and sets BufferLength/BufferStride
	// accordingly (TODO confirm against the implementation file).
	VideoFrame(int width, int height);
		
	~VideoFrame();

public:
	int BufferLength;		// total size of the target buffer, in bytes
	int BufferStride;		// bytes per row of the target buffer
	IntPtr Buffer;			// unmanaged pointer to the pixel data the renderer writes into

	// Optional sub-region of this frame to receive the sample (used for
	// frame-level effects such as PIP — see the note on
	// VideoMemoryRenderer::destination_region).
	Region^ DestinationRegion;

	// Guards the buffer while the renderer writes a sample into it.
	//http://msdn.microsoft.com/en-us/library/system.threading.semaphoreslim.aspx
	SemaphoreSlim^ Lock;	

	// Pixel layout the renderer should convert samples to for this frame.
	// The 'enum' elaborated-type-specifier is required because the member
	// name shadows the PixelFormat type.
	enum PixelFormat PixelFormat;

};

/* 
 * VideoMemoryRenderer is the renderer filter used by VideoPlayer to terminate the filter graph. It is
 * responsible for transforming the video frame samples and writing them into memory in the correct
 * format.

 * Review this article for reference on which CBaseRenderer methods to override, and how:
 * http://www.codeproject.com/Articles/152317/DirectShow-Filters-Development-Part-1-Video-Render 
 */


public class VideoMemoryRenderer : public CBaseVideoRenderer
{
public:
	// Standard DirectShow filter constructor signature: *pResult receives
	// the construction HRESULT.
	VideoMemoryRenderer(LPUNKNOWN	pOwner, 
						HRESULT		*pResult);
	virtual ~VideoMemoryRenderer();


	// CBaseVideoRenderer overrides: DoRenderSample receives each decoded
	// sample; CheckMediaType/SetMediaType negotiate the connection format.
	virtual HRESULT DoRenderSample(IMediaSample *pMediaSample);
    virtual HRESULT CheckMediaType(const CMediaType *pmt);
    virtual HRESULT SetMediaType(const CMediaType *pmt);

protected:
	// Format-negotiation helpers (implemented elsewhere). Presumably
	// CheckMediaFormat validates a proposed type and LoadMediaFormat caches
	// its properties into sourcePixelFormat/source_stride — TODO confirm.
	HRESULT CheckMediaFormat(const CMediaType *pmt);
	HRESULT LoadMediaFormat(const CMediaType *pmt);
	
	// NOTE(review): likely tests bmpinfo.biCompression against values this
	// renderer supports — implementation not visible here.
	bool IsBitmapCompressionValid(BITMAPINFOHEADER bmpinfo);

public:
	RECT SourceSize;					//The size of the samples that are to be received
	RECT SourceRegion;					//The part of the sample to copy into the destination buffer

protected:
	int SubPixelSampleCount;			//this is not exposed because it isn't working yet

protected:
	enum PixelFormat sourcePixelFormat;	//pixel layout of the incoming samples
	int source_stride;					//bytes per row of the incoming samples

	//the destination rect defines the region in the destination frame to write to (per filter); this is ignored — so far we haven't encountered a filter that uses it.
	//The VideoFrame object has an equivalent property for things like PIP, but works at the frame level, as opposed to filter level
	RECT destination_region;

	// Copies sourceRegion of the source sample into destRegion of the
	// destination buffer, converting between the two pixel formats
	// (presumably scaling when the regions differ in size, per the
	// GDI-style name — confirm against the implementation).
	void StretchBlt(
				BYTE* source,
				int sourceStride,
				RECT sourceSize,
				RECT sourceRegion,
				PixelFormat srcFormat,

				BYTE* destination,
				int destinationStride,
				int destinationLength,
				RECT destRegion,
				PixelFormat destFormat
				);

	// Reads the single pixel at integer coordinates (x, y).
	Pixel SamplePixel(
				BYTE* source, 
				int sourceStride, 
				PixelFormat sourceFormat, 
				int x, int y);

	// Samples at a fractional position; presumably averages neighbouring
	// pixels via FPixel, with 'quality' tied to SubPixelSampleCount (which
	// is noted above as not working yet) — confirm.
	Pixel SubSamplePixel(
				BYTE* source, 
				int sourceStride, 
				PixelFormat sourceFormat, 
				float x, float y,
				int maxX, int maxY,
				int quality);

public:
	gcroot<VideoFrame^> ReceivingFrame;			//the VideoFrame the renderer is to write the samples into
	CRITICAL_SECTION receivingFrameMutex;		//locks access to the ReceivingFrame member to stop it being changed during rendering

	gcroot<VideoPlayer^> Player;				//gcroot lets this native class hold a managed VideoPlayer reference
};

