#include "Block.h"

// all network helper classes shall be derived from this class
// they need to implement the virtual methods
class NetworkHelper
{
	public:

		// reductionFactor: only every reductionFactor-th frame is stored
		// bufferHeight:    maximum number of frames (rows) the buffers hold
		// bufferWidth:     number of features per frame (columns of trainingData)
		NetworkHelper(int reductionFactor, int bufferHeight, int bufferWidth)
		{
			framesStored = 0;
			frameReductionFactor = reductionFactor;
			trainingData = new TSLCRealMatrixBuffer(bufferHeight, bufferWidth);
			responses = new TSLCRealMatrixBuffer( bufferHeight, 1 );
			trainingData->Set(0);
			responses->Set(0);
			this->bufferHeight = bufferHeight;
			this->bufferWidth = bufferWidth;
		}

		// virtual: helpers are used polymorphically, so deleting a derived
		// helper through a NetworkHelper* must run the derived destructor
		// (was non-virtual, which is undefined behaviour in that case)
		virtual ~NetworkHelper()
		{
			delete trainingData;
			delete responses;
		}

		virtual UnicodeString prepareTrainingData(TVLCVideoBuffer InBuffer, __int64 currentFrame, Content response) = 0;

		// this method is called to process a frame to a RealBuffer the neural network accepts
		virtual TSLCRealBuffer prepareTestData(TVLCVideoBuffer InBuffer) = 0;

		// returns a copy of the collected training data matrix
		TSLCRealMatrixBuffer getTrainingData()
		{
			return *trainingData;
		}

		// returns a copy of the collected response (label) column
		TSLCRealMatrixBuffer getResponses()
		{
			return *responses;
		}

		void resetFramesStored()
		{
			framesStored = 0;
		}

		// This method removes frames of the frame type that occurs more often.
		// In a normal movie this would be movie frames.
		// The frames are then stored alternating (if requested) and shuffled.
		// NOTE(review): framesStored is NOT updated to the balanced row count
		// here -- presumably callers use resetFramesStored(); confirm.
		void balanceTrainingData(bool alternateFrames)
		{
			int noAdFrames = 0;
			int noMovFrames = 0;
			int noRows = bufferHeight;
			bool* frameRead = new bool[noRows];
			TSLCRealMatrixBuffer* newTrainingData = 0;
			TSLCRealMatrixBuffer* newResponses = 0;
			int rows = 0;

			// initialise to false
			for (int i = 0; i < noRows; i++) {
				frameRead[i] = false;
			}

			// count frames of each type
			for (int i = 0 ; i < framesStored; i++) {
				if((*responses)[i][0] == MOVIE) {
					noMovFrames++;
				}
				else if ((*responses)[i][0] == AD) {
					noAdFrames++;
				}
			}

			// determine which type of frame occurs less often and create the
			// new training data matrix for double the size of the less often
			// frame type
			if ( noAdFrames != noMovFrames) {
				if (noAdFrames < noMovFrames) {
					rows = noAdFrames * 2;
				}
				else {
					rows = noMovFrames * 2;
				}
				// (was: the movie branch allocated rows + 1 while the ad branch
				// allocated rows, leaving one permanently zero-filled row; both
				// branches now allocate exactly `rows`)
				newTrainingData = new TSLCRealMatrixBuffer(rows, bufferWidth);
				newResponses = new TSLCRealMatrixBuffer(rows, 1);

				// set everything to zero
				newTrainingData->Set(0);
				newResponses->Set(0);

				// seed random number generator
				srand( (unsigned)time( NULL ) );

				// fill the first 75% of the new matrix with randomly drawn,
				// not-yet-copied frames; only 75% because pure rejection
				// sampling gets slow once most frames are already used
				int loopStop = rows * 3 / 4;
				for (int i = 0; i < loopStop; i++) {
					int randomFrame = rand() % framesStored;

					// has this frame already been copied?
					// MOVIE and AD parts should alternate
					if (alternateFrames) {
						// re-roll while the candidate was already copied OR its
						// type does not fit the alternating slot (even slots
						// take MOVIE, odd slots take AD).
						// (was: tested (*responses)[i] -- the destination slot
						// -- instead of the candidate frame, and used && so
						// wrongly typed frames slipped through)
						while (frameRead[randomFrame]
						|| !((i % 2 == 0 && (*responses)[randomFrame][0] == MOVIE)
						|| (i % 2 == 1 && (*responses)[randomFrame][0] == AD))) {
							randomFrame = rand() % framesStored;
						}
					} else {
						while (frameRead[randomFrame]) {
							randomFrame = rand() % framesStored;
						}
					}

					// copy data
					newResponses->SetItem(i, 0,
						responses->GetItem(randomFrame, 0));

					for (int j = 0; j < bufferWidth; j++) {
						newTrainingData->SetItem(i, j,
							trainingData->GetItem(randomFrame, j));
					}

					frameRead[randomFrame] = true;	// mark this frame as copied
				}

				// do the remaining 25% with a deterministic scan
				// (was: scanned only the first `rows` stored frames, so frames
				// with an index >= rows could never be picked here)
				for (int i = loopStop; i < rows; i++) {
					for (int j = 0; j < framesStored; j++) {
						if (frameRead[j] == false
						&& ((i % 2 == 0 && (*responses)[j][0] == MOVIE)
						|| (i % 2 == 1 && (*responses)[j][0] == AD))) {
							// copy data
							(*newResponses)[i][0] = (*responses)[j][0];

							for (int v = 0; v < bufferWidth; v++) {
								(*newTrainingData)[i][v] = (*trainingData)[j][v];
							}

							frameRead[j] = true;	// mark as copied
							break;
						}
					}
				}

				// clean up old buffers
				delete trainingData;
				delete responses;

				// redirect old pointers to new data
				responses = newResponses;
				trainingData = newTrainingData;
			}

			// delete[] (not delete) because it was allocated with new[];
			// done unconditionally -- the old code leaked it when the
			// frame counts were already equal
			delete[] frameRead;
		}

	protected:

		int framesStored;
		int frameReductionFactor;

		int bufferHeight;
		int bufferWidth;

		TSLCRealMatrixBuffer* trainingData;
		TSLCRealMatrixBuffer* responses;

	private:

		// this class owns raw buffers; the implicit copy would double-delete
		// them, so copying is disabled (declared but never defined)
		NetworkHelper(const NetworkHelper&);
		NetworkHelper& operator=(const NetworkHelper&);
};

class StandardNetworkHelper : public NetworkHelper
{
	public:

		StandardNetworkHelper(int reductionFactor, int bufferHeight, int bufferWidth) :NetworkHelper(reductionFactor, bufferHeight, bufferWidth) {}

		// Samples every frameReductionFactor-th frame into the training buffer
		// as one flattened, normalized row (blue plane, then red, then green)
		// and records its label. Returns a status string for the GUI.
		UnicodeString prepareTrainingData(TVLCVideoBuffer InBuffer, __int64 currentFrame, Content response)
		{
			const int capacity = trainingData->GetCount();
			const bool takeFrame = (currentFrame % frameReductionFactor == 0);

			// only store sampled frames, and never past the buffer end
			if (takeFrame && framesStored < capacity)
			{
				const int width = InBuffer.GetWidth();
				const int height = InBuffer.GetHeight();
				const int planeSize = width * height;	// pixels per colour plane

				for (int col = 0; col < width; col++)
				{
					const int base = col * height;
					for (int row = 0; row < height; row++)
					{
						// map the two dimensional frame into the one
						// dimensional row, one colour plane after the other;
						// / 255.0 normalizes the channel to [0, 1]
						(*trainingData)[ framesStored ][ base + row ] = InBuffer.Blue[ col ][ row ] / 255.0;
						(*trainingData)[ framesStored ][ base + row + planeSize ] = InBuffer.Red[ col ][ row ] / 255.0;
						(*trainingData)[ framesStored ][ base + row + planeSize * 2 ] = InBuffer.Green[ col ][ row ] / 255.0;
					}
				}

				(*responses)[ framesStored ][ 0 ] = response;
				framesStored++;
			}

			// build the status message shown in the GUI for this section
			UnicodeString statusString = "Training in progress - section is ";
			if (response == AD)
			{
				statusString += "AD (";
				statusString += AD;
				statusString += ").";
			}
			else if (response == MOVIE)
			{
				statusString += "MOVIE (";
				statusString += MOVIE;
				statusString += ").";
			}
			else
			{
				statusString = "";
			}
			return statusString;
		}

		// Converts a single frame into the flattened, normalized RGB vector
		// the neural network expects as input.
		TSLCRealBuffer prepareTestData(TVLCVideoBuffer InBuffer)
		{
			const int width = InBuffer.GetWidth();
			const int height = InBuffer.GetHeight();
			const int planeSize = width * height;

			// one slot per pixel and colour channel (3 because of RGB)
			TSLCRealBuffer testData(planeSize * 3);

			for (int col = 0; col < width; col++)
			{
				const int base = col * height;
				for (int row = 0; row < height; row++)
				{
					// same flattening scheme as the training data
					testData[ base + row ] = InBuffer.Blue[ col ][ row ] / 255.0;
					testData[ base + row + planeSize ] = InBuffer.Red[ col ][ row ] / 255.0;
					testData[ base + row + planeSize * 2 ] = InBuffer.Green[ col ][ row ] / 255.0;
				}
			}
			return testData;
		}
};

class GreyscaleNetworkHelper : public NetworkHelper
{
	public:

		GreyscaleNetworkHelper(int reductionFactor, int bufferHeight, int bufferWidth) :NetworkHelper(reductionFactor, bufferHeight, bufferWidth) {}

		// Stores every frameReductionFactor-th frame as one row of greyscale
		// training data (mean of the B, R and G channels, normalized to [0,1])
		// and records its label. Returns a status string for the GUI.
		UnicodeString prepareTrainingData(TVLCVideoBuffer InBuffer, __int64 currentFrame, Content response)
		{
			int val = currentFrame % frameReductionFactor;
			int rows = trainingData->GetCount();
			// bounds check added (was missing): without it a full buffer is
			// overrun on every further sampled frame -- now consistent with
			// StandardNetworkHelper
			if( val == 0 && framesStored < rows)
			{
				for ( int x = 0; x < InBuffer.GetWidth(); x ++ )
				{
					for ( int y = 0; y < InBuffer.GetHeight(); y ++ )
					  {
						  // map the two dimensional frame in the one dimensional array;
						  // / 255.0 normalizes, / 3.0 averages the three channels
						   (*trainingData)[ framesStored ][ x * InBuffer.GetHeight() + y ] = InBuffer.Blue[ x ][ y ] / 255.0 / 3.0
								+ InBuffer.Red[ x ][ y ] / 255.0 / 3.0
								+ InBuffer.Green[ x ][ y ] / 255.0 / 3.0;
					  }
				}

				(*responses)[ framesStored ][ 0 ] = response;
				framesStored++;
			}

			// display the current section in the GUI
			UnicodeString statusString = "Training in progress - section is ";
			switch (response) {
				case AD:
					statusString += "AD (";
					statusString += AD;
					statusString += ").";
					break;
				case MOVIE:
					statusString += "MOVIE (";
					statusString += MOVIE;
					statusString += ").";
					break;
				default:
					statusString = "";
					break;
			}
			return statusString;
		}

		// Converts a single frame into the flattened, normalized greyscale
		// vector the neural network expects as input.
		TSLCRealBuffer prepareTestData(TVLCVideoBuffer InBuffer)
		{
			// one slot per pixel -- a single greyscale channel, so no RGB * 3
			int bufferWidth = InBuffer.GetWidth() * InBuffer.GetHeight();
			TSLCRealBuffer testData(bufferWidth);

			for ( int x = 0; x < InBuffer.GetWidth(); x ++ )
			{
				for ( int y = 0; y < InBuffer.GetHeight(); y ++ )
				  {
					  // same flattening and channel averaging as the training data
					  testData[ x * InBuffer.GetHeight() + y ] = InBuffer.Blue[ x ][ y ] / 255.0 / 3.0
							+ InBuffer.Red[ x ][ y ] / 255.0 / 3.0
							+ InBuffer.Green[ x ][ y ] / 255.0 / 3.0;
				  }
			}
			return testData;
		}
};

class ZigZagNetworkHelper : public NetworkHelper
{
	public:

		ZigZagNetworkHelper(int reductionFactor, int bufferHeight, int bufferWidth) :NetworkHelper(reductionFactor, bufferHeight, bufferWidth) {}

		// Stores every frameReductionFactor-th frame as a flattened, normalized
		// RGB row, reversing the pixel order of every odd column ("zig-zag"),
		// and records its label. Returns a status string for the GUI.
		UnicodeString prepareTrainingData(TVLCVideoBuffer InBuffer, __int64 currentFrame, Content response)
		{
			int val = currentFrame % frameReductionFactor;
			int rows = trainingData->GetCount();
			// bounds check added (was missing): without it a full buffer is
			// overrun on every further sampled frame -- now consistent with
			// StandardNetworkHelper
			if( val == 0 && framesStored < rows)
			{
				for ( int x = 0; x < InBuffer.GetWidth(); x ++ )
				{
					for ( int y = 0; y < InBuffer.GetHeight(); y ++ )
					{
						if (x%2 == 0)		// left to right
						{
							(*trainingData)[ framesStored ][ x * InBuffer.GetHeight() + y ] = InBuffer.Blue[ x ][ y ] / 255.0;
							(*trainingData)[ framesStored ][ x * InBuffer.GetHeight() + y + InBuffer.GetWidth() * InBuffer.GetHeight() ] = InBuffer.Red[ x ][ y ] / 255.0;
							(*trainingData)[ framesStored ][ x * InBuffer.GetHeight() + y + InBuffer.GetWidth() * InBuffer.GetHeight() * 2 ] = InBuffer.Green[ x ][ y ] / 255.0;
						}
						else				// right to left: mirror within the column
						{
							// was InBuffer.GetWidth()-y: wrong dimension (y indexes
							// the height axis) and off by one (y==0 read index
							// GetWidth(), one past the end)
							(*trainingData)[ framesStored ][ x * InBuffer.GetHeight() + y ] = InBuffer.Blue[ x ][ InBuffer.GetHeight()-1-y ] / 255.0;
							(*trainingData)[ framesStored ][ x * InBuffer.GetHeight() + y + InBuffer.GetWidth() * InBuffer.GetHeight() ] = InBuffer.Red[ x ][ InBuffer.GetHeight()-1-y ] / 255.0;
							(*trainingData)[ framesStored ][ x * InBuffer.GetHeight() + y + InBuffer.GetWidth() * InBuffer.GetHeight() * 2 ] = InBuffer.Green[ x ][ InBuffer.GetHeight()-1-y ] / 255.0;
						}
					}
				}

				(*responses)[ framesStored ][ 0 ] = response;
				framesStored++;
			}

			// display the current section in the GUI
			UnicodeString statusString = "Training in progress - section is ";
			switch (response) {
				case AD:
					statusString += "AD (";
					statusString += AD;
					statusString += ").";
					break;
				case MOVIE:
					statusString += "MOVIE (";
					statusString += MOVIE;
					statusString += ").";
					break;
				default:
					statusString = "";
					break;
			}
			return statusString;
		}

		// Converts a single frame into the flattened, normalized, zig-zag
		// ordered RGB vector the neural network expects as input.
		TSLCRealBuffer prepareTestData(TVLCVideoBuffer InBuffer)
		{
			// create Buffer for test data
			int bufferWidth = InBuffer.GetWidth() * InBuffer.GetHeight() * 3;		// 3 because of RGB
			TSLCRealBuffer testData(bufferWidth);

			// was: this method wrote into (*trainingData)[framesStored]
			// (copy-paste from prepareTrainingData) and returned a testData
			// buffer that was never filled; it now fills testData itself,
			// with the same mirror-index fix as above
			for ( int x = 0; x < InBuffer.GetWidth(); x ++ )
			{
				for ( int y = 0; y < InBuffer.GetHeight(); y ++ )
				{
					if (x%2 == 0)		// left to right
					{
						testData[ x * InBuffer.GetHeight() + y ] = InBuffer.Blue[ x ][ y ] / 255.0;
						testData[ x * InBuffer.GetHeight() + y + InBuffer.GetWidth() * InBuffer.GetHeight() ] = InBuffer.Red[ x ][ y ] / 255.0;
						testData[ x * InBuffer.GetHeight() + y + InBuffer.GetWidth() * InBuffer.GetHeight() * 2 ] = InBuffer.Green[ x ][ y ] / 255.0;
					}
					else				// right to left: mirror within the column
					{
						testData[ x * InBuffer.GetHeight() + y ] = InBuffer.Blue[ x ][ InBuffer.GetHeight()-1-y ] / 255.0;
						testData[ x * InBuffer.GetHeight() + y + InBuffer.GetWidth() * InBuffer.GetHeight() ] = InBuffer.Red[ x ][ InBuffer.GetHeight()-1-y ] / 255.0;
						testData[ x * InBuffer.GetHeight() + y + InBuffer.GetWidth() * InBuffer.GetHeight() * 2 ] = InBuffer.Green[ x ][ InBuffer.GetHeight()-1-y ] / 255.0;
					}
				}
			}
			return testData;
		}
};
