#include "NeuralNet.h"

#include <cmath>
// SK

// Construct a fully-connected feed-forward net and allocate its topology.
// numHiddenLayers   number of hidden layers (0 = inputs feed the output layer)
// numNeuronsPerlayer neurons in each hidden layer
// numInputs         size of the external input vector
// numOutputs        size of the output vector
// learnRate         step size used by train()
NeuralNet::NeuralNet(u32 numHiddenLayers, u32 numNeuronsPerlayer, u32 numInputs, u32 numOutputs, REAL learnRate)
	: numHidden (numHiddenLayers)
	, numNeurons (numNeuronsPerlayer)
	, numInput (numInputs)
	, numOutput (numOutputs)
	, learnRate (learnRate)
{
	// allocate layers and the output buffer straight away
	build ();
}

// Free every neuron, each layer's pointer array, then the layer table
// and the output buffer (reverse of build()'s allocations).
NeuralNet::~NeuralNet ()
{
	for (u32 i=0; i<numHidden+1; i++)
	{
		// the last layer is the output layer; all others hold numNeurons neurons
		u32 numNeu = (i == numHidden) ? numOutput : numNeurons;

		for (u32 j=0; j<numNeu; j++)
		{
			// BUGFIX: each neuron was allocated with scalar `new Neuron(...)`
			// in build(), so it must be freed with scalar `delete`; the
			// original `delete []` here was undefined behavior
			delete layers [i][j];
		}

		delete [] layers [i];	// the pNeuron array itself was new[]'d
	}
	delete [] layers;
	delete [] output;
}

void NeuralNet::build (void)
{
	layers = new Neuron ** [numHidden+1];

	if (numHidden == 0)
	{
		//only 1 layer needed, the output layer
		layers[0] = new pNeuron [numOutput];

		for (u32 i=0; i<numOutput;i++)
			layers [0][i] = new Neuron (numInput);
		
	}
	else
	{
		for (u32 i=0; i<numHidden; i++)
		{
			layers [i] = new pNeuron [numNeurons];

			for (u32 j=0; j<numNeurons; j++)
			{
				if (i==0)
					layers [i][j] = new Neuron (numInput);
				else
					layers [i][j] = new Neuron (numNeurons);
			}
		}

		//create output layer
		layers[numHidden] = new pNeuron [numOutput];

		for (u32 i=0; i<numOutput;i++)
			layers [numHidden][i] = new Neuron (numNeurons);
	}

	output = new REAL [numOutput];
}

// Logistic sigmoid activation: 1 / (1 + e^-n), mapping any activation
// into (0, 1).
REAL NeuralNet::actFunc (REAL n)
{
	// BUGFIX: previously computed as pow(2.7183f, -n) — a truncated
	// approximation of e that is both less accurate and slower than the
	// dedicated exponential function
	return 1.0f / (1.0f + std::exp (-n));
}


void NeuralNet::run (void)
{
	for (u32 curLayer=0; curLayer<numHidden+1;curLayer++)
	{
		
		u32 numNeu;
		if (curLayer == numHidden)
			numNeu = numOutput;
		else
			numNeu = numNeurons;

		for (u32 curNeuron=0; curNeuron<numNeu; curNeuron++)
		{
			u32 numIn;
			REAL act=0.0f;
			if (curLayer == 0)
			{
				numIn = numInput;
				for (u32 curIn=0; curIn < numInput; curIn++)
					act += inputData [curIn] * layers [curLayer][curNeuron]->weights [curIn];
			}
			else
			{
				numIn = numNeurons;
				for (u32 curIn=0; curIn < numNeurons; curIn++)
					act += layers [curLayer-1][curIn]->output * layers [curLayer][curNeuron]->weights [curIn];
			}

			act -= layers [curLayer][curNeuron]->threshold;
			layers [curLayer][curNeuron]->output = actFunc (act);
		}
	}

	//copy output to output array
	for (u32 i=0; i<numOutput;i++)
	{
		output [i] = layers [numHidden][i]->output;
	}
}

// One online backpropagation step: run a forward pass, compute the error
// gradients from the output layer backwards, then adjust every weight and
// threshold with the stored learning rate.
// desiredOutput must point to numOutput target values.
void NeuralNet::train (REAL *desiredOutput)
{
	this->run();

	//start off at last layer and propagate errors backwards
	for (i32 curLayer=numHidden; curLayer >= 0; curLayer--)
	{
		u32 numNeu;	//num neurons in this layer

		if (curLayer == (i32)numHidden)	//current layer is the output layer
		{
			numNeu = numOutput;

			for (u32 curNeuron=0; curNeuron<numNeu; curNeuron++)
			{
				REAL out = layers[curLayer][curNeuron]->output;
				REAL error = desiredOutput [curNeuron] - out;
				//delta rule: out*(1-out) is the sigmoid derivative
				layers[curLayer][curNeuron]->errGradient = out * (1.0f - out) * error;
			}
		}
		else
		{
			numNeu = numNeurons;

			//num neurons in the layer above (this layer's fan-out)
			u32 nexNumNeu = (curLayer == (i32)numHidden - 1) ? numOutput : numNeurons;

			for (u32 curNeuron=0; curNeuron<numNeu; curNeuron++)
			{
				REAL errGrad = 0.0f;
				REAL out = layers [curLayer][curNeuron]->output;

				//sum the next layer's gradients weighted by the connection
				//leaving this neuron into each of those neurons
				for (u32 nxt = 0; nxt < nexNumNeu; nxt++)
				{
					errGrad += layers [curLayer + 1][nxt]->errGradient * layers [curLayer + 1][nxt]->weights [curNeuron];
				}

				layers [curLayer][curNeuron]->errGradient = out * (1.0f - out) * errGrad;
			}
		}

		//weight update: layer 0 is fed by the external inputs, every deeper
		//layer by the previous layer's outputs
		u32 numIn = (curLayer == 0) ? numInput : numNeurons;

		for (u32 curNeuron=0; curNeuron < numNeu; curNeuron++)
		{
			Neuron *neu = layers [curLayer][curNeuron];

			for (u32 curIn=0; curIn < numIn; curIn++)
			{
				REAL in = (curLayer == 0) ? inputData [curIn] : layers [curLayer - 1][curIn]->output;
				neu->weights [curIn] += learnRate * in * neu->errGradient;
			}

			//BUGFIX: the threshold (bias) correction was previously applied
			//inside the input loop, i.e. numIn times per neuron per step;
			//it must be applied exactly once per neuron
			neu->threshold += learnRate * -1.0f * neu->errGradient;
		}
	}
}

// Store a non-owning pointer to the caller's input vector (numInput
// entries); it must remain valid across subsequent run()/train() calls.
void NeuralNet::setInput (REAL *data)
{
	this->inputData = data;
}

// Return a non-owning view of the last run()'s results (numOutput
// entries); owned and freed by this object.
REAL *NeuralNet::getOutput (void)
{
	return this->output;
}

// Dump the inputs, outputs and weights of every layer into a string for
// debugging. Assumes setInput() has been called (inputData is read).
String NeuralNet::getDebuginfo (void)
{
	StringStream st;

	st << "Net Dump:" << std::endl;

	for (u32 i=0; i<numHidden + 1; i++)
	{
		//neurons in this layer: the last layer is the output layer.
		//BUGFIX: with numHidden == 0 the old i==0 branch won and formatted
		//the single (output) layer as if it held numNeurons neurons.
		u32 numNeu = (i == numHidden) ? numOutput : numNeurons;

		//inputs feeding this layer == weights per neuron: layer 0 reads the
		//external inputs, every deeper layer reads the previous layer.
		//BUGFIX: the old code iterated numInput weights for EVERY layer,
		//reading past the weight array whenever numInput > numNeurons.
		u32 numIn = (i == 0) ? numInput : numNeurons;

		st << "Layer " << i << ":" << std::endl;

		for (u32 inp=0; inp < numIn; inp++)
		{
			REAL in = (i == 0) ? inputData [inp] : layers [i-1][inp]->output;
			st << "\tInput " << inp << ": " << in << std::endl;
		}

		for (u32 outp=0; outp < numNeu; outp++)
		{
			st << "\tOutput " << outp << ": " << layers [i][outp]->output << std::endl;
		}

		for (u32 j=0; j < numNeu; j++)
		{
			for (u32 we=0; we < numIn; we++)
			{
				st << "\tWeight " << we << ": " << layers [i][j]->weights [we] << std::endl;
			}
		}
	}

	st << std::endl;
	return st.str();
}

// EK