#include "Network.h"

#include <cmath>
#include <cstdio>

// Build a 3-layer (input/hidden/output) network, zero all weights/biases,
// then randomize them via initNetwork().
Network::Network(int inputLayerNumber, int hiddenLayerNumber, int outputLayerNumber)
{
	this->inputLayerNumber = inputLayerNumber;
	this->hiddenLayerNumber = hiddenLayerNumber;
	this->outputLayerNumber = outputLayerNumber;
	learningRate = 0.3;  // default step size; see setLearningRate()
	hitThreshold = 0.5;  // max |actual - expected| still counted as a hit in testTuple()
	classNumber = 2;     // two-class problems use a single output unit
	isDebug = false;

	// FIX: activation buffers are allocated lazily in forword(); start them
	// at null so they are never left uninitialized.
	hiddenLayer = 0;
	outputLayer = 0;

	// inputWeight[i][j]: weight from input unit i to hidden unit j.
	// (Removed the redundant C-style cast on the new[] result.)
	this->inputWeight = new double*[inputLayerNumber];
	for(int i = 0 ; i < inputLayerNumber ;i++)
	{
		inputWeight[i] = new double[hiddenLayerNumber];
		for(int j = 0 ;j < hiddenLayerNumber;j++)
		{
			inputWeight[i][j] = 0;
		}
	}

	// outputWeight[i][j]: weight from hidden unit i to output unit j.
	// Note the row count is hiddenLayerNumber (matters for the destructor).
	this->outputWeight = new double*[hiddenLayerNumber];
	for(int i = 0 ;i < hiddenLayerNumber;i++)
	{
		outputWeight[i] = new double[outputLayerNumber];
		for(int j = 0 ;j < outputLayerNumber;j++)
		{
			outputWeight[i][j] = 0;
		}
	}

	hiddenBias = new double[hiddenLayerNumber];
	for(int i = 0 ;i < hiddenLayerNumber ;i++)
	{
		hiddenBias[i]= 0;
	}
	outputBias = new double[outputLayerNumber];
	for(int i = 0 ;i < outputLayerNumber ;i++)
	{
		outputBias[i]= 0;
	}
	initNetwork();
	//this->initNetworkTest();
}

Network::~Network()
{
	for(int i = 0 ; i < this->inputLayerNumber ; i++)
	{
		delete [] inputWeight[i];
	}
	delete [] inputWeight;

	// BUG FIX: outputWeight was allocated with hiddenLayerNumber rows (see
	// constructor), but the old loop freed outputLayerNumber rows — leaking
	// rows when hidden > output, or deleting garbage pointers when output >
	// hidden (undefined behavior).
	for (int i = 0 ;i < this->hiddenLayerNumber ;i++ )
	{
		delete [] outputWeight[i];
	}
	delete [] outputWeight;

	// BUG FIX: the bias arrays were never released.
	delete [] hiddenBias;
	delete [] outputBias;

	// NOTE(review): hiddenLayer/outputLayer (allocated in forword) are not
	// freed here because the constructor may leave them uninitialized;
	// deleting them when forword() was never called would be undefined.
	// TODO: null-initialize them in the constructor, then delete[] here.
}

void Network::setLearningRate(double l)
{
	learningRate = l;
}

// Seed every weight and bias with a pseudo-random value in [-1, 1].
// The order of getRandomValue() calls is significant (it fixes the rand()
// sequence): input weights, output weights, hidden biases, output biases.
void Network::initNetwork()
{
	for (int in = 0; in < inputLayerNumber; in++)
		for (int hid = 0; hid < hiddenLayerNumber; hid++)
			inputWeight[in][hid] = getRandomValue();

	for (int hid = 0; hid < hiddenLayerNumber; hid++)
		for (int out = 0; out < outputLayerNumber; out++)
			outputWeight[hid][out] = getRandomValue();

	for (int hid = 0; hid < hiddenLayerNumber; hid++)
		hiddenBias[hid] = getRandomValue();

	for (int out = 0; out < outputLayerNumber; out++)
		outputBias[out] = getRandomValue();
}

void Network::initNetworkTest()
{
	double data1[3][2] = {
				{0.2, -0.3},
				{0.4, 0.1},
				{-0.5, 0.2}
		};
		
	for(int i = 0 ;i < 3 ;i++)
	{
		for(int j = 0;j < 2;j++)
		{
			inputWeight[i][j] = data1[i][j];
		}
	}
		//inputWeight = (double**)data1;
		
		double data2[2][1] = {\
				{-0.3},\
				{-0.2}\
		};
		for(int i = 0 ;i < 2 ;i++)
		{
			for(int j = 0;j < 1;j++)
			{
				outputWeight[i][j] = data2[i][j];
			}
		}
		//outputWeight = (double**)data2;
		
		double data3[] = {\
				-0.4, 0.2\
		};
		for(int i = 0 ;i < 2;i++)
		{
			hiddenBias[i] = data3[i];
		}
		//hiddenBias = data3;
		
		double data4[] = { 0.1};
		for(int i = 0 ;i < 1;i++)
		{
			outputBias[i] = data4[i];
		}
		//outputBias = data4;
}

// Forward pass (sic: "forword"): input -> hidden -> output, with the
// logistic sigmoid (getOutputFromInput) applied at both layers.
// The caller-owned `input` pointer is stored in this->input and later read
// by backpropagation(), so it must stay valid until the next training step.
// NOTE(review): a fresh hiddenLayer/outputLayer is allocated on every call
// and the previous pair is never freed — one leaked pair per call. This
// cannot be fixed safely inside this function alone because the constructor
// does not null-initialize these members, so a delete[] here could act on
// an uninitialized pointer the first time.
void Network::forword(double *input)
{
	this->input = input;
	hiddenLayer = new double[hiddenLayerNumber];
	for (int i = 0; i < hiddenLayerNumber; i++){
		// Weighted sum of inputs plus bias, then squash with the sigmoid.
		hiddenLayer[i] = hiddenBias[i];
		for (int j = 0; j < inputLayerNumber; j++){
			hiddenLayer[i] += input[j] * inputWeight[j][i];
		}
		//cout << hiddenLayer[i] << ":";
		hiddenLayer[i] = getOutputFromInput(hiddenLayer[i]);
		//cout << hiddenLayer[i] << endl;
	}
	
	outputLayer = new double[outputLayerNumber];
	for (int i = 0; i < outputLayerNumber; i++){
		// Same pattern for the output layer, fed by hidden activations.
		outputLayer[i] = outputBias[i];
		for (int j = 0; j < hiddenLayerNumber; j++){
			outputLayer[i] += hiddenLayer[j] * outputWeight[j][i];
		}
		outputLayer[i] = getOutputFromInput(outputLayer[i]);
	}
}

// One gradient-descent update for the most recent forword() pass.
// output: target vector of length outputLayerNumber.
// Error terms use the sigmoid derivative o * (1 - o); weights and biases
// move by learningRate * error * upstream activation.
void Network::backpropagation(double *output)
{
	// Output-layer error: sigmoid'(o) * (target - actual).
	double *outputError = new double[outputLayerNumber];
	for (int i = 0; i < outputLayerNumber; i++){
		outputError[i] = outputLayer[i] * (1 - outputLayer[i]);
		outputError[i] *= (output[i] - outputLayer[i]);
	}
	
	// Hidden-layer error: sigmoid'(h) * (output errors propagated back
	// through the hidden->output weights).
	double *hiddenError = new double[hiddenLayerNumber];
	for (int i = 0; i < hiddenLayerNumber; i++){
		hiddenError[i] = hiddenLayer[i] * (1 - hiddenLayer[i]);
		double temp = 0;
		for (int j = 0; j < outputLayerNumber; j++){
			temp += outputError[j] * outputWeight[i][j];
		}
		hiddenError[i] *= temp;
	}
	
	// Weight updates (weights are updated before biases, as before).
	for (int i = 0; i < hiddenLayerNumber; i++){
		for (int j = 0; j < outputLayerNumber; j++){
			outputWeight[i][j] += learningRate * outputError[j] * hiddenLayer[i];
		}
	}
	
	for (int i = 0; i < inputLayerNumber; i++){
		for (int j = 0; j < hiddenLayerNumber; j++){
			inputWeight[i][j] += learningRate * hiddenError[j] * input[i];
		}
	}
	
	for (int i = 0; i < outputLayerNumber; i++){
		outputBias[i] += learningRate * outputError[i];
	}
	
	for (int i = 0; i < hiddenLayerNumber; i++){
		hiddenBias[i] += learningRate * hiddenError[i];
	}

	// BUG FIX: both scratch error arrays were leaked on every training step.
	delete [] outputError;
	delete [] hiddenError;
}

// Return a pseudo-random double in [-1, 1], derived from rand().
double Network::getRandomValue()
{
	const double unit = rand() / static_cast<double>(RAND_MAX);  // [0, 1]
	return (unit - 0.5) * 2;                                     // [-1, 1]
}

// Logistic sigmoid activation: 1 / (1 + e^-x), mapping R -> (0, 1).
// FIX: use exp(-x) instead of pow(E, -x) — the project's E constant can only
// be a truncated approximation of Euler's number, so exp() is both more
// accurate and cheaper than the general pow().
double Network::getOutputFromInput(double input)
{
	return 1.0 / (1.0 + exp(-input));
}

void Network::printNetwork()
{
	//System.out.println("Input Matrix");
	cout << "Input Matrix" << endl;
	for (int i = 0; i < inputLayerNumber; i++)
	{
		for (int j = 0; j < hiddenLayerNumber; j++)
		{
			//printf("% .3f ", inputWeight[i][j]);
			
		}
		//System.out.println();
		//cout << endl;
	}
	
	//System.out.println("Output Matrix");
	cout << "Output Matrix"<<endl;
	for (int i = 0; i < hiddenLayerNumber; i++){
		for (int j = 0; j < outputLayerNumber; j++)
		{
			//printf("% .3f ", outputWeight[i][j]);
		}
		//cout << endl;
	}
	
	//System.out.println("HiddenLayer Bias");
	cout << "HiddenLayer Bias"<<endl;
	for (int i = 0; i < hiddenLayerNumber; i++){
		//printf("% .3f ", hiddenBias[i]);
	}
		//cout << endl;
	
	//System.out.println("OutLayer Bias");
	cout << "OutLayer Bias"<<endl;
	for (int i = 0; i < outputLayerNumber; i++){
		//printf("% .3f ", outputBias[i]);
	}
		//cout << endl;

}

// Run one tuple through the network and report whether every output unit is
// within hitThreshold of the expected value.
// Tuple layout (assumed from the indexing — TODO confirm with callers):
// input1[1] is the class label; features start at input1[2].
bool Network::testTuple(vector<double>& input1)
{
	// Copy the feature slice into a plain array for forword().
	// (The old intermediate vector copy was redundant; also use size_t to
	// avoid the signed/unsigned comparison with vector::size().)
	size_t featureCount = input1.size() - 2;
	double *features = new double[featureCount];
	for (size_t j = 0; j < featureCount; j++){
		features[j] = input1[j + 2];
	}

	// NOTE(review): forword() stores this raw pointer in this->input for a
	// possible later backpropagation() call, so it is intentionally NOT freed
	// here (matches the original behavior, which also leaked it). A proper
	// fix would give the network ownership of its input buffer.
	this->forword(features);

	double *expected = getOutputFromNumber(input1[1]);
	bool hit = true;
	for (int i = 0; i < outputLayerNumber; i++){
		if (fabs(outputLayer[i] - expected[i]) > hitThreshold)
		{
			hit = false;
			break;
		}
	}
	// BUG FIX: the expected-output array was leaked on every call, and on
	// the old early-return path in particular.
	delete [] expected;
	return hit;
}

// Encode a class label as the target output vector for training/testing.
// Two-class problems (classNumber == 2) use a single output unit holding the
// label itself; otherwise the label is one-hot encoded over classNumber
// units. The caller owns the returned array and must delete[] it.
double *Network::getOutputFromNumber(double output)
{
	if (classNumber == 2){
		double *encoded = new double[1];
		encoded[0] = output;
		return encoded;
	}

	double *encoded = new double[classNumber];
	for (int i = 0; i < classNumber; i++){
		encoded[i] = 0;
	}
	encoded[(int)output] = 1;  // label doubles as the hot index
	return encoded;
}