// TDlambda.cpp: implementation of the TDlambda class.
//
//////////////////////////////////////////////////////////////////////

//#include "stdafx.h"
#include "Common/NeuralNet/TDlambda.h"
#include <stdlib.h>
#include <stdio.h>
#include <time.h>
#include <math.h>
//#include <crtdbg.h>

//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////

// Construct an untrained net; weights stay unallocated until Initialize()
// (or LoadNetFromDisk) is called.
TDlambda::TDlambda(int inCountInputLayer, int inCountHiddenLayer, double inAlpha, double inLambda)
	: countInputLayer(inCountInputLayer),
	  countHiddenLayer(inCountHiddenLayer),
	  alpha(inAlpha),
	  lambda(inLambda),
	  weightsHiddenLayer(NULL),
	  weightsOutputLayer(NULL)
{
}

// Release the weight arrays allocated by Initialize()/LoadNetFromDisk.
TDlambda::~TDlambda()
{
	if (weightsHiddenLayer != NULL)
	{
		// Free each hidden neuron's weight row, then the row table itself.
		int row = countHiddenLayer;
		while (row-- > 0)
			delete [] weightsHiddenLayer[row];
		delete [] weightsHiddenLayer;
	}

	// delete[] on a NULL pointer is a no-op, so no guard is needed here.
	delete [] weightsOutputLayer;
}

// Allocate the weight arrays and fill them with uniform random values in
// [0, 1]. Each hidden row and the output vector carry one extra slot (the
// last index) that holds the bias weight. Always returns 0.
int TDlambda::Initialize()
{
	// Seed the RNG so each run starts from different random weights.
	srand( (unsigned)time( NULL ) );

	// Hidden layer: countHiddenLayer rows of (countInputLayer + 1) weights.
	weightsHiddenLayer = new double*[countHiddenLayer];
	for (int h = 0; h < countHiddenLayer; h++)
	{
		weightsHiddenLayer[h] = new double[countInputLayer + 1];
		for (int w = 0; w <= countInputLayer; w++)
			weightsHiddenLayer[h][w] = Random();
	}

	// Output layer: one weight per hidden neuron plus the bias slot.
	weightsOutputLayer = new double[countHiddenLayer + 1];
	for (int o = 0; o <= countHiddenLayer; o++)
		weightsOutputLayer[o] = Random();

	return 0;
}

// Uniform pseudo-random double in [0, 1].
double TDlambda::Random()
{
	return rand() / (double)RAND_MAX;
}

// Forward pass through the net.
// input: countInputLayer values; y: caller-supplied buffer of length
// countHiddenLayer that receives the hidden activations (callers reuse it
// later for gradient construction). Returns the single sigmoid output.
double TDlambda::GetNetResponse(double *input, double *y)
{
	int h, j;

	for (h = 0; h < countHiddenLayer; h++)
	{
		// Start from the bias weight stored in the extra slot of each row.
		double hiddenNet = weightsHiddenLayer[h][countInputLayer];
		for (j = 0; j < countInputLayer; j++)
			hiddenNet += weightsHiddenLayer[h][j] * input[j];
		y[h] = ActivationFunction(hiddenNet);
	}

	// Output node: bias slot plus the weighted hidden activations.
	double outputNet = weightsOutputLayer[countHiddenLayer];
	for (h = 0; h < countHiddenLayer; h++)
		outputNet += weightsOutputLayer[h] * y[h];

	return ActivationFunction(outputNet);
}

// Logistic sigmoid squashing function: maps any net input into (0, 1).
// (An earlier clamped variant for |net| > 10 was deliberately disabled.)
double TDlambda::ActivationFunction(double net)
{
	return 1.0 / (1.0 + exp(-net));
}

// Gradient of the sigmoid output z with respect to every weight, for one
// input pattern x with hidden activations y (as produced by GetNetResponse).
// The bias slots ([countHiddenLayer] / [countInputLayer]) receive the
// per-node deltas, which double as the bias gradients (bias input = 1),
// exactly as the caller expects. Always returns 0.
int TDlambda::LocalGradientConstruction(double *x, double *y, double z, double **gradientHiddenLayer, double *gradientOutputLayer)
{
	int h;

	// dz/dnet at the output node (derivative of the sigmoid).
	double outputDelta = z * (1 - z);
	gradientOutputLayer[countHiddenLayer] = outputDelta;

	for (h = 0; h < countHiddenLayer; h++)
		gradientOutputLayer[h] = outputDelta * y[h];

	for (h = 0; h < countHiddenLayer; h++)
	{
		// Delta back-propagated to hidden node h, stored in its bias slot.
		double hiddenDelta = y[h] * (1 - y[h]) * outputDelta * weightsOutputLayer[h];
		gradientHiddenLayer[h][countInputLayer] = hiddenDelta;

		for (int j = 0; j < countInputLayer; j++)
			gradientHiddenLayer[h][j] = hiddenDelta * x[j];
	}

	return 0;
}

// One TD(lambda) training sweep over an episode.
// P_seq: `length` input vectors (one per time step); z_reward: the terminal
// outcome, used as the final prediction P[length]. For every step t the
// eligibility trace (lambda-discounted sum of gradients) is updated and the
// weight change alpha * (P[t+1] - P[t]) * trace is accumulated; all changes
// are applied to the weights at the end. Always returns 0.
int TDlambda::GradientConstruction(double **P_seq, double z_reward, int length/*, double *deltaWeightsOutputLayer, double **deltaWeightsHiddenLayer*/)
{
	// NOTE: `t` is declared at function scope because it is used after the
	// first loop; the original relied on the non-conforming MSVC6 for-scope
	// extension and did not compile as standard C++.
	int i, j, t;

	double *deltaWeightsOutputLayer;    // accumulated weight changes
	double **deltaWeightsHiddenLayer;

	double *gradWeightsOutputLayer;     // gradient at the current step
	double **gradWeightsHiddenLayer;

	double *gradWeightsOutputLayerAux;  // eligibility trace (discounted sum)
	double **gradWeightsHiddenLayerAux;

	double *P;        // predictions P[0..length-1] plus P[length] = z_reward

	double **yAll;    // hidden activations for every step, needed for gradients

	deltaWeightsOutputLayer = new double[countHiddenLayer + 1];
	gradWeightsOutputLayer = new double[countHiddenLayer + 1];
	gradWeightsOutputLayerAux = new double[countHiddenLayer + 1];

	deltaWeightsHiddenLayer = new double*[countHiddenLayer];
	gradWeightsHiddenLayer = new double*[countHiddenLayer];
	gradWeightsHiddenLayerAux = new double*[countHiddenLayer];

	for (i = 0; i < countHiddenLayer; i++)
	{
		deltaWeightsHiddenLayer[i] = new double[countInputLayer + 1];
		gradWeightsHiddenLayer[i] = new double[countInputLayer + 1];
		gradWeightsHiddenLayerAux[i] = new double[countInputLayer + 1];
	}

	P = new double[length + 1];

	yAll = new double*[length];

	for (i = 0; i < length; i++)
		yAll[i] = new double[countHiddenLayer];

	for (i = 0; i < countHiddenLayer + 1; i++)
		deltaWeightsOutputLayer[i] = 0;

	for (i = 0; i < countHiddenLayer; i++)
		for (j = 0; j < countInputLayer + 1; j++)
			deltaWeightsHiddenLayer[i][j] = 0;

	// Forward pass: record the prediction and hidden activations per step.
	for (t = 0; t < length; t++)
	{
		P[t] = GetNetResponse(P_seq[t], yAll[t]);
	}

	// The episode outcome acts as the "prediction" after the last step.
	P[t] = z_reward;

	for (t = 0; t < length; t++)
	{
		LocalGradientConstruction(P_seq[t], yAll[t], P[t], gradWeightsHiddenLayer, gradWeightsOutputLayer);

		if (t == 0)
		{
			// Trace starts as the first step's gradient.
			for(i = 0; i < countHiddenLayer + 1; i++)
				gradWeightsOutputLayerAux[i] = gradWeightsOutputLayer[i];

			for (i = 0; i < countHiddenLayer; i++)
				for (j = 0; j < countInputLayer + 1; j++)
					gradWeightsHiddenLayerAux[i][j] = gradWeightsHiddenLayer[i][j];
		}
		else
		{
			// trace = lambda * trace + current gradient.
			for(i = 0; i < countHiddenLayer + 1; i++)
				gradWeightsOutputLayerAux[i] = lambda * gradWeightsOutputLayerAux[i] + gradWeightsOutputLayer[i];

			for (i = 0; i < countHiddenLayer; i++)
				for (j = 0; j < countInputLayer + 1; j++)
					gradWeightsHiddenLayerAux[i][j] = lambda * gradWeightsHiddenLayerAux[i][j] + gradWeightsHiddenLayer[i][j];
		}

		// Accumulate the TD update: alpha * (P[t+1] - P[t]) * trace.
		for(i = 0; i < countHiddenLayer + 1; i++)
			deltaWeightsOutputLayer[i] += alpha * (P[t + 1] - P[t]) * gradWeightsOutputLayerAux[i];

		for (i = 0; i < countHiddenLayer; i++)
			for (j = 0; j < countInputLayer + 1; j++)
				deltaWeightsHiddenLayer[i][j] += alpha * (P[t + 1] - P[t]) * gradWeightsHiddenLayerAux[i][j];
	}

	// Apply the accumulated changes to the weights.
	for(i = 0; i < countHiddenLayer + 1; i++)
		weightsOutputLayer[i] += deltaWeightsOutputLayer[i];

	for (i = 0; i < countHiddenLayer; i++)
		for (j = 0; j < countInputLayer + 1; j++)
			weightsHiddenLayer[i][j] += deltaWeightsHiddenLayer[i][j];

	// Clean up all scratch storage.
	delete [] deltaWeightsOutputLayer;
	delete [] gradWeightsOutputLayer;
	delete [] gradWeightsOutputLayerAux;

	for (i = 0; i < countHiddenLayer; i++)
	{
		delete [] deltaWeightsHiddenLayer[i];
		delete [] gradWeightsHiddenLayer[i];
		delete [] gradWeightsHiddenLayerAux[i];
	}

	delete [] deltaWeightsHiddenLayer;
	delete [] gradWeightsHiddenLayer;
	delete [] gradWeightsHiddenLayerAux;

	delete [] P;

	for (i = 0; i < length; i++)
		delete [] yAll[i];

	delete [] yAll;

	return 0;
}

// Write the net's hyper-parameters and all weights as text, in the format
// read back by LoadNetFromDisk. Returns 0 on success, -1 if the file
// cannot be opened (the original dereferenced the NULL FILE* and crashed).
int TDlambda::SaveNetToDisk(char* fileName)
{
	FILE* out = fopen(fileName, "w");
	if (out == NULL) return -1;

	// Header: alpha, layer sizes, lambda.
	fprintf(out, "%f %d %d %f\n", alpha, countHiddenLayer, countInputLayer, lambda);

	// `i` must be visible after the first loop; the original reused a
	// for-init variable outside its loop (non-standard MSVC6 scoping).
	int i, j;

	// One line per hidden neuron, bias weight last.
	for (i = 0; i < countHiddenLayer; i++)
	{
		for (j = 0; j < countInputLayer + 1; j++)
			fprintf(out, "%f ", weightsHiddenLayer[i][j]);
		fprintf(out, "\n");
	}

	// Output-layer weights, bias last.
	for (i = 0; i < countHiddenLayer + 1; i++)
		fprintf(out, "%f ", weightsOutputLayer[i]);

	fclose(out);

	return 0;
}

int TDlambda::LoadNetFromDisk(char* fileName)
{
	FILE* out = fopen(fileName, "r");

	fscanf(out, "%lf %d %d %lf\n", &alpha, &countHiddenLayer, &countInputLayer, &lambda);

	for (int i = 0; i < countHiddenLayer; i++)
	{
		for (int j = 0; j < countInputLayer + 1; j++) 
			fscanf(out, "%lf ", &weightsHiddenLayer[i][j]);
	}

	for(i = 0; i < countHiddenLayer + 1; i++)
		fscanf(out, "%lf ", &weightsOutputLayer[i]);

	fclose(out);

	return 0;
}

