#include "neurnet.h"

#include <cstdlib>	// rand, srand, RAND_MAX

#define frand() 0.5 //((double) rand() / (double)RAND_MAX)


// Calculeaza rezultatul retelei la un sample anume
// Forward pass on one stored sample: input -> hidden -> output.
// Activations are written into the shared resultsHidden / resultsOutput
// buffers (overwritten on every call). The output layer has numInput
// neurons: the network reconstructs its own input.
void NeuralNetwork::CalculateResult(int sampleNumber)
{
	using namespace std;

	// Hidden layer: weighted sum of the sample entries, plus the bias
	// (stored as the extra weight at index numInput), then the logistic
	// sigmoid 1/(1+exp(-x)).
	for(int h=0; h<numHidden; h++)
	{
		double acc=0;

		for(int in=0; in<numInput; in++)
			acc+=weightsHidden[h][in]*samplesInput[sampleNumber][in];

		acc+=weightsHidden[h][numInput];	// bias term

		resultsHidden[h]=1./(1.+exp(-acc));
	}

	// Output layer: same scheme over the hidden activations; the bias
	// weight sits at index numHidden.
	for(int o=0; o<numInput; o++)
	{
		double acc=0;

		for(int h=0; h<numHidden; h++)
			acc+=weightsOutput[o][h]*resultsHidden[h];

		acc+=weightsOutput[o][numHidden];	// bias term

		resultsOutput[o]=1/(1+exp(-acc));
	}
}


// Accumulates backpropagation gradients for one sample into
// correctOutput / correctHidden. The accumulators are NOT cleared here:
// Train() zeroes them once per epoch and averages over the batch.
//
// The target vector is the sample itself (autoencoder training); each
// delta is the squared-error derivative times the sigmoid derivative
// out*(1-out).
void NeuralNetwork::TrainSample(int sampleNumber)
{
	using namespace std;

	int i, j, k;

	// Forward pass first so resultsHidden / resultsOutput are current.
	CalculateResult(sampleNumber);

	// Output layer: delta_j = (out_j - target_j) * out_j * (1 - out_j).
	// The delta does not depend on i, so it is computed once per neuron
	// instead of inside the inner loop (the original recomputed it for
	// every weight).
	for(j=0; j<numInput; j++)
	{
		double delta=(resultsOutput[j]-samplesInput[sampleNumber][j])
		             *resultsOutput[j]*(1-resultsOutput[j]);

		for(i=0; i<numHidden; i++)
			correctOutput[j][i]+=resultsHidden[i]*delta;

		// Bias gradient (the bias input is implicitly 1).
		correctOutput[j][numHidden]+=delta;
	}

	// Hidden layer: propagate the output deltas back through
	// weightsOutput, then apply this neuron's own sigmoid derivative.
	for(j=0; j<numHidden; j++)
	{
		double sum=0;
		for(k=0; k<numInput; k++)
			sum+=weightsOutput[k][j]*(resultsOutput[k]-samplesInput[sampleNumber][k])
			    *resultsOutput[k]*(1-resultsOutput[k]);

		// Loop-invariant over i: hoisted out of the weight loop.
		double delta=sum*resultsHidden[j]*(1-resultsHidden[j]);

		for(i=0; i<numInput; i++)
			correctHidden[j][i]+=samplesInput[sampleNumber][i]*delta;

		// Bias gradient.
		correctHidden[j][numInput]+=delta;
	}
}



// Builds a network with _numInput inputs, _numHidden hidden neurons and
// _numInput outputs (autoencoder layout). Allocates the weight,
// correction and result arrays and fills the weights with small random
// values. If either size is invalid (<= 0) the network is left empty
// with every pointer null, so the destructor is safe to run.
NeuralNetwork::NeuralNetwork(int _numInput, int _numHidden)
{
	using namespace std;
	int i, j;

	time(&startTime);

	// Marker used by TrainigSample() to detect "no samples attached yet".
	samplesInput=NULL;
	samplesCount=0;

	// Null everything so a rejected construction leaves the object in a
	// state the destructor can handle. (The original left these
	// uninitialized in the failure branch, making destruction undefined
	// behavior.)
	weightsHidden=NULL;
	correctHidden=NULL;
	weightsOutput=NULL;
	correctOutput=NULL;
	resultsHidden=NULL;
	resultsOutput=NULL;

	if((_numInput>0)&&(_numHidden>0))
	{
		srand((unsigned)time(0)); 

		numInput=_numInput;
		numHidden=_numHidden;
		
		// Hidden layer: numInput weights + 1 bias per neuron.
		weightsHidden=new double* [numHidden];
		correctHidden=new double* [numHidden];
		for(i=0; i<numHidden; i++)
		{
			weightsHidden[i]=new double [numInput+1];
			correctHidden[i]=new double [numInput+1];

			// Uniform random init in (-3/numInput, 3/numInput).
			// NOTE: the frand() macro at the top of the file is stubbed
			// to 0.5, which makes 2*frand()-1 == 0 and thus every
			// weight exactly 0 -- a fully symmetric hidden layer that
			// gradient descent can never break. Use the real uniform
			// draw (shown in the macro's trailing comment) directly.
			for(j=0; j<=numInput; j++)
				weightsHidden[i][j]=(2*((double)rand()/(double)RAND_MAX)-1)*3/numInput;

		}

		// Output layer: numHidden weights + 1 bias per neuron.
		weightsOutput=new double* [numInput];
		correctOutput=new double* [numInput];

		for(i=0; i<numInput; i++)
		{
			weightsOutput[i]=new double [numHidden+1];
			correctOutput[i]=new double [numHidden+1];

			// Uniform random init in (-3/numHidden, 3/numHidden).
			for(j=0; j<=numHidden; j++)
				weightsOutput[i][j]=(2*((double)rand()/(double)RAND_MAX)-1)*3/numHidden;
		}

		// Activation buffers reused by every forward pass.
		resultsHidden=new double [numHidden];
		resultsOutput=new double [numInput];
	}
	else
	{
		// Invalid sizes: leave an empty, safely destructible network.
		numInput=0;
		numHidden=0;
	}
}

// Releases every array allocated by the constructor.
// NOTE(review): if the constructor rejected its sizes, numInput and
// numHidden are 0 (so the loops do nothing) but the pointers below were
// never initialized, and the delete[] calls still read them -- consider
// null-initializing the pointers in the constructor.
NeuralNetwork::~NeuralNetwork()
{
	// Per-neuron weight/correction rows of the hidden layer.
	for(int h=0; h<numHidden; h++)
	{
		delete[] weightsHidden[h];
		delete[] correctHidden[h];
	}
	delete[] weightsHidden;
	delete[] correctHidden;

	// Per-neuron weight/correction rows of the output layer.
	for(int o=0; o<numInput; o++)
	{
		delete[] weightsOutput[o];
		delete[] correctOutput[o];
	}
	delete[] weightsOutput;
	delete[] correctOutput;

	// Shared activation buffers.
	delete[] resultsHidden;
	delete[] resultsOutput;
}

// Attaches the training set (borrowed, not copied -- the caller keeps
// ownership) and precomputes the total signal energy used for the SNR
// reported by Train().
// Returns 0 on success, -1 if the entry width does not match the input
// layer or if a sample set is already attached.
int NeuralNetwork::TrainigSample(double** _samplesInput, int _samplesCount, int _sampleEntries)
{
	using namespace std;

	// Guard clauses: width must match the input layer, and only one
	// sample set may ever be attached.
	if(_sampleEntries!=numInput)
		return -1;
	if(samplesInput!=NULL)
		return -1;

	samplesInput=_samplesInput;
	samplesCount=_samplesCount;

	// Signal energy: sum of squares over every entry of every sample.
	samplesSigma=0;
	for(int s=0; s<samplesCount; s++)
		for(int e=0; e<numInput; e++)
			samplesSigma+=samplesInput[s][e]*samplesInput[s][e];

	return 0;
}

// Runs the network on every attached sample and copies the output
// activations into results[sampleIndex][outputIndex].
//
// results must be _resultsCount x _resultsEntries, matching the sample
// count and the input/output width. Returns 0 on success and -1 on a
// dimension mismatch. (The original silently skipped the copy and still
// returned 0 on mismatch, making the failure invisible to callers; -1
// is consistent with TrainigSample().)
int NeuralNetwork::GetResults(double** results,int _resultsCount, int _resultsEntries)
{
	using namespace std;

	int i,j;

	// Dimension mismatch is an error, not a silent no-op.
	if((_resultsCount!=samplesCount)||(_resultsEntries!=numInput))
		return -1;

	for(i=0; i<samplesCount; i++)
	{
		// Forward pass fills resultsOutput for sample i.
		CalculateResult(i);
		for(j=0; j<numInput; j++)
			results[i][j]=resultsOutput[j];
	}

	return 0;
}

// Full-batch gradient-descent training loop with a simple
// anti-oscillation learning-rate schedule.
//
// Parameters (a value of 0 disables that stopping condition):
//   max_iter    - maximum number of epochs
//   max_seconds - wall-clock budget, measured from construction time
//                 (stops 2 seconds before the limit as a margin)
//   min_SNR     - stop once the reconstruction SNR in dB exceeds this
//   start_ETA   - initial learning rate; 0 keeps the default of 3
//   verboose    - non-zero: print per-epoch stats; zero: spinner only
// Always returns 0.
//
// NOTE(review): if no sample set was attached, samplesCount is 0, the
// epoch loop does no work and samplesError stays 0, so the
// log10(samplesSigma/samplesError) below divides by zero -- confirm
// Train() is only called after a successful TrainigSample().
int NeuralNetwork::Train(int max_iter, int max_seconds, int min_SNR, double start_ETA, int verboose)
{
	using namespace std;

	int i,j,cont=1,iter=1,SNR;

	// Learning-rate adaptation state: eta shrinks when the error keeps
	// oscillating instead of decreasing.
	double eta=3;
	double lastError=0;
	double samplesError=0;
	int oscNumber=0;	// accumulated "error rose / trend flipped" events
	int lastSign=1;		// 1 = error decreased last epoch, 0 = increased

	// Spinner frames shown in non-verbose mode.
	char semn[4]={'|','\\','-','/'};

	if(start_ETA!=0)
		eta=start_ETA;

	if(!verboose)
		cout<<" ";

	while(cont)
	{
		// Zero the gradient accumulators for this epoch.
		for(j=0; j<numHidden; j++)
			for(i=0; i<=numInput; i++)
				correctHidden[j][i]=0;

		for(j=0; j<numInput; j++)
			for(i=0; i<=numHidden; i++)
				correctOutput[j][i]=0;

		samplesError=0;
		SNR=0;

		// Accumulate gradients and the squared reconstruction error
		// over every sample (full-batch pass).
		for(i=0; i<samplesCount; i++)
		{
			TrainSample(i);
			
			// Squared error of the forward pass TrainSample() just ran,
			// against the sample itself (autoencoder target).
			for(j=0; j<numInput; j++)
			{
				samplesError+=(double)(resultsOutput[j]-samplesInput[i][j])*(double)(resultsOutput[j]-samplesInput[i][j]);
			}
		}

		// Reconstruction SNR in dB, using the signal energy precomputed
		// by TrainigSample().
		SNR=(int)(10*log10(samplesSigma/samplesError));

		// Gradient-descent step: accumulated corrections averaged over
		// the batch and scaled by the learning rate.
		for(j=0; j<numHidden; j++)
			for(i=0; i<=numInput; i++)
				weightsHidden[j][i]-=eta*correctHidden[j][i]/samplesCount;

		for(j=0; j<numInput; j++)
			for(i=0; i<=numHidden; i++)
				weightsOutput[j][i]-=eta*correctOutput[j][i]/samplesCount;

		// Progress report: full stats or a one-character spinner.
		if(verboose)
		{
			cout<<"Traininig number "<<iter<<": Current absolute errror: "<<samplesError<<" SNR: "<<SNR<<" eta: "<<eta<<"\n";
		}
		else
		{
			cout<<"\b"<<semn[iter%4];
		}

		iter++;

		// Learning-rate adaptation: count oscillations (error rising,
		// or the downward trend flipping); steady improvement pays the
		// counter back down.
		if(lastError>samplesError)
		{
			if(lastSign==1)
			{
				if(oscNumber) oscNumber--;
			}
			else
				oscNumber++;
			
			lastSign=1;
		}
		else
		{
			oscNumber++;		
			lastSign=0;
		}
		lastError=samplesError;

		// Too many oscillations: damp the learning rate.
		if(oscNumber>4)
		{
			oscNumber=0;
			eta=eta/1.1f;
		}

		// Stopping conditions
		// epoch count
		if(max_iter)
			if(iter>max_iter)
				cont=0;
		
		// wall-clock time (2-second safety margin)
		if(max_seconds)
		{
			time_t now;
			time(&now);

			if(difftime(now,startTime)>max_seconds-2)
				cont=0;

		}

		// SNR target reached
		if(min_SNR)
			if(SNR>min_SNR)
				cont=0;

	}
	if(verboose)
	{
		cout<<"\nLearning Algorithm Statistics:\n";
		cout<<"Iterations:     "<<iter;
		cout<<"\nAbsolute error: "<<samplesError;
		cout<<"\nSNR:            "<<SNR;
		cout<<"\nTime:           "<<difftime(time(NULL),startTime)<<" seconds \n\n";
	}
	else
		cout<<"\b";

	return 0;
}


// Returns the internal hidden-layer weight matrix (not a copy):
// numHidden rows of numInput+1 doubles, with the bias stored last.
double** NeuralNetwork::GetHiddenWeights()
{
	return weightsHidden;
}
// Returns the internal output-layer weight matrix (not a copy):
// numInput rows of numHidden+1 doubles, with the bias stored last.
double** NeuralNetwork::GetOutputWeights()
{
	return weightsOutput;
}
// Runs a forward pass on the given sample and returns the internal
// hidden-activation buffer (overwritten by the next forward pass).
double*	 NeuralNetwork::GetHiddenOutputs(int sampleNumber)
{
	CalculateResult(sampleNumber);
	return resultsHidden;
}

// Evaluates only the output layer, from externally supplied hidden
// activations given as fixed-point UWORD values. Each value is divided
// by 255 twice -- presumably undoing a 255*255 quantization scale from
// an external pipeline; confirm against the caller. Writes numInput
// sigmoid outputs into resultLocation. Always returns 0.
int NeuralNetwork::GetPartialResults(UWORD* hiddenOutput, double* resultLocation)
{
	for(int o=0; o<numInput; o++)
	{
		double acc=0;

		// Weighted sum of the de-quantized hidden activations.
		for(int h=0; h<numHidden; h++)
			acc+=weightsOutput[o][h]*((double)hiddenOutput[h])/255/255;

		// Bias weight is stored after the numHidden regular weights.
		acc+=weightsOutput[o][numHidden];

		// Logistic sigmoid 1/(1+exp(-x)).
		resultLocation[o]=1/(1+exp(-acc));

		// Note: accumulating in a local and storing once is equivalent
		// to the original's in-place accumulation in resultLocation[o].
	}
	
	return 0;
}
