#include "BackPropagationTrainingANN.h"

/// Builds the training scratch buffers for a network with `layerNum` layers.
/// @param layerNum   total number of layers (input layer included)
/// @param neuronNums neuron count per layer, neuronNums[0..layerNum-1]
/// @param learnRate  gradient-descent step size
/// @param moment     momentum coefficient (0 disables momentum)
/// NOTE(review): both rates are `int` (signature fixed by the header), so a
/// fractional learning rate like 0.5 truncates to 0 — consider `double` there.
BackPropagationTrainingANN::BackPropagationTrainingANN (int layerNum, int* neuronNums, int learnRate , int moment ){

	m_learningRate	= learnRate;
	m_momentum		= moment;

	// Delta-error scratch space: one value per neuron, indexed [layer][neuron].
	m_deltaError = new double*[layerNum];
	for(int i = 0 ; i < layerNum ; i++){
		m_deltaError[i] = new double[neuronNums[i]];
	}

	// Previous weight *changes* for the momentum term, indexed
	// [layer][neuron][incoming weight]; each neuron in layer i has
	// neuronNums[i-1] inputs plus one bias slot.
	m_previousWeights = new double **[layerNum];
	// Layer 0 has no incoming weights; null it out so the destructor can
	// safely skip (or delete) it instead of reading an uninitialized pointer.
	m_previousWeights[0] = nullptr;
	for(int i = 1 ; i < layerNum ; i++){
		m_previousWeights[i] = new double*[neuronNums[i]];
		for(int j = 0 ; j < neuronNums[i] ; j++){
			// `new double[n]()` value-initializes every slot to 0.0,
			// replacing the separate zeroing loop.
			m_previousWeights[i][j] = new double[neuronNums[i-1]+1]();
		}
	}
}
/// Releases the trainer's own scratch buffers (m_deltaError and
/// m_previousWeights). The network's weights belong to ANN and are NOT
/// touched here — the original code wrongly deleted ANN->m_neuronWeights[i],
/// corrupting the network and leaking every m_previousWeights row.
BackPropagationTrainingANN::~BackPropagationTrainingANN (){

	for(int i = 0 ; i < ANN->m_numberOfLayers ; i++)
		delete[] m_deltaError[i];
	delete[] m_deltaError;

	// Rows start at 1: the constructor never allocates per-neuron arrays for
	// layer 0 (the input layer has no incoming weights), so touching
	// m_previousWeights[0][j] would dereference an uninitialized pointer.
	for(int i = 1 ; i < ANN->m_numberOfLayers ; i++){
		for(int j = 0 ; j < ANN->m_numOfNeurons[i] ; j++)
			delete[] m_previousWeights[i][j];
		delete[] m_previousWeights[i];
	}
	delete[] m_previousWeights;
}

/// One training step: forward pass on `input`, then backpropagate the error
/// against `target` and adjust every weight (momentum first, then the
/// gradient-descent delta, which is stored for the next step's momentum).
/// Assumes sigmoid activations: the derivative is out * (1 - out).
void BackPropagationTrainingANN::backpropogate(double *input, double *target){

	// Forward pass: refresh every neuron's activation for this sample.
	ANN->feedForward(input);

	const int lastLayer = ANN->m_numberOfLayers - 1;

	// Output-layer deltas: sigmoid derivative times the raw output error.
	for(int n = 0 ; n < ANN->m_numOfNeurons[lastLayer] ; n++){
		double out = ANN->m_neuronValues[lastLayer][n];
		m_deltaError[lastLayer][n] = out * (1 - out) * (target[n] - out);
	}

	// Hidden-layer deltas, propagated backwards one layer at a time:
	// each neuron accumulates the next layer's deltas weighted by the
	// connections leaving it.
	for(int layer = lastLayer - 1 ; layer > 0 ; layer--){
		for(int n = 0 ; n < ANN->m_numOfNeurons[layer] ; n++){
			double weightedSum = 0.0;
			for(int next = 0 ; next < ANN->m_numOfNeurons[layer+1] ; next++)
				weightedSum += m_deltaError[layer+1][next] * ANN->m_neuronWeights[layer+1][next][n];
			double out = ANN->m_neuronValues[layer][n];
			m_deltaError[layer][n] = out * (1 - out) * weightedSum;
		}
	}

	// Momentum: nudge every weight (bias slot included, hence <=) by a
	// fraction of the previous step's weight change. No-op when momentum is 0.
	if(m_momentum != 0){
		for(int layer = 1 ; layer <= lastLayer ; layer++){
			int prevCount = ANN->m_numOfNeurons[layer-1];
			for(int n = 0 ; n < ANN->m_numOfNeurons[layer] ; n++)
				for(int w = 0 ; w <= prevCount ; w++)
					ANN->m_neuronWeights[layer][n][w] += m_momentum * m_previousWeights[layer][n][w];
		}
	}

	// Steepest-descent update; each delta is recorded in m_previousWeights
	// so the next call's momentum pass can reuse it.
	for(int layer = 1 ; layer <= lastLayer ; layer++){
		int prevCount = ANN->m_numOfNeurons[layer-1];
		for(int n = 0 ; n < ANN->m_numOfNeurons[layer] ; n++){
			for(int w = 0 ; w < prevCount ; w++){
				m_previousWeights[layer][n][w] = m_learningRate * m_deltaError[layer][n] * ANN->m_neuronValues[layer-1][w];
				ANN->m_neuronWeights[layer][n][w] += m_previousWeights[layer][n][w];
			}
			// Bias weight: its "input" is the constant 1, so no value factor.
			m_previousWeights[layer][n][prevCount] = m_learningRate * m_deltaError[layer][n];
			ANN->m_neuronWeights[layer][n][prevCount] += m_previousWeights[layer][n][prevCount];
		}
	}
}