package edu.fiu.ann;

import java.util.Arrays;
import java.util.Random;


import edu.fiu.ann.costfunction.CostFunction;
import edu.fiu.ann.costfunction.SumErrorSquare;
import edu.fiu.ann.threshold.Sigmoid;
import edu.fiu.ann.threshold.ThresholdFunction;
import edu.fiu.ann.utility.Pair;
import edu.fiu.terminator.IterationTerminator;
import edu.fiu.terminator.TrainingTerminator;



/**
 * The three layers backprapagation neural network.
 *
 * @author Yexi Jiang (http://users.cis.fiu.edu/~yjian004/)
 * @date 2011-4-4
 */
public class ThreeLayersBackprapagation extends NeuralNetwork{

	protected double learningRate;
	protected int trainingTime = 1000;	//	default iteration count for the fallback terminator

	protected ThresholdFunction threshold;
	protected CostFunction costFunction;
	protected TrainingTerminator terminator;

	protected int inputLayerSize;	//	number of input neurons, including the bias neuron
	protected int hiddenLayerSize;	//	number of hidden neurons, including the bias neuron
	protected int outputLayerSize;

	protected double[][] weightsInputToHidden;	//	[input neuron index][hidden neuron index]
	protected double[][] weightsHiddenToOutput;	//	[hidden neuron index][output neuron index]

	/**
	 * Construct the three layer neural network with randomly initialized weights.
	 * @param inputNeuronSize	number of neurons in input layer (bias neuron added internally)
	 * @param hiddenNeuronSize	number of neurons in hidden layer (bias neuron added internally)
	 * @param outputNeuronSize	number of neurons in output layer, usually 1
	 * @param initType			the weight initial type, i.e. NeuralNetwork.GUASSIAN for N(0,1)
	 * 							samples; any other value yields uniform samples in [0,1)
	 * @param rate				the learning rate of weight update
	 */
	public ThreeLayersBackprapagation(int inputNeuronSize, int hiddenNeuronSize, int outputNeuronSize, String initType, double rate){
		learningRate = rate;

		inputLayerSize = inputNeuronSize + 1;	//	include 1 bias neuron
		hiddenLayerSize = hiddenNeuronSize + 1;	//	include 1 bias neuron
		outputLayerSize = outputNeuronSize;

		//	Helper components stay null here and are defaulted lazily in train(),
		//	so setters (e.g. setTerminator) can override them before training.
		threshold = null;
		costFunction = null;
		terminator = null;

		Random random = new Random();
		weightsInputToHidden = initializeWeights(inputLayerSize, hiddenLayerSize, initType, random);
		weightsHiddenToOutput = initializeWeights(hiddenLayerSize, outputLayerSize, initType, random);
	}

	/**
	 * Initialize the neural network by given the weights.
	 * @param inputNeuronSize		number of neurons in input layer
	 * @param hiddenNeuronSize		number of neurons in hidden layer
	 * @param outputNeuronSize		number of neurons in output layer, usually 1
	 * @param weightsInputToHidden	weights from input layer to hidden layer, the weights of bias neuron is included
	 * @param weightsHiddenToOutput	weights from hidden layer to output layer, the weights of bias neuron is included
	 * @param rate					the learning rate of weight update
	 */
	public ThreeLayersBackprapagation(int inputNeuronSize, int hiddenNeuronSize, int outputNeuronSize, 
			double[][] weightsInputToHidden, double[][] weightsHiddenToOutput, double rate){
		learningRate = rate;	//	BUG FIX: rate was previously ignored, leaving learningRate at 0.0

		inputLayerSize = inputNeuronSize + 1;	//	include 1 bias neuron
		hiddenLayerSize = hiddenNeuronSize + 1;	//	include 1 bias neuron
		outputLayerSize = outputNeuronSize;

		threshold = new Sigmoid();
		costFunction = new SumErrorSquare();
		terminator = new IterationTerminator(trainingTime);

		this.weightsInputToHidden = weightsInputToHidden;
		this.weightsHiddenToOutput = weightsHiddenToOutput;
	}

	/**
	 * Build a rows x cols weight matrix filled with random values.
	 * @param initType	NeuralNetwork.GUASSIAN for Gaussian samples, anything else for uniform [0,1)
	 * @param random	shared random source so both layers use one generator
	 */
	private static double[][] initializeWeights(int rows, int cols, String initType, Random random){
		double[][] weights = new double[rows][cols];
		boolean gaussian = GUASSIAN.equals(initType);	//	constant-first avoids NPE on a null initType
		for(int i = 0; i < rows; ++i){
			for(int j = 0; j < cols; ++j){
				weights[i][j] = gaussian ? random.nextGaussian() : random.nextDouble();
			}
		}
		return weights;
	}

	public void setTerminator(TrainingTerminator terminator){
		this.terminator = terminator;
	}

	@Override
	/**
	 * Training the neural networks with training data.
	 * @param trainingData	the normalized training data, all elements should be normalized between 0 and 1;
	 * 						each row holds the feature values followed by the target value(s)
	 */
	public void train(double[][] trainingData) {
		//	fall back to default components when none were configured
		if(threshold == null){
			threshold = new Sigmoid();
		}
		if(costFunction == null){
			costFunction = new SumErrorSquare();
		}
		if(terminator == null){
			terminator = new IterationTerminator(trainingTime);
		}
		stochasticGradientDescentTraining(trainingData);
	}

	/**
	 * Learning neural network via stochastic gradient descent.
	 * @param trainingData	Training data stored in two-dimensional array, row for records, col for features.
	 */
	protected void stochasticGradientDescentTraining(double[][] trainingData){
		while(false == terminator.terminate()){
			for(double[] instance : trainingData){
				Pair<double[], double[]> outputs = trainUpdateInfo(instance);
				double[] outputLayerOutput = outputs.getKey();
				double[] hiddenLayerOutput = outputs.getValue();

				//	delta of the output layer: f'(o) * dCost/dO for each output neuron.
				//	Targets occupy the last outputLayerSize entries of the instance
				//	(previously only the very last entry was used, which was wrong
				//	for networks with more than one output neuron).
				double[] outputDeltas = new double[outputLayerSize];
				for(int i = 0; i < outputLayerSize; ++i){
					double realValue = instance[instance.length - outputLayerSize + i];
					outputDeltas[i] = threshold.partialDerivative(outputLayerOutput[i]) * costFunction.partialDerivative(realValue, outputLayerOutput[i]);
				}

				//	delta of the hidden layer: back-propagated output deltas scaled
				//	by the sigmoid derivative h * (1 - h)
				double[] hiddenDeltas = new double[hiddenLayerSize];
				for(int i = 0; i < hiddenLayerSize; ++i){
					for(int j = 0; j < outputLayerSize; ++j){
						hiddenDeltas[i] += weightsHiddenToOutput[i][j] * outputDeltas[j];
					}
					hiddenDeltas[i] *= hiddenLayerOutput[i] * (1 - hiddenLayerOutput[i]);
				}

				//	update weights between hidden layer and output layer
				for(int i = 0; i < hiddenLayerSize; ++i){
					for(int j = 0; j < outputLayerSize; ++j){
						weightsHiddenToOutput[i][j] += learningRate * outputDeltas[j] * hiddenLayerOutput[i];
					}
				}

				//	update weights between input layer and hidden layer;
				//	row 0 is the bias neuron whose activation is implicitly 1
				for(int i = 0; i < inputLayerSize; ++i){
					if(i == 0){
						for(int j = 0; j < hiddenLayerSize; ++j){
							weightsInputToHidden[i][j] += learningRate * hiddenDeltas[j];
						}
					}
					else{
						for(int j = 0; j < hiddenLayerSize; ++j){
							weightsInputToHidden[i][j] += learningRate * hiddenDeltas[j] * instance[i - 1];
						}
					}
				}
			}
		}
	}

	/**
	 * Forward pass: compute the hidden layer activations for one input vector.
	 * Only the first (inputLayerSize - 1) entries of {@code input} are read.
	 * @param input	feature vector
	 * @return	hidden layer activations; index 0 is the bias neuron (fixed to 1)
	 */
	private double[] computeHiddenValues(double[] input){
		double[] hiddenValues = new double[hiddenLayerSize];
		hiddenValues[0] = 1;	//	bias neuron
		for(int i = 1; i < hiddenLayerSize; ++i){
			double sum = weightsInputToHidden[0][i];	//	input-layer bias contribution
			for(int j = 1; j < inputLayerSize; ++j){
				sum += weightsInputToHidden[j][i] * input[j - 1];
			}
			hiddenValues[i] = threshold.f(sum);
		}
		return hiddenValues;
	}

	/**
	 * Forward pass: compute the output layer values from the hidden activations.
	 * @param hiddenValues	hidden layer activations, bias neuron at index 0
	 * @return	activations of the output layer
	 */
	private double[] computeOutputValues(double[] hiddenValues){
		double[] outputValues = new double[outputLayerSize];
		for(int i = 0; i < outputLayerSize; ++i){
			double sum = 0.0;
			for(int j = 0; j < hiddenLayerSize; ++j){
				sum += weightsHiddenToOutput[j][i] * hiddenValues[j];
			}
			outputValues[i] = threshold.f(sum);
		}
		return outputValues;
	}

	@Override
	/**
	 * Run the forward pass for a training instance without updating weights;
	 * the outputs are consumed by the gradient-descent step.
	 * @param trainingInstance	the feature values followed by the target value(s)
	 * @return	the pair with first element be the output of output layer
	 * and second element be the output of the hidden layer
	 */
	public Pair<double[], double[]> trainUpdateInfo(double[] trainingInstance) {
		//	A training instance holds (inputLayerSize - 1) features plus
		//	outputLayerSize target values. For a single output this equals the
		//	original inputLayerSize check, so existing callers are unaffected.
		if(trainingInstance.length != inputLayerSize - 1 + outputLayerSize){
			throw new Error("Input instance with incorrect dimension.");
		}

		double[] hiddenValues = computeHiddenValues(trainingInstance);
		double[] outputValues = computeOutputValues(hiddenValues);
		return new Pair<double[], double[]>(outputValues, hiddenValues);
	}

	public void setTrainingTime(int time){
		this.trainingTime = time;
	}

	@Override
	/**
	 * Use neural network to get the output according to input.
	 * @param input	the input instance; must provide at least (inputLayerSize - 1) features
	 * @return	the output.
	 */
	public double[] output(double[] input) {
		return computeOutputValues(computeHiddenValues(input));
	}

	public double[][] outputWeightsInputToHidden(){
		return this.weightsInputToHidden;
	}

	public double[][] outputWeightsHiddenToOutput(){
		return this.weightsHiddenToOutput;
	}

	/**
	 * Small smoke test: train on a few 3-feature records (last column is the
	 * target) and print the prediction for one unseen instance.
	 */
	public static void main(String[] args) throws Exception{
		int inputLayerSize = 3;
		int hiddenLayerSize = 3;
		int outputLayerSize = 1;
		double learningRate = 0.3;

		double[][] trainingData = {{0.4, 0.2, 0.9, 0.54}, {0.2, 0.5, 0.8, 0.56}, {0.4, 0.1, 0.7, 0.35},
									{0.1, 0.2, 0.3, 0.09}, {0.2, 0.2, 0.2, 0.08}, {0.5, 0.5, 0.5, 0.5},
									{0.5, 0.1, 0.3, 0.18}};
		double[] test = {0.4, 0.2, 0.6};

		NeuralNetwork nn = new ThreeLayersBackprapagation(inputLayerSize, hiddenLayerSize, outputLayerSize, NeuralNetwork.GUASSIAN, learningRate);
		nn.train(trainingData);
		double[] output = nn.output(test);
		System.out.println(output[0]);
	}

}
