/**
 * NeuralNetwork.java
 * 
 * Implementation of a Backpropagation Multi Layer Neural Network. Includes implementation of
 * solving XOR. Main function used to test out the training algorithm. 
 * 
 * Influenced by: http://takinginitiative.net/2008/04/03/basic-neural-network-tutorial-theory/
 * 
 */

package com.swin.energy;

import java.util.Arrays;
import java.util.Random;

public class NeuralNetwork {

	// Training hyper-parameters.
	double learningCoefficient = 0.5; // gradient-descent step size
	double momentum = 0.1; // fraction of the previous weight delta carried forward

	// Number of times updateWeights() has been called. Previously fed a debug
	// print; kept so any existing code reading it still works.
	int counter = 0;

	// Layer sizes. nInput includes the extra bias node (+1).
	int nHidden, nInput, nOutput;

	// Activation values for each layer. The last input node is a bias fixed at 1.0.
	double inputNodes[];
	double hiddenNodes[];
	double outputNodes[];

	// Weights, indexed [toNode][fromNode].
	double hiddenWeights[][]; // input layer -> hidden layer
	double outputWeights[][]; // hidden layer -> output layer

	// Error gradients (deltas) computed by the most recent backPropagation().
	double hiddenErrorGradient[];
	double outputErrorGradient[];

	// Targets supplied to the most recent backPropagation() call.
	double desiredOutputs[];

	// Previous weight deltas, used for the momentum term.
	double changeToHiddenWeights[][];
	double changeToOutputWeights[][];

	/**
	 * Builds a fully connected 3-layer network (input, hidden, output).
	 * Weights are zero until {@link #initialiseWeights()} is called
	 * ({@link #trainNetwork} calls it for you).
	 *
	 * @param numberOfInputs      number of real inputs; a bias node is added internally
	 * @param numberOfHiddenNodes size of the single hidden layer
	 * @param numberOfOutputNodes size of the output layer
	 */
	public NeuralNetwork(int numberOfInputs, int numberOfHiddenNodes,
			int numberOfOutputNodes) {
		this.nHidden = numberOfHiddenNodes;
		this.nInput = numberOfInputs + 1; // +1 for bias node
		this.nOutput = numberOfOutputNodes;

		this.inputNodes = new double[nInput];
		this.hiddenNodes = new double[nHidden];
		this.outputNodes = new double[nOutput];
		Arrays.fill(inputNodes, 1.0);
		Arrays.fill(hiddenNodes, 1.0);
		Arrays.fill(outputNodes, 1.0);

		this.hiddenWeights = new double[nHidden][nInput];
		this.outputWeights = new double[nOutput][nHidden];

		this.hiddenErrorGradient = new double[nHidden];
		this.outputErrorGradient = new double[nOutput];

		this.desiredOutputs = new double[nOutput];

		this.changeToHiddenWeights = new double[nHidden][nInput];
		this.changeToOutputWeights = new double[nOutput][nHidden];
	}

	/** Randomises all weights using an unseeded {@link Random}. */
	public void initialiseWeights() {
		initialiseWeights(new Random());
	}

	/**
	 * Randomises all weights: hidden-layer weights in [-0.2, 0.2) and
	 * output-layer weights in [-2.0, 2.0), matching the original ranges.
	 *
	 * @param rand randomness source; pass a seeded Random for reproducible runs
	 */
	public void initialiseWeights(Random rand) {
		randomise(hiddenWeights, -0.2, 0.2, rand);
		randomise(outputWeights, -2.0, 2.0, rand);
	}

	// Fills the matrix with uniform random values in [min, max).
	private static void randomise(double[][] matrix, double min, double max,
			Random rand) {
		for (double[] row : matrix) {
			for (int i = 0; i < row.length; i++) {
				row[i] = rand.nextDouble() * (max - min) + min;
			}
		}
	}

	/**
	 * Feeds one record forward through the network.
	 *
	 * @param inputs one value per input node (excluding the bias node)
	 * @return the output activations. NOTE: this is the internal array, not a
	 *         copy; it is overwritten by the next forward pass.
	 * @throws IllegalArgumentException if the record length does not match the
	 *         number of input nodes
	 */
	public double[] forwardPropagation(double inputs[]) {
		// The original only asserted this, a no-op unless the JVM runs with
		// -ea; validate explicitly instead.
		if (inputs.length != nInput - 1) {
			throw new IllegalArgumentException("expected " + (nInput - 1)
					+ " inputs but got " + inputs.length);
		}
		System.arraycopy(inputs, 0, inputNodes, 0, inputs.length);
		inputNodes[inputs.length] = 1.0; // bias node

		// Hidden layer: weighted sum of the inputs through the sigmoid.
		for (int h = 0; h < nHidden; h++) {
			double summation = 0.0;
			for (int i = 0; i < nInput; i++) {
				summation += inputNodes[i] * hiddenWeights[h][i];
			}
			hiddenNodes[h] = activationFunction(summation);
		}

		// Output layer: weighted sum of hidden activations through the sigmoid.
		for (int o = 0; o < nOutput; o++) {
			double summation = 0.0;
			for (int h = 0; h < nHidden; h++) {
				summation += hiddenNodes[h] * outputWeights[o][h];
			}
			outputNodes[o] = activationFunction(summation);
		}
		return outputNodes;
	}

	/**
	 * Computes the error gradients for the most recent forward pass. Must be
	 * called after {@link #forwardPropagation(double[])}; the weights
	 * themselves are changed by {@link #updateWeights()}.
	 *
	 * @param desiredOutputs target value for each output node
	 * @return the summed squared error 0.5 * sum((target - actual)^2)
	 */
	public double backPropagation(double desiredOutputs[]) {
		this.desiredOutputs = desiredOutputs;

		// Output-layer deltas: f'(y) * (target - y).
		for (int o = 0; o < nOutput; o++) {
			double output = outputNodes[o];
			outputErrorGradient[o] = dActivationFunction(output)
					* (desiredOutputs[o] - output);
		}

		// Hidden-layer deltas: f'(y) * sum of downstream deltas weighted by
		// the connecting output weights.
		for (int h = 0; h < nHidden; h++) {
			double errorSummation = 0.0;
			for (int o = 0; o < nOutput; o++) {
				errorSummation += outputErrorGradient[o] * outputWeights[o][h];
			}
			hiddenErrorGradient[h] = dActivationFunction(hiddenNodes[h])
					* errorSummation;
		}

		// Squared-error measure reported back to the caller.
		double error = 0.0;
		for (int o = 0; o < nOutput; o++) {
			double diff = desiredOutputs[o] - outputNodes[o];
			error += 0.5 * diff * diff;
		}
		return error;
	}

	/**
	 * Applies one gradient-descent step with momentum using the deltas from
	 * the most recent {@link #backPropagation(double[])} call.
	 *
	 * Fixes two issues in the original: the per-weight debug printing is
	 * removed, and the stored weight delta now includes the learning-rate and
	 * momentum terms (standard momentum: dw(t) = lr*grad*x + m*dw(t-1));
	 * previously only the raw gradient product was remembered. The dead
	 * sumArray()/ratioOfInput() indirection (ratioOfInput ignored its sum
	 * argument and returned the input unchanged) is also removed.
	 */
	public void updateWeights() {
		counter++;

		// Hidden -> output weights.
		for (int o = 0; o < nOutput; o++) {
			for (int h = 0; h < nHidden; h++) {
				double delta = learningCoefficient * outputErrorGradient[o]
						* hiddenNodes[h]
						+ momentum * changeToOutputWeights[o][h];
				outputWeights[o][h] += delta;
				changeToOutputWeights[o][h] = delta;
			}
		}

		// Input -> hidden weights.
		for (int h = 0; h < nHidden; h++) {
			for (int i = 0; i < nInput; i++) {
				double delta = learningCoefficient * hiddenErrorGradient[h]
						* inputNodes[i]
						+ momentum * changeToHiddenWeights[h][i];
				hiddenWeights[h][i] += delta;
				changeToHiddenWeights[h][i] = delta;
			}
		}
	}

	// Logistic sigmoid 1 / (1 + e^-x). Math.exp is clearer and more accurate
	// than the original Math.pow(Math.E, -x).
	private double activationFunction(double x) {
		return 1.0 / (1.0 + Math.exp(-x));
	}

	// Derivative of the sigmoid expressed in terms of its output y = f(x).
	private double dActivationFunction(double y) {
		return y * (1 - y);
	}

	/**
	 * Trains the network with stochastic gradient descent: each iteration
	 * forward-propagates, back-propagates and updates the weights for every
	 * record. Prints the summed error every 100 iterations.
	 *
	 * @param inputs      one row per training record
	 * @param outputs     target values, one row per training record
	 * @param nIterations number of passes over the whole training set
	 * @return the output activations after the final record of the final pass
	 * @throws IllegalArgumentException if the training data does not match the
	 *         network's dimensions
	 */
	public double[] trainNetwork(double inputs[][], double outputs[][],
			int nIterations) {
		validateTrainingData(inputs, outputs);
		initialiseWeights();
		for (int it = 0; it < nIterations; it++) {
			double error = 0.0;
			for (int i = 0; i < inputs.length; i++) {
				forwardPropagation(inputs[i]);
				error += backPropagation(outputs[i]);
				updateWeights();
			}
			if (it % 100 == 0) {
				System.out.println("error: " + error);
			}
		}
		return outputNodes;
	}

	// Resolves the old TODO: every record must match the network's layer sizes.
	private void validateTrainingData(double[][] inputs, double[][] outputs) {
		if (inputs.length != outputs.length) {
			throw new IllegalArgumentException(inputs.length
					+ " input records but " + outputs.length
					+ " output records");
		}
		for (double[] record : inputs) {
			if (record.length != nInput - 1) {
				throw new IllegalArgumentException("each input record needs "
						+ (nInput - 1) + " values, found " + record.length);
			}
		}
		for (double[] record : outputs) {
			if (record.length != nOutput) {
				throw new IllegalArgumentException("each output record needs "
						+ nOutput + " values, found " + record.length);
			}
		}
	}

	/** Demonstrates the network learning the XOR truth table. */
	public void XORexample() {
		NeuralNetwork nn = new NeuralNetwork(2, 2, 1);

		// Training data: XOR truth table.
		double[][] in = { { 0.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 0.0 },
				{ 1.0, 1.0 } };
		double[][] out = { { 0 }, { 1 }, { 1 }, { 0 } };

		// train nn
		nn.trainNetwork(in, out, 1000);

		// use nn
		printPredictions(nn, in, out);
	}

	/**
	 * Smoke test: trains a 3-input network on a small hand-labelled data set
	 * and prints each record's expected vs. actual output.
	 */
	public static void main(String[] args) {
		double [][] input = {{0.0, 0.0, 4.0}, {0.0, 0.0, 2.0}, {0.0, 0.0, 3.0}, {0.0, 0.0, 1.0}, {0.0, 0.0, 4.0}, 
		                     {0.0, 0.0, 1.0}, {0.0, 0.0, 0.0}, {0.0, 0.0, 4.0}, {0.0, 0.0, 3.0}, {0.0, 0.0, 2.0}, 
		                     {0.0, 0.0, 4.0}, {0.0, 0.0, 4.0}, {0.0, 0.0, 4.0}, {0.0, 0.0, 0.0}, {0.0, 0.0, 0.0}, 
		                     {0.0, 0.0, 3.0}, {0.0, 0.0, 1.0}, {0.0, 0.0, 3.0}, {0.0, 0.0, 0.0}, {0.0, 0.0, 3.0}, 
		                     {1.0, 0.0, 4.0}, {1.0, 0.0, 2.0}, {1.0, 0.0, 3.0}, {1.0, 0.0, 1.0}, {1.0, 0.0, 4.0}, 
		                     {1.0, 0.0, 1.0}, {1.0, 0.0, 0.0}, {1.0, 0.0, 4.0}, {1.0, 0.0, 3.0}};

		double [][] eOutput = { {0.0}, {0.0}, {0.0}, {0.0}, {0.0}, 
		                        {0.0}, {1.0}, {0.0}, {0.0}, {0.0}, 
		                        {0.0}, {0.0}, {0.0}, {1.0}, {1.0}, 
		                        {0.0}, {0.0}, {1.0}, {1.0}, {0.0}, 
		                        {0.0}, {0.0}, {0.0}, {0.0}, {0.0}, 
		                        {1.0}, {1.0}, {0.0}, {0.0}};

		NeuralNetwork nn = new NeuralNetwork(3, 3, 1);
		nn.trainNetwork(input, eOutput, 1000);

		printPredictions(nn, input, eOutput);
	}

	// Prints each record alongside its expected and actual network output.
	// Shared by main() and XORexample(), which previously duplicated the loop.
	private static void printPredictions(NeuralNetwork nn, double[][] inputs,
			double[][] expected) {
		for (int i = 0; i < inputs.length; i++) {
			double actualOut[] = nn.forwardPropagation(inputs[i]);
			System.out.println("Input: " + nn.arrayToString(inputs[i])
					+ " Output should be: " + expected[i][0] + " Output is: "
					+ actualOut[0]);
		}
	}

	// Space-separated rendering of an array, e.g. "1.0 0.0 ".
	// StringBuilder avoids quadratic String concatenation in the loop.
	private String arrayToString(double[] myArray) {
		StringBuilder sb = new StringBuilder();
		for (double value : myArray) {
			sb.append(value).append(' ');
		}
		return sb.toString();
	}
}
