package marek.ai;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * Feed forward network with back propagation training using thresholds
 * @author Marek Jankowski
 * jankowski.marek@gmail.com
 * Any comments or questions are welcome.
 * 
 * License: GNU GPL v3: http://www.gnu.org/licenses/gpl.html
 *
 * Basic concepts:
 * Network is separated into layers: input, hidden (1 to n) and output, each containing linked neurons.
 * Neuron: holds a value, incoming and outgoing synapses, and the activation function and its derivative used for error backpropagation.
 * Synapse: holds the data for one neuron interconnection, with its corresponding weight.
 * TrainingSet and TrainingData: structures holding the learning data.
 **/
public class Backpropagation {
	// Step size applied to every weight/threshold adjustment during back-propagation.
	private final static double LEARN_RATE = 0.5;
	// Training stops once the epoch error drops below this value.
	private final static double ERROR_THRESHOLD = 0.001;
	// Layers in network order: index 0 is INPUT, the last is OUTPUT.
	private List<Layer> layers = new ArrayList<Layer>();
	// Set to true once train() completes; test() refuses to run before that.
	private boolean trained = false; 

	/** Returns true once the network has been trained. */
	public boolean isTrained() {
		return trained;
	}
	/** Returns the live (mutable) list of layers in network order. */
	public List<Layer> getLayers() {
		return layers;
	}

	/** Role of a layer within the network. */
	public enum LayerType {INPUT, HIDDEN, OUTPUT}

	/**
	 * One supervised learning sample: an input vector and the desired
	 * (target) output vector.
	 */
	public static class TrainingData {

		private final double[] input;
		private final double[] desired;

		/**
		 * @param input input vector, one value per INPUT layer neuron
		 * @param desired target vector, one value per OUTPUT layer neuron
		 */
		public TrainingData(double[] input, double[] desired) {
			// Defensive copies: later mutation of the caller's arrays must
			// not silently change the training set.
			this.input = input.clone();
			this.desired = desired.clone();
		}

		public double[] getInput() {
			return input;
		}

		public double[] getDesired() {
			return desired;
		}

		@Override
		public String toString() {
			// StringBuilder instead of repeated String concatenation in a loop.
			StringBuilder s = new StringBuilder("Input: ");
			for (double i : input) {
				s.append(i).append(",");
			}
			s.append("\r\nOutput: ");
			for (double d : desired) {
				s.append(d).append(",");
			}
			return s.toString();
		}
	}
	
	/**
	 * Container for the {@link TrainingData} samples presented to the
	 * network during one training run.
	 */
	public static class TrainingSet {
		private List<TrainingData> trainingDataSet = new ArrayList<TrainingData>();

		/** Creates an empty training set; fill it via addTrainingData(). */
		public TrainingSet() {
		}

		/** Creates a training set backed directly by the given list (not copied). */
		public TrainingSet(List<TrainingData> trainingDataSet) {
			this.trainingDataSet = trainingDataSet;
		}

		/** Returns the backing list of samples. */
		public List<TrainingData> getTrainingDataSet() {
			return trainingDataSet;
		}

		/** Appends one sample to the set. */
		public void addTrainingData(TrainingData trainingData) {
			this.trainingDataSet.add(trainingData);
		}
	}
	
	/**
	 * Neuron activation function. Initial implementation: the logistic
	 * sigmoid, f(x) = 1 / (1 + e^-x).
	 */
	public static class ActivationFunction {

		/** Sigmoid of {@code value}; result lies in the open interval (0, 1). */
		public double activate(double value) {
			double negExp = Math.exp(-value);
			return 1 / (1 + negExp);
		}

		/**
		 * Sigmoid derivative expressed in terms of the ALREADY ACTIVATED
		 * output: f'(x) = f(x) * (1 - f(x)). Pass f(x), not x.
		 */
		public double derivative(double value) {
			double complement = 1 - value;
			return value * complement;
		}
	}

	/**
	 * One network layer (INPUT, HIDDEN or OUTPUT). Holds the layer's neurons
	 * plus the synapses connecting them to the next layer; layers form a
	 * doubly linked chain built via {@link #connectTo(Layer)}.
	 */
	public static class Layer {
		private int id;
		private LayerType type;
		private Layer nextLayer;
		private Layer prevLayer;

		private List<Neuron> neurons = new ArrayList<Neuron>();
		// Synapses leading FROM this layer's neurons TO the next layer's.
		private List<Synapse> allLayerSynapses = new ArrayList<Synapse>();

		public List<Synapse> getAllLayerSynapses() {
			return allLayerSynapses;
		}

		public int getId() {
			return id;
		}

		public Layer getPrevLayer() {
			return prevLayer;
		}

		public LayerType getType() {
			return type;
		}

		public Layer getNextLayer() {
			return nextLayer;
		}

		public List<Neuron> getNeurons() {
			return neurons;
		}

		/**
		 * Creates a layer holding {@code layerSize} fresh neurons. The id is
		 * the network's current layer count, so the layer is expected to be
		 * added to the network right away (see Backpropagation.addLayer).
		 */
		public Layer(Backpropagation network, LayerType type, int layerSize) {
			this.id = network.getLayers().size();
			this.type = type;
			for (int i = 0; i < layerSize; i++) {
				addNeuron(new Neuron(this));
			}
		}

		/**
		 * Links this layer to {@code layer} and fully connects every neuron
		 * of this layer to every neuron of the next one.
		 * @throws IllegalStateException if this layer is empty, is already
		 *         connected, or is being connected to itself
		 */
		public void connectTo(Layer layer) {
			if (neurons.size() < 1) {
				throw new IllegalStateException("Layer has no neurons");
			}
			if (nextLayer != null) {
				throw new IllegalStateException("Layer already connected to Layer[" + nextLayer.id + "]");
			}
			if (layer.equals(this)) {
				throw new IllegalStateException("Cannot connect Layer to itself.");
			}
			nextLayer = layer;
			layer.prevLayer = this;
			for (Neuron n : neurons) {
				for (Neuron n2 : nextLayer.getNeurons()) {
					allLayerSynapses.add(n.connectTo(n2));
				}
			}
		}

		private void addNeuron(Neuron n) {
			neurons.add(n);
		}

		/**
		 * Loads an input vector into this (INPUT) layer, one value per neuron.
		 * BUG FIX: removed the stray "+" inside the brackets of the error
		 * message ("[+ ").
		 */
		public void setValues(double[] values) {
			if (!type.equals(LayerType.INPUT)) {
				throw new IllegalStateException("Values can only be set for INPUT type layers");
			}
			if (values.length != neurons.size()) {
				throw new IllegalStateException("Neuron count [" + neurons.size() + "] must be same as value array length [" + values.length + "]");
			}
			for (int i = 0; i < values.length; i++) {
				neurons.get(i).setValue(values[i]);
			}
		}

		/*
		 * Only used for OUTPUT layer type: loads the desired (target) vector,
		 * one value per neuron.
		 */
		public void setDesiredValues(double[] desiredValues) {
			if (!type.equals(LayerType.OUTPUT)) {
				throw new IllegalStateException("Desired values can only be set for OUTPUT type layers");
			}
			if (desiredValues.length != neurons.size()) {
				// BUG FIX: stray "+" removed from this message as well.
				throw new IllegalStateException("Neuron size [" + neurons.size() + "] must be same as desired values size [" + desiredValues.length + "]");
			}
			for (int i = 0; i < desiredValues.length; i++) {
				neurons.get(i).setDesiredValue(desiredValues[i]);
			}

		}

		/**
		 * Feed-forward pass: activates this layer and then every following
		 * layer down to the output layer.
		 */
		public void activate() {
			if (type.equals(LayerType.INPUT)) {
				throw new IllegalStateException("Input layers cannot be activated");
			}
			Layer layerToActivate = this;
			while (layerToActivate != null) {
				layerToActivate.activateAllNeurons();
				layerToActivate = layerToActivate.getNextLayer();
			}
		}

		private void activateAllNeurons() {
			for (Neuron n : getNeurons()) {
				n.activate();
			}
		}

		/*
		 * Propagates error gradients from this layer back towards the input
		 * layer. The INPUT layer itself is skipped - it has no gradients to
		 * compute.
		 */
		public void backPropagate() {
			Layer l = this;
			while (l != null && !l.getType().equals(LayerType.INPUT)) {
				for (Neuron n : l.getNeurons()) {
					n.backPropagateDelta();
				}
				l = l.getPrevLayer();
			}
		}

		/*
		 * Applies weight deltas computed during back-propagation to this
		 * layer's synapses and to its neurons' threshold weights.
		 */
		public void applyWeightDeltas() {
			for (Synapse s : allLayerSynapses) {
				s.applyWeightDelta();
			}
			for (Neuron n : neurons) {
				n.applyThresholdDelta();
			}
		}

		/**
		 * Despite its name this returns the MEAN of the squared neuron
		 * errors (sum / neuron count) of this OUTPUT layer; the name is kept
		 * for backward compatibility. BUG FIX: "ouput" typo in the message.
		 */
		public double getSumSquaredErrors() {
			if (!type.equals(LayerType.OUTPUT)) {
				throw new IllegalStateException("Only output Layers can compute error values");
			}
			double sum = 0.0;
			for (Neuron n: neurons) {
				sum += Math.pow(n.getError(), 2);
			}
			return sum/neurons.size();
		}
	}
	
	/**
	 * A single neuron: holds its activation value, its incoming and outgoing
	 * synapses, a threshold ("bias") weight and the bookkeeping fields used
	 * during error back-propagation.
	 */
	public static class Neuron {
		private int id;
		private double value;
		private Layer layer;
		private List<Synapse> outgoingSynapses = new ArrayList<Synapse>();
		private List<Synapse> incomingSynapses = new ArrayList<Synapse>();
		private ActivationFunction activationFunction = new ActivationFunction();
		private double errorGradiant;
		// Only used in output layer neurons.
		private double desiredValue;
		private double error;

		// Only used in output and hidden layers: fixed pseudo-input whose
		// weight acts as the neuron's bias.
		public final static double THRESHOLD = -1.0;
		private double thresholdWeight;
		// Pending threshold adjustment; NaN until computed by back-propagation
		// (mirrors Synapse.delta). BUG FIX: was previously default-initialized
		// to 0.0, making the "not yet computed" state undetectable.
		private double thresholdDelta = Double.NaN;

		// Misspelled method name kept for backward compatibility with callers.
		public double getThersholdDelta() {
			return thresholdDelta;
		}

		/**
		 * Adds the pending delta to the threshold weight.
		 * BUG FIX: the original guard compared with '== Double.NaN', which is
		 * ALWAYS false (NaN is unequal to everything, including itself).
		 * INPUT layer neurons never compute a threshold delta, so a missing
		 * delta is skipped silently rather than treated as an error -
		 * throwing here would break training, because applyWeightDeltas()
		 * runs on every layer including INPUT.
		 */
		public void applyThresholdDelta() {
			if (Double.isNaN(thresholdDelta)) {
				return;
			}
			thresholdWeight += thresholdDelta;
			// Reinit delta so a stale value is never applied twice.
			thresholdDelta = Double.NaN;
		}

		public double getThresholdWeight() {
			return thresholdWeight;
		}
		public void setThresholdWeight(double thresholdWeight) {
			this.thresholdWeight = thresholdWeight;
		}
		/** Identifies the neuron as "layerId-neuronId". */
		@Override
		public String toString() {
			return this.layer.getId() + "-" + id;
		}
		public double getError() {
			return error;
		}

		public double getDesiredValue() {
			return desiredValue;
		}

		public void setDesiredValue(double desiredValue) {
			this.desiredValue = desiredValue;
		}

		public double getValue() {
			return value;
		}

		public double getErrorGradiant() {
			return errorGradiant;
		}

		public void setValue(double value) {
			this.value = value;
		}

		public int getId() {
			return id;
		}

		public Layer getLayer() {
			return layer;
		}

		public List<Synapse> getOutgoingSynapses() {
			return outgoingSynapses;
		}

		public ActivationFunction getActivationFunction() {
			return activationFunction;
		}

		/** Creates a neuron; its id is its index within {@code layer}. */
		public Neuron(Layer layer) {
			this.layer = layer;
			id = layer.getNeurons().size();
		}

		/** Creates a synapse from this neuron to {@code n} and registers it on both ends. */
		public Synapse connectTo(Neuron n) {
			Synapse s = new Synapse();
			s.setFrom(this);
			s.setTo(n);
			this.outgoingSynapses.add(s);
			n.incomingSynapses.add(s);
			return s;
		}

		/** Returns the outgoing synapse ending at {@code n}. */
		public Synapse getSynapseTo(Neuron n) {
			for (Synapse s : outgoingSynapses) {
				if (s.getTo().equals(n)) {
					return s;
				}
			}
			throw new RuntimeException("Could not find synapse from neuron " + getId() + " to neuron " + n.getId());
		}

		/** Returns the incoming synapse starting at {@code n}. */
		public Synapse getSynapseFrom(Neuron n) {
			for (Synapse s : incomingSynapses) {
				if (s.getFrom().equals(n)) {
					return s;
				}
			}
			throw new RuntimeException("Could not find synapse from neuron " + n.getId() + " to neuron " + getId());
		}

		/** Neurons are equal when they share the same neuron id and layer id. */
		@Override
		public boolean equals(Object o) {
			if (!(o instanceof Neuron)) {
				return false;
			}
			Neuron n = (Neuron) o;
			return n.getId() == this.getId() && this.getLayer().getId() == n.getLayer().getId();
		}

		/** BUG FIX: equals() was overridden without hashCode(); restore the contract. */
		@Override
		public int hashCode() {
			return 31 * layer.getId() + id;
		}

		/**
		 * Feed-forward step: weighted sum of incoming values minus the
		 * threshold weight, squashed by the activation function. Also
		 * refreshes the raw error (desired - actual), which is only
		 * meaningful for OUTPUT neurons where desiredValue has been set.
		 */
		public void activate() {
			if (layer.getType().equals(LayerType.INPUT)) {
				throw new IllegalStateException("INPUT neurons cannot be activated");
			}
			double sum = 0.0;
			// Get all input neurons, sum and activate
			for (Synapse s : incomingSynapses) {
				sum += s.getFrom().getValue() * s.getWeight();
			}
			value = activationFunction.activate(sum - thresholdWeight);
			error = desiredValue - value;
		}

		/**
		 * Computes this neuron's error gradient: OUTPUT neurons from their
		 * own error, HIDDEN neurons from the downstream gradients weighted
		 * by the connecting synapses; INPUT neurons do nothing. Then stores
		 * the resulting weight deltas for the later apply phase.
		 */
		public void backPropagateDelta() {
			if (layer.getType().equals(LayerType.OUTPUT)) {
				errorGradiant = activationFunction.derivative(value) * error;
				computeDeltas();
			} else if (layer.getType().equals(LayerType.HIDDEN)) {
				double sumErrGradiantWeightProduct = 0.0;
				for (Synapse s : outgoingSynapses) {
					sumErrGradiantWeightProduct += s.getTo().getErrorGradiant() * s.getWeight();
				}
				errorGradiant = activationFunction.derivative(value) * sumErrGradiantWeightProduct;
				computeDeltas();
			}
		}

		// Stores the pending weight delta on every incoming synapse plus the
		// pending threshold delta; they are applied later by applyWeightDeltas().
		private void computeDeltas() {
			for (Synapse s : incomingSynapses) {
				double delta = LEARN_RATE * s.getFrom().getValue() * errorGradiant;
				s.setDelta(delta);
			}
			thresholdDelta = LEARN_RATE * THRESHOLD * errorGradiant;
		}

	}
	
	/**
	 * A weighted, directed connection between two neurons. The delta field
	 * holds the pending weight adjustment computed during back-propagation
	 * and is NaN whenever no adjustment is pending.
	 */
	public static class Synapse {
		// Delta used in the weight adjustment phase; NaN until computed.
		private double delta = Double.NaN;
		private double weight = 0;
		private Neuron from;
		private Neuron to;

		public double getWeight() {
			return weight;
		}

		public void setWeight(double weight) {
			this.weight = weight;
		}

		public Neuron getFrom() {
			return from;
		}

		public void setFrom(Neuron from) {
			this.from = from;
		}

		public Neuron getTo() {
			return to;
		}

		public void setTo(Neuron to) {
			this.to = to;
		}

		public double getDelta() {
			return delta;
		}

		public void setDelta(double delta) {
			this.delta = delta;
		}

		/**
		 * Adds the pending delta to the weight and marks it consumed.
		 * BUG FIX: the original guard compared 'delta == Double.NaN', which
		 * is ALWAYS false (NaN is unequal to everything, including itself),
		 * so an uncomputed delta would have been silently applied, poisoning
		 * the weight with NaN. Double.isNaN() makes the intended check work.
		 * @throws IllegalStateException if no delta has been computed yet
		 */
		public void applyWeightDelta() {
			if (Double.isNaN(delta)) {
				throw new IllegalStateException("Cannot apply weight delta. Not yet computed.");
			}
			weight += delta;
			// Reinit delta so a stale value is never applied twice.
			delta = Double.NaN;
		}
	}
	/**
	 * Randomly initializes every synapse weight and every neuron threshold
	 * weight to a value in (-1, 1). (Removed: a large block of commented-out
	 * hard-coded debug weights that was dead code.)
	 */
	private void initWeights() {
		Random r = new Random(System.currentTimeMillis());
		for (Layer l : layers) {
			for (Synapse s : l.getAllLayerSynapses()) {
				s.setWeight(r.nextDouble() * (r.nextBoolean() ? -1 : 1));
			}
			// Init threshold weights. INPUT neurons also get one even though
			// they are never activated - harmless, kept from the original.
			for (Neuron n : l.getNeurons()) {
				n.setThresholdWeight(r.nextDouble() * (r.nextBoolean() ? -1 : 1));
			}
		}
	}
	/**
	 * Trains the network on the given set: runs epochs of feed-forward /
	 * back-propagation / weight update until the mean squared error over the
	 * WHOLE training set drops below ERROR_THRESHOLD, then marks the network
	 * trained. Logs per-epoch progress to stdout.
	 * @throws IllegalStateException if the network has fewer than 2 layers
	 * @throws IllegalArgumentException if the training set is empty
	 */
	public void train(TrainingSet trainingSet) {
		if (getLayers().size() < 2) {
			throw new IllegalStateException("Need at least 2 layers in network");
		}
		List<TrainingData> samples = trainingSet.getTrainingDataSet();
		if (samples.isEmpty()) {
			// Guard: an empty set would divide by zero below, producing NaN
			// and spuriously ending training.
			throw new IllegalArgumentException("Training set is empty");
		}
		initWeights();
		Layer input = getInputLayer();
		Layer firstHidden = getFirstHiddenLayer();
		Layer output = getOutputLayer();
		double sumSqrError = Double.MAX_VALUE;
		int epoch = 0;
		while (sumSqrError > ERROR_THRESHOLD) { 
			System.out.println("==========================================");
			System.out.println("Running epoch #" + epoch++ );
			double epochError = 0.0;
			for (TrainingData trainingData : samples) {
				// Load the sample into the network.
				input.setValues(trainingData.getInput());
				output.setDesiredValues(trainingData.getDesired());
				
				// Activate network, feed forward through all layers (hidden to output).
				firstHidden.activate();
				
				// Backpropagate the error gradients.
				output.backPropagate();
				
				// Apply weight deltas computed during back propagation.
				for (Layer l : layers) {
					l.applyWeightDeltas();
				}
				
				// BUG FIX: the error used to be sampled once per epoch AFTER
				// this loop, so convergence was judged on the LAST training
				// sample only. Accumulate over every sample instead.
				epochError += output.getSumSquaredErrors();
			}
			sumSqrError = epochError / samples.size();
			System.out.println("Error: " + sumSqrError );
			System.out.println("==========================================");
		}
		trained = true;
		
	}
	
	/** Returns the first layer, verifying it is of type INPUT. */
	public Layer getInputLayer() {
		Layer first = layers.get(0);
		if (first.getType() != LayerType.INPUT) {
			throw new IllegalStateException("The first layer should be an input layer.");
		}
		return first;
	}
	
	/** Returns the second layer, verifying it is of type HIDDEN. */
	public Layer getFirstHiddenLayer() {
		Layer second = layers.get(1);
		if (second.getType() != LayerType.HIDDEN) {
			throw new IllegalStateException("The second layer should be a hidden layer.");
		}
		return second;
	}
	
	/** Returns the last layer, verifying it is of type OUTPUT. */
	public Layer getOutputLayer() {
		Layer last = layers.get(layers.size() - 1);
		if (last.getType() != LayerType.OUTPUT) {
			throw new IllegalStateException("The last layer should be an output layer.");
		}
		return last;
	}
	
	/**
	 * Removes all layers so the network can be rebuilt from scratch.
	 * BUG FIX: also clears the trained flag; previously a reset network
	 * still reported isTrained() == true, so test() would fail with an
	 * IndexOutOfBoundsException instead of its intended guard message.
	 */
	public void reset() {
		layers.clear();
		trained = false;
	}
	
	/**
	 * Creates a layer of the given type with {@code size} neurons, appends
	 * it to the network and returns it. Layers must be added in network
	 * order (input first, output last).
	 */
	public Layer addLayer(LayerType type, int size) {
		Layer created = new Layer(this, type, size);
		layers.add(created);
		return created;
	}
	
	/**
	 * Feeds one input vector through the trained network and returns the
	 * output layer's activation values.
	 * @throws IllegalStateException if the network has not been trained
	 */
	public double[] test(double[] input) {
		if (!isTrained()) {
			throw new IllegalStateException("Cannot test data, network has not been trained yet.");
		}
		getInputLayer().setValues(input);
		getFirstHiddenLayer().activate();
		List<Neuron> outputNeurons = getOutputLayer().getNeurons();
		double[] results = new double[outputNeurons.size()];
		int i = 0;
		for (Neuron n : outputNeurons) {
			results[i++] = n.getValue();
		}
		return results;
	}
	
	/**
	 * Prints the test input and the network's result to stdout, each on its
	 * own labelled line with values separated by ", ".
	 */
	private void printData(double[] testData, double[] result) {
		StringBuilder line = new StringBuilder("Test Data: ");
		for (int i = 0; i < testData.length; i++) {
			if (i > 0) {
				line.append(", ");
			}
			line.append(testData[i]);
		}
		System.out.println(line.toString());
		line = new StringBuilder("Result: ");
		for (int i = 0; i < result.length; i++) {
			if (i > 0) {
				line.append(", ");
			}
			line.append(result[i]);
		}
		System.out.println(line.toString());
	}
	
	/** Runs {@code testData} through the trained network and prints both the input and the result. */
	public void testAndPrintData(double[] testData) {
		printData(testData, test(testData));
	}
	
	/** Demo entry point: trains the network on the XOR truth table and spot-checks two cases. */
	public static void main(String[] args) {
		// The four rows of the XOR truth table.
		TrainingSet xorSamples = new TrainingSet();
		xorSamples.addTrainingData(new TrainingData(new double[]{1.0,1.0}, new double[]{0.0}));
		xorSamples.addTrainingData(new TrainingData(new double[]{0.0,1.0}, new double[]{1.0}));
		xorSamples.addTrainingData(new TrainingData(new double[]{1.0,0.0}, new double[]{1.0}));
		xorSamples.addTrainingData(new TrainingData(new double[]{0.0,0.0}, new double[]{0.0}));

		Backpropagation network = new Backpropagation();
		// Build a 2-5-10-1 topology and chain the layers together.
		Layer input = network.addLayer(LayerType.INPUT, 2);
		Layer firstHidden = network.addLayer(LayerType.HIDDEN, 5);
		Layer secondHidden = network.addLayer(LayerType.HIDDEN, 10);
		Layer output = network.addLayer(LayerType.OUTPUT, 1);
		input.connectTo(firstHidden);
		firstHidden.connectTo(secondHidden);
		secondHidden.connectTo(output);

		network.train(xorSamples);

		// Spot check two of the four XOR cases.
		network.testAndPrintData(new double[]{0.0,0.0});
		network.testAndPrintData(new double[]{1.0,0.0});
	}
}
