/**
 * 
 */
package evolution.mlp;

import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;

import evolution.mlp.functions.Tanh;
import evolution.mlp.neurons.Bias;
import evolution.mlp.neurons.Hidden;
import evolution.mlp.neurons.In;
import evolution.mlp.neurons.Neuron;
import evolution.mlp.neurons.Out;

/**
 * A simple multi-layer perceptron with one input layer, one hidden layer
 * (Tanh activation) and one output layer, plus a bias neuron connected to
 * every hidden neuron.
 * 
 * @author camille
 */
public class MLP {

	/** Bias neuron shared by every hidden neuron. */
	private Bias bias;
	/** First layer: holds the raw input values. */
	private LinkedList<In> inputLayer;
	/** Middle layer: neurons built with a Tanh activation. */
	private LinkedList<Hidden> hiddenLayer;
	/** Last layer: produces the network outputs. */
	private LinkedList<Out> outputLayer;

	/**
	 * Creates an MLP with a bias neuron and empty layers; the public
	 * constructor fills the layers.
	 */
	private MLP() {
		bias = new Bias();
		inputLayer = new LinkedList<In>();
		hiddenLayer = new LinkedList<Hidden>();
		outputLayer = new LinkedList<Out>();
	}

	/**
	 * Builds a network with the given layer sizes. Neurons are created but
	 * not connected; call {@link #initConnexions()} to wire and weight them.
	 * 
	 * @param inputLayerSize  number of input neurons
	 * @param hiddenLayerSize number of hidden neurons (Tanh activation)
	 * @param outputLayerSize number of output neurons
	 */
	public MLP(int inputLayerSize, int hiddenLayerSize, int outputLayerSize) {

		this();

		for (int i = 0; i < inputLayerSize; i++) {
			inputLayer.add(new In());
		}

		for (int i = 0; i < hiddenLayerSize; i++) {
			hiddenLayer.add(new Hidden(Tanh.getInstance()));
		}

		for (int i = 0; i < outputLayerSize; i++) {
			outputLayer.add(new Out());
		}
	}

	/**
	 * Random initialization of the {@link MLP} weights: each hidden neuron is
	 * connected to the bias (uniform weight in [0,1)) and to every input and
	 * output neuron (Gaussian weights). Each weight is registered on both end
	 * points of the connexion.
	 */
	public void initConnexions() {

		Random random = new Random();

		for (Hidden n : hiddenLayer) {

			// Rely on autoboxing instead of the deprecated new Double(w).
			double w = random.nextDouble();
			n.addIn(bias, w);
			bias.addOut(n, w);

			for (In in : inputLayer) {
				w = random.nextGaussian();
				n.addIn(in, w);
				in.addOut(n, w);
			}

			for (Out out : outputLayer) {
				w = random.nextGaussian();
				n.addOut(out, w);
				out.addIn(n, w);
			}
		}
	}

	/**
	 * Feeds one value into each input neuron, in layer order.
	 * 
	 * @param inputs one value per input neuron
	 * @throws Exception if the number of values differs from the input layer
	 *                   size
	 */
	public void setInputs(LinkedList<Double> inputs) throws Exception {
		if (inputs.size() != inputLayer.size()) {
			throw new Exception("Inputs # != Input layer size");
		}

		// Zip the two lists with an iterator: get(i) on a LinkedList is O(n),
		// which made the original loop quadratic.
		Iterator<In> neurons = inputLayer.iterator();

		for (Double input : inputs) {
			neurons.next().setInput(input);
		}
	}

	/**
	 * Deep copy: fresh neurons, same topology, same weights and same input
	 * values. Note that this does not call super.clone() (MLP does not
	 * implement Cloneable); the copy is rebuilt from scratch.
	 * 
	 * @return an independent copy of this network
	 */
	@Override
	public MLP clone() {
		MLP clone = new MLP(this.inputLayer.size(), this.hiddenLayer.size(), this.outputLayer.size());

		// Map each original neuron to its clone so weight copying below is a
		// hash lookup instead of an indexOf() scan (the original was O(n^2)).
		Map<Neuron, In> inClones = new HashMap<Neuron, In>();
		Iterator<In> cIns = clone.inputLayer.iterator();

		for (In in : inputLayer) {
			In cin = cIns.next();
			cin.setInput(in.getInput());
			inClones.put(in, cin);
		}

		Map<Neuron, Out> outClones = new HashMap<Neuron, Out>();
		Iterator<Out> cOuts = clone.outputLayer.iterator();

		for (Out out : outputLayer) {
			outClones.put(out, cOuts.next());
		}

		Iterator<Hidden> cHiddens = clone.hiddenLayer.iterator();

		for (Hidden h : hiddenLayer) {

			Hidden ch = cHiddens.next();

			for (Entry<Neuron, Double> entry : h.getIn().entrySet()) {

				// The bias is identified by reference, as in the original.
				if (entry.getKey() == bias) {
					ch.getIn().put(clone.bias, entry.getValue());
					clone.bias.getOut().put(ch, entry.getValue());
				} else {
					In cin = inClones.get(entry.getKey());

					ch.getIn().put(cin, entry.getValue());
					cin.getOut().put(ch, entry.getValue());
				}
			}

			for (Entry<Neuron, Double> entry : h.getOut().entrySet()) {
				Out cout = outClones.get(entry.getKey());

				ch.getOut().put(cout, entry.getValue());
				cout.getIn().put(ch, entry.getValue());
			}
		}

		return clone;
	}

	/** @return the live input layer (not a copy) */
	public LinkedList<In> getInputLayer() {
		return inputLayer;
	}

	/** @return the live hidden layer (not a copy) */
	public LinkedList<Hidden> getHiddenLayer() {
		return hiddenLayer;
	}

	/** @return the live output layer (not a copy) */
	public LinkedList<Out> getOutputLayer() {
		return outputLayer;
	}

	/**
	 * Renders the network as a table: one row per hidden neuron, one column
	 * per input and per output neuron, each cell holding the connexion weight
	 * (or "null" when the neurons are not connected).
	 * 
	 * @param separator column separator character
	 * @return the weight table, rows terminated by '\n'
	 */
	public String toString(char separator) {
		// StringBuilder avoids the quadratic cost of += in a loop; positional
		// counters replace the redundant indexOf() scans.
		StringBuilder mlpStr = new StringBuilder("Neuron");

		// First line (headers)
		for (int i = 0; i < inputLayer.size(); i++) {
			mlpStr.append(separator).append("input").append(i);
		}

		for (int i = 0; i < outputLayer.size(); i++) {
			mlpStr.append(separator).append("output").append(i);
		}

		mlpStr.append('\n');

		// Other lines
		int row = 0;
		for (Hidden hidden : hiddenLayer) {
			mlpStr.append("hidden").append(row++);

			for (In in : inputLayer) {
				mlpStr.append(separator).append(hidden.getIn().get(in));
			}

			for (Out out : outputLayer) {
				mlpStr.append(separator).append(hidden.getOut().get(out));
			}

			mlpStr.append('\n');
		}

		return mlpStr.toString();
	}

	@Override
	public String toString() {
		return toString('\t');
	}
}
