import java.util.ArrayList;

public class NeuralNet {
	
	protected ArrayList<ArrayList<Node>> network;
	
	//Create network based on the types of nodes specified in layers
	//Int type specifies the initial connection distribution:
	//0 = layer-to-next-layer, 1 = each node to all forward nodes, 2 = none
	public NeuralNet(String[][][] layers, int type) {
		network = new ArrayList<ArrayList<Node>>();
		for (int layer = 0; layer < layers.length; layer++) {
			network.add(new ArrayList<Node>());
			for (String[] spec : layers[layer]) {
				//spec is {xCoord, yCoord, nodeType}
				double[] coords = {Double.parseDouble(spec[0]),
						Double.parseDouble(spec[1])};
				addNode(spec[2], coords, layer);
			}
		}
		switch (type) {
			case 0:
				//Connections from each layer to the next
				layeredConnections();
				break;
			case 1:
				//Connections from each node to all forward nodes
				fullConnections();
				break;
			case 2:
				//Type 2 == no initial connections
				break;
			default:
				System.err.println("Connection type "+type+" does not exist");
		}
	}
	
	//Demo: trains a small fully-forward-connected net on XOR and prints results.
	public static void main(String[] args) {
		//Layer 0: bias + two linear inputs; one sigmoid hidden node; one sigmoid output
		String[][] s = {{"B", "L", "L"},{"S"},{"S"}};
		NeuralNet n = new NeuralNet(quickCPPN(s),1);
		//Training data: {inputs, targets, output-node indices} per example
		double[][][] x = {{{0,0},{0},{0}},{{0,1},{1},{0}},{{1,0},{1},{0}},{{1,1},{0},{0}}};
		//Safety cap: backprop on XOR can fail to converge from an unlucky start,
		//and the original unbounded loop would then never terminate.
		final int maxIterations = 1000000;
		double e;
		int i = 0;
		do {
			e = n.trainNetwork(x, 1.0, 0.0);
			System.out.println(e);
			i++;
		} while (e > .01 && i < maxIterations);
		System.out.println("Iterations: "+i);
		for (int j = 0; j < x.length; j++) { 
			n.processInput(x[j][0]);
			double[] y = n.getOutputs();
			System.out.println(x[j][0][0]+" , "+x[j][0][1]+" : "+y[0]);
		}
	}
	
	//Single iteration of batch backprop; returns the summed error over all training data.
	//Examples is the training data, of the form {{in1,out1,idx1},{in2,out2,idx2},...}
	//where inX and outX are double[] and idxX[j] gives the index of the output-layer
	//node that target outX[j] applies to.
	public double trainNetwork(double[][][] examples, double learnRate, double momentum) {
		double t = 0;
		//Guard: an empty training set previously made 1.0/examples.length infinite
		if (examples.length == 0) {
			return t;
		}
		ArrayList<Node> outLayer = network.get(network.size()-1);
		for (int i = 0; i < examples.length; i++) {
			processInput(examples[i][0]);
			//Reset output deltas
			for (int j = 0; j < outLayer.size(); j++) {
				outLayer.get(j).resetDelta();
			}
			//Compute error on the targeted output nodes
			for (int j = 0; j < examples[i][1].length; j++) {
				int k = (int) examples[i][2][j];
				t += outLayer.get(k).computeError(examples[i][1][j]);
			}
			//Start at second to last layer and propagate the error backwards
			for (int j = network.size()-2; j >= 0 ; j--) {
				for (int k = 0; k < network.get(j).size(); k++) {
					network.get(j).get(k).propagateError();
				}
			}
		}
		//Update weights once per batch, averaging the accumulated gradient
		learnRate *= (1.0/examples.length);
		for (int i = 0; i < network.size(); i++) {
			for (int j = 0; j < network.get(i).size(); j++) {
				network.get(i).get(j).updateWeights(learnRate, momentum, false);
			}
		}
		return t;
	}
	
	//Feed one input vector through the network: assign input[k] to the k-th
	//non-bias node of layer 0, then run a forward pass over every node.
	public void processInput(double[] input) {
		resetInputs();
		//Set inputs
		int k = 0;
		boolean tooFewInputs = false;
		for (int i = 0; i < network.get(0).size(); i++) {
			if (!(network.get(0).get(i).getType().equals("B"))) {
				if (k < input.length) {
					network.get(0).get(i).addInput(input[k]);
					k++;
				} else {
					//More input nodes than values; previously this indexed past
					//the end of input and threw before the mismatch was reported.
					tooFewInputs = true;
				}
			}
		}
		if (tooFewInputs || k != input.length) {
			System.err.println("Number of inputs and nodes does not match.");
		}
		
		//Compute results
		for (int i = 0; i < network.size(); i++) {
			for (int j = 0; j < network.get(i).size(); j++) {
				network.get(i).get(j).forwardPass();
			}
		}
	}
	
	//Clear the accumulated input of every node; BiasNodes are skipped so
	//their input stays fixed at 1.
	public void resetInputs() {
		for (ArrayList<Node> layer : network) {
			for (Node node : layer) {
				//Reset all inputs except BiasNode, which is always 1
				if (!(node instanceof BiasNode)) {
					node.resetInput();
				}
			}
		}
	}

	//Clear the stored momentum terms of every node in the network.
	public void resetMomentums() {
		for (ArrayList<Node> layer : network) {
			for (Node node : layer) {
				node.resetMomentums();
			}
		}
	}
	
	//Collect the current output value of every node in the final layer.
	public double[] getOutputs() {
		ArrayList<Node> lastLayer = network.get(network.size()-1);
		double[] out = new double[lastLayer.size()];
		for (int i = 0; i < out.length; i++) {
			out[i] = lastLayer.get(i).getOutput();
		}
		return out;
	}
	
	//Connect every node to every non-bias node in the immediately following layer.
	public void layeredConnections() {
		for (int i = 0; i + 1 < network.size(); i++) {//All layers except the last
			ArrayList<Node> nextLayer = network.get(i+1);
			for (Node src : network.get(i)) {//All nodes
				for (Node dst : nextLayer) {//All nodes in the next layer
					//BiasNodes should not have incoming connections
					if (!(dst instanceof BiasNode)) {
						src.addConnection(dst);
					}
				}
			}
		}
	}
	
	//Connect every node to every non-bias node in every later layer.
	public void fullConnections() {
		for (int i = 0; i + 1 < network.size(); i++) {//All layers except the last
			for (Node src : network.get(i)) {//All nodes
				for (int k = i+1; k < network.size(); k++) {//All later layers
					for (Node dst : network.get(k)) {//All nodes
						//BiasNodes should not have incoming connections
						if (!(dst instanceof BiasNode)) {
							src.addConnection(dst);
						}
					}
				}
			}
		}
	}
	
	//Add a connection from node n1 of layer l1 to node n2 of layer l2.
	public void addConnection(int l1, int n1, int l2, int n2) {
		Node from = network.get(l1).get(n1);
		Node to = network.get(l2).get(n2);
		from.addConnection(to);
	}
	
	//Compute the summed error over all examples without updating any weights.
	//Examples uses the same {inputs, targets, output-node indices} layout as
	//trainNetwork.
	public double evaluate(double[][][] examples) {
		double total = 0;
		ArrayList<Node> outLayer = network.get(network.size()-1);
		for (double[][] example : examples) {
			processInput(example[0]);
			//Reset output deltas
			for (Node node : outLayer) {
				node.resetDelta();
			}
			//Compute error
			for (int j = 0; j < example[1].length; j++) {
				int k = (int) example[2][j];
				total += outLayer.get(k).computeError(example[1][j]);
			}
		}
		return total;
	}
	
	//Number of layers in the network
	public int size() {
		return network.size();
	}
	
	//Number of nodes in layer i
	public int size(int i) {
		return network.get(i).size();
	}
	
	//Node j of layer i
	public Node getNode(int i, int j) {
		return network.get(i).get(j);
	}
	
	//Based on the type code t and coordinates c, create the matching Node
	//subclass and append it to layer l. Unknown codes are reported on stderr
	//and fall back to a SigmoidNode so the layer stays well-formed.
	public void addNode(String t, double[] c, int l) {
		Node n;
		if (t.equals("S")) {
			n = new SigmoidNode(c);
		} else if (t.equals("G")) {
			n = new GaussianNode(c);
		} else if (t.equals("P")) {
			n = new PeriodicNode(c);
		} else if (t.equals("L")) {
			n = new LinearNode(c);
		} else if (t.equals("B")) {
			n = new BiasNode(c);
		} else if (t.equals("LC")) {
			n = new LinearClassifier(c);
		} else if (t.equals("SC")) {
			n = new SigmoidClassifier(c);
		} else if (t.equals("SS")) {
			n = new SignedSigmoid(c);
		} else {
			System.err.println("Node type "+t+" does not exist");
			n = new SigmoidNode(c);
		}
		network.get(l).add(n);
	}
	
	//Function to quickly create the layout for a substrate type neural net.
	//Each entry of layers is {rows, cols, nodeType}; the result enumerates one
	//node per grid cell, with its (row, col) coordinates rendered as strings.
	public static String[][][] quickSubstrate(String[][] layers) {
		String[][][] layout = new String[layers.length][][];
		for (int i = 0; i < layers.length; i++) {
			int rows = Integer.parseInt(layers[i][0]);
			int cols = Integer.parseInt(layers[i][1]);
			String nodeType = layers[i][2];
			layout[i] = new String[rows*cols][];
			int idx = 0;
			for (int r = 0; r < rows; r++) {
				for (int c = 0; c < cols; c++) {
					layout[i][idx] = new String[]{String.valueOf(r), String.valueOf(c), nodeType};
					idx++;
				}
			}
		}
		return layout;
	}
	
	//Function to quickly create the layout for a CPPN type neural net.
	//layers[i] lists the node-type strings for layer i; each node is given
	//coordinates (0, indexWithinLayer).
	public static String[][][] quickCPPN(String[][] layers) {
		String[][][] layout = new String[layers.length][][];
		for (int i = 0; i < layers.length; i++) {
			layout[i] = new String[layers[i].length][];
			for (int j = 0; j < layers[i].length; j++) {
				layout[i][j] = new String[]{"0", String.valueOf(j), layers[i][j]};
			}
		}
		return layout;
	}
	
	//Specific node types as protected classes. All of the general backprop work
	//is done in Node.java
	
	//Standard logistic-sigmoid node: output in (0,1).
	protected class SigmoidNode extends Node {
		
		public SigmoidNode(double[] c) {
			super(c);
		}
		
		//Logistic function 1/(1+e^-x)
		public double activationFunc(double x) {
			return 1 / (1 + Math.exp(-x));
		}

		//Derivative s(x)*(1-s(x)); compute the sigmoid once and reuse it
		public double activationDeriv(double x) {
			double s = activationFunc(x);
			return s*(1-s);
		}
		
		public String getType() {
			return "S";
		}
	}
	
	//Sigmoid node scored with cross-entropy loss instead of squared error.
	protected class SigmoidClassifier extends SigmoidNode {
		public SigmoidClassifier(double[] c) {
			super(c);
		}
		
		//Sets the backprop delta and returns the cross-entropy error for target.
		//The output is clamped away from exactly 0 and 1 before the logs so a
		//saturated sigmoid cannot produce -Infinity/NaN error values.
		public double computeError(double target) {
			delta = (target - output);
			double o = Math.min(Math.max(output, 1e-12), 1 - 1e-12);
			return -(target*Math.log(o)+(1-target)*Math.log(1-o));
		}
	}
	
	//Sigmoid scaled and shifted to the symmetric range (-range/2, range/2).
	protected class SignedSigmoid extends Node {
		
		double range = 6;

		public SignedSigmoid(double[] c) {
			super(c);
		}
		
		public double activationFunc(double x) {
			return range / (1 + Math.exp(-x)) -(range/2);
		}

		//Derivative of range/(1+e^-x): range*e^x/(1+e^x)^2, which is symmetric
		//in x. The original exp(x) form overflowed for large |x|, yielding
		//Infinity/Infinity = NaN; exp(-|x|) underflows gracefully to 0 instead.
		public double activationDeriv(double x) {
			double e = Math.exp(-Math.abs(x));
			return (range*e)/((1+e)*(1+e));
		}
		
		public String getType() {
			return "SS";
		}
	}
	
	//Gaussian-bump node: peaks at 1 for zero input and decays toward 0.
	protected class GaussianNode extends Node {
		
		public GaussianNode(double[] c) {
			super(c);
		}
		
		//e^(-x^2)
		public double activationFunc(double v) {
			return Math.exp(-v*v);
		}

		//d/dx e^(-x^2) = -2x*e^(-x^2)
		public double activationDeriv(double v) {
			return -2*activationFunc(v)*v;
		}
		
		public String getType() {
			return "G";
		}
	}

	//Sine-wave node: output repeats periodically with the input.
	protected class PeriodicNode extends Node {
		
		public PeriodicNode(double[] c) {
			super(c);
		}
		
		//sin(x)
		public double activationFunc(double v) {
			return Math.sin(v);
		}
	
		//d/dx sin(x) = cos(x)
		public double activationDeriv(double v) {
			return Math.cos(v);
		}
		
		public String getType() {
			return "P";
		}
	}
	
	//Identity-activation node. Note: Also functions as input node.
	protected class LinearNode extends Node {
		
		public LinearNode(double[] c) {
			super(c);
		}
		
		//Identity: pass the value through unchanged
		public double activationFunc(double v) {
			return v;
		}

		//Slope of the identity is constant 1
		public double activationDeriv(double v) {
			return 1;
		}
		
		public String getType() {
			return "L";
		}
	}
	
	//Linear node scored with cross-entropy loss. A linear output is unbounded,
	//so it is clamped into (0,1) before the logs are taken; values at or
	//outside that range would otherwise make Math.log return NaN/-Infinity.
	protected class LinearClassifier extends LinearNode {
		public LinearClassifier(double[] c) {
			super(c);
		}
		
		//Sets the backprop delta and returns the cross-entropy error for target.
		public double computeError(double target) {
			delta = (target - output);
			double o = Math.min(Math.max(output, 1e-12), 1 - 1e-12);
			return -(target*Math.log(o)+(1-target)*Math.log(1-o));
		}
	}
	
	//Constant-bias node: its input is set to 1 at construction and is never
	//cleared (resetInputs skips BiasNode), and connection builders give it no
	//incoming connections.
	protected class BiasNode extends LinearNode {
		
		public BiasNode(double[] c) {
			super(c);
			//Bias node input should always be 1
			addInput(1);
		}
		
		public String getType() {
			return "B";
		}
	}
}
