package com.yullage.ann.network.bp;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Random;

public class NeuralNetwork {
	static {
		// Force English locale so decimal output uses '.' as the separator.
		Locale.setDefault(Locale.ENGLISH);
	}

	/** Charset for every file this network writes (and reads). */
	final static Charset ENCODING = StandardCharsets.UTF_8;

	// When true, the constructor replaces the random initialization with the
	// weights registered by trainedWeights().
	final boolean isTrained = false;
	final DecimalFormat df;
	final Random rand = new Random();

	// Topology: input -> hidden1 -> hidden2 -> output, plus one shared bias
	// neuron connected to every non-input neuron.
	final ArrayList<Neuron> inputLayer = new ArrayList<>();
	final ArrayList<Neuron> hiddenLayer1 = new ArrayList<>();
	final ArrayList<Neuron> hiddenLayer2 = new ArrayList<>();
	final ArrayList<Neuron> outputLayer = new ArrayList<>();
	final Neuron bias = new Neuron();
	final int[] layers;

	// Margin used to clamp expected outputs towards the open interval ]0;1[
	// (e.g. 0.00000000001f); zero clamps to the closed interval instead.
	final double epsilon = 0;

	String networkName = "UnnamedNetwork";

	int maxSteps = 5000;
	double minError = 0.001;

	double activeThreshold = 0.5;
	double randomWeightMultiplier = 1;
	double learningRate = 0.9f;
	double momentum = 0.7f;

	// Inputs for training
	List<Double[]> inputs;

	// Corresponding training outputs
	List<Double[]> expectedOutputs;
	List<Double[]> resultOutputs;
	Double[] output;

	// Error history: one {epoch, squaredError, binaryError} record per log point.
	List<Double[]> errorHistory = new ArrayList<>();

	// Pre-trained weights keyed by weightKey(neuronId, connectionId).
	final HashMap<String, Double> weightUpdate = new HashMap<>();

	public String getNetworkName() {
		return networkName;
	}

	public void setNetworkName(String networkName) {
		this.networkName = networkName;
	}

	/**
	 * Builds a fully connected four-layer network with randomly initialized
	 * weights.
	 *
	 * @param input   number of input neurons
	 * @param hidden1 number of neurons in the first hidden layer
	 * @param hidden2 number of neurons in the second hidden layer
	 * @param output  number of output neurons
	 */
	public NeuralNetwork(int input, int hidden1, int hidden2, int output) {
		this.layers = new int[] { input, hidden1, hidden2, output };
		df = new DecimalFormat("#.0#");

		bias.setActiveThreshold(activeThreshold);

		/**
		 * Create all neurons and connections Connections are created in the neuron class
		 */
		createLayer(layers[0], inputLayer, null);
		createLayer(layers[1], hiddenLayer1, inputLayer);
		createLayer(layers[2], hiddenLayer2, hiddenLayer1);
		createLayer(layers[3], outputLayer, hiddenLayer2);

		// initialize random weights
		randomizeWeights(hiddenLayer1);
		randomizeWeights(hiddenLayer2);
		randomizeWeights(outputLayer);

		// reset id counters so the next network starts numbering from zero
		Neuron.counter = 0;
		Connection.counter = 0;

		if (isTrained) {
			trainedWeights();
			updateAllWeights();
		}
	}

	/**
	 * Adds {@code size} neurons to {@code layer}. Non-input layers (those with
	 * a non-null {@code previous} layer) are fully connected to the previous
	 * layer and to the bias neuron.
	 */
	private void createLayer(int size, ArrayList<Neuron> layer, ArrayList<Neuron> previous) {
		for (int j = 0; j < size; j++) {
			Neuron neuron = new Neuron();
			if (previous != null) {
				neuron.addInConnectionsS(previous);
				neuron.addBiasConnection(bias);
			}
			neuron.setActiveThreshold(activeThreshold);
			layer.add(neuron);
		}
	}

	/** Assigns a fresh random weight to every incoming connection of the layer. */
	private void randomizeWeights(List<Neuron> layer) {
		for (Neuron neuron : layer) {
			for (Connection conn : neuron.getAllInConnections()) {
				conn.setWeight(getRandom());
			}
		}
	}

	/**
	 * Loads tab-separated training data: two input columns followed by one
	 * expected-output column per line. Blank lines are skipped; expected
	 * outputs are clamped into [0 + epsilon, 1 - epsilon].
	 *
	 * @param fileName path of the training data file
	 */
	public void loadTrainingData(String fileName) {
		inputs = new ArrayList<>();
		expectedOutputs = new ArrayList<>();
		resultOutputs = new ArrayList<>();

		// try-with-resources closes the reader even when parsing fails (the
		// previous DataInputStream chain leaked on exceptions).
		try (BufferedReader br = Files.newBufferedReader(Paths.get(fileName), ENCODING)) {
			// Read File Line By Line
			String strLine;
			while ((strLine = br.readLine()) != null) {
				String trimmed = strLine.trim();
				if (trimmed.isEmpty()) {
					continue;
				}
				String[] tokens = trimmed.split("\t");

				// Inputs
				inputs.add(new Double[] { Double.parseDouble(tokens[0]), Double.parseDouble(tokens[1]) });

				// Expected output error check, normalize value ]0;1[
				Double[] expectedOutput = new Double[] { Double.parseDouble(tokens[2]) };
				for (int i = 0; i < expectedOutput.length; i++) {
					double d = expectedOutput[i];
					if (d < 0) {
						expectedOutput[i] = 0 + epsilon;
					} else if (d > 1) {
						expectedOutput[i] = 1 - epsilon;
					}
				}
				expectedOutputs.add(expectedOutput);

				// Dummy initialization; overwritten during training.
				resultOutputs.add(new Double[] { -1.0 });
			}
		} catch (Exception e) {
			System.err.println("Error: " + e.getMessage());
		}
	}

	public double getRandomWeightMultiplier() {
		return randomWeightMultiplier;
	}

	public void setRandomWeightMultiplier(double randomWeightMultiplier) {
		this.randomWeightMultiplier = randomWeightMultiplier;
	}

	public double getLearningRate() {
		return learningRate;
	}

	public void setLearningRate(double learningRate) {
		this.learningRate = learningRate;
	}

	public double getMomentum() {
		return momentum;
	}

	public void setMomentum(double momentum) {
		this.momentum = momentum;
	}

	/** Returns a random weight in [-multiplier; multiplier[. */
	double getRandom() {
		return randomWeightMultiplier * (rand.nextDouble() * 2 - 1); // [-1;1[
	}

	/**
	 * Sets the input layer outputs.
	 *
	 * @param inputs
	 *            There is equally many neurons in the input layer as there are in input variables
	 */
	public void setInput(Double[] inputs) {
		for (int i = 0; i < inputLayer.size(); i++) {
			inputLayer.get(i).setOutput(inputs[i]);
		}
	}

	/** Returns the current output of every output-layer neuron. */
	public Double[] getOutput() {
		Double[] outputs = new Double[outputLayer.size()];

		for (int i = 0; i < outputLayer.size(); i++) {
			outputs[i] = outputLayer.get(i).getOutput();
		}

		return outputs;
	}

	/**
	 * Calculate the output of the neural network based on the input The forward operation
	 */
	public void activate() {
		for (Neuron n : hiddenLayer1) {
			n.calculateOutput();
		}

		for (Neuron n : hiddenLayer2) {
			n.calculateOutput();
		}

		for (Neuron n : outputLayer) {
			n.calculateOutput();
		}
	}

	/**
	 * One backpropagation step: updates every weight (bias connections
	 * included) towards {@code expectedOutput} using the current learning rate
	 * and momentum.
	 *
	 * @param expectedOutput desired value for each output neuron, in layer order
	 */
	public void applyBackpropagation(Double[] expectedOutput) {
		// Partial derivative of the error w.r.t. each neuron's net input,
		// keyed by neuron id.
		Map<Integer, Double> errorPartialDerivative = new HashMap<>();

		int i = 0;
		for (Neuron n : outputLayer) {
			double ak = n.getOutput();
			double desiredOutput = expectedOutput[i];
			double delta = -ak * (1 - ak) * (desiredOutput - ak);
			errorPartialDerivative.put(n.id, delta);

			for (Connection con : n.getAllInConnections()) {
				double ai = con.leftNeuron.getOutput();
				double deltaWeight = -learningRate * ai * delta;
				con.setDeltaWeight(deltaWeight);
				con.setWeight(con.getWeight() + deltaWeight + momentum * con.getPrevDeltaWeight());
			}
			i++;
		}

		// update weights for hidden layer 2. The neuron's delta is independent
		// of the incoming connection, so it is computed once per neuron
		// (hoisted out of the connection loop); the output-layer weights it
		// reads do not change inside that loop, so behavior is unchanged.
		for (Neuron n : hiddenLayer2) {
			double ak = n.getOutput();

			double sumKoutputs = 0;
			for (Neuron out_neu : outputLayer) {
				double wjk = out_neu.getConnection(n.id).getWeight();
				sumKoutputs += errorPartialDerivative.get(out_neu.id) * wjk;
			}
			double delta = ak * (1 - ak) * sumKoutputs;
			errorPartialDerivative.put(n.id, delta);

			for (Connection con : n.getAllInConnections()) {
				double ai = con.leftNeuron.getOutput();
				double deltaWeight = -learningRate * ai * delta;
				con.setDeltaWeight(deltaWeight);
				con.setWeight(con.getWeight() + deltaWeight + momentum * con.getPrevDeltaWeight());
			}
		}

		// update weights for hidden layer 1 (reads the already-updated
		// hidden-layer-2 weights, matching the original update order)
		for (Neuron n : hiddenLayer1) {
			double ak = n.getOutput();

			double sumKoutputs = 0;
			for (Neuron out_neu : hiddenLayer2) {
				double wjk = out_neu.getConnection(n.id).getWeight();
				sumKoutputs += errorPartialDerivative.get(out_neu.id) * wjk;
			}
			double delta = ak * (1 - ak) * sumKoutputs;
			errorPartialDerivative.put(n.id, delta);

			for (Connection con : n.getAllInConnections()) {
				double ai = con.leftNeuron.getOutput();
				double deltaWeight = -learningRate * ai * delta;
				con.setDeltaWeight(deltaWeight);
				con.setWeight(con.getWeight() + deltaWeight + momentum * con.getPrevDeltaWeight());
			}
		}
	}

	/**
	 * Trains the network on the loaded data until the squared error drops to
	 * {@code minError} or {@code maxSteps} epochs have run, then writes the
	 * error history, the per-pattern results and all weights.
	 *
	 * @param maxSteps maximum number of training epochs
	 * @param minError stop as soon as the epoch's squared error falls below this
	 */
	public void run(int maxSteps, double minError) {
		this.maxSteps = maxSteps;
		this.minError = minError;

		int i;
		double errorSE = 1;
		double errorBinary = 1;

		// Train neural network until minError reached or maxSteps exceeded
		for (i = 0; i < maxSteps && errorSE > minError; i++) {
			errorSE = 0;
			errorBinary = 0;

			// Reset mix detect
			resetMixDetect(hiddenLayer1);
			resetMixDetect(hiddenLayer2);
			resetMixDetect(outputLayer);

			// Do training: one forward + backward pass per pattern
			for (int p = 0; p < inputs.size(); p++) {
				setInput(inputs.get(p));

				activate();

				output = getOutput();
				resultOutputs.set(p, output);

				for (int j = 0; j < expectedOutputs.get(p).length; j++) {
					// Square error
					double err = Math.pow(output[j] - expectedOutputs.get(p)[j], 2);
					errorSE += err;

					// Binary error: thresholded output vs. expected class
					double normalizedOutput = (output[j] >= activeThreshold ? 1.0 : 0);
					if (normalizedOutput != expectedOutputs.get(p)[j]) {
						errorBinary += 1;
					}
				}

				applyBackpropagation(expectedOutputs.get(p));
			}

			// Log every 100th epoch plus the final one
			if ((i % 100 == 0) || (i == (maxSteps - 1) || (errorSE <= minError))) {
				System.out.println("Iteration: " + i + "\t\tSquared Error: " + errorSE + "\t\tBinary Error: "
						+ errorBinary);
				errorHistory.add(new Double[] { (double) i, errorSE, errorBinary });
			}
		}

		writeErrorHistory(errorHistory);
		printResult();

		System.out.println("Sum of squared errors = " + errorSE);
		System.out.println("Sum of binary errors = " + errorBinary);
		System.out.println("##### EPOCH " + i + "\n");
		if (i == maxSteps) {
			System.out.println("Training doesn't achieve predefined goal!");
		}

		writeAllWeights();
		printWeightUpdate();
	}

	/** Clears the mix-detect state of every neuron in the layer. */
	private void resetMixDetect(List<Neuron> layer) {
		for (Neuron n : layer) {
			n.resetMixDetect();
		}
	}

	/** Prints inputs, expected outputs and actual outputs for every pattern. */
	void printResult() {
		System.out.println("NN example with xor training");
		for (int p = 0; p < inputs.size(); p++) {
			System.out.print("INPUTS: ");
			for (int x = 0; x < layers[0]; x++) {
				System.out.print(inputs.get(p)[x] + " ");
			}

			System.out.print("EXPECTED: ");
			for (int x = 0; x < layers[3]; x++) {
				System.out.print(expectedOutputs.get(p)[x] + " ");
			}

			System.out.print("ACTUAL: ");
			for (int x = 0; x < layers[3]; x++) {
				System.out.print(resultOutputs.get(p)[x] + " ");
			}
			System.out.println();
		}
		System.out.println();
	}

	/** Key under which a connection's weight is stored in {@link #weightUpdate}. */
	String weightKey(int neuronId, int conId) {
		return "N" + neuronId + "_C" + conId;
	}

	/**
	 * Take from hash table and put into all weights
	 */
	public void updateAllWeights() {
		// update weights for the output layer
		applyStoredWeights(outputLayer);

		// update weights for the hidden layers
		applyStoredWeights(hiddenLayer2);
		applyStoredWeights(hiddenLayer1);
	}

	/** Applies the stored weight to each incoming connection of the layer. */
	private void applyStoredWeights(List<Neuron> layer) {
		for (Neuron n : layer) {
			for (Connection con : n.getAllInConnections()) {
				double newWeight = weightUpdate.get(weightKey(n.id, con.id));
				con.setWeight(newWeight);
			}
		}
	}

	// trained data: paste the output of printWeightUpdate() here and set
	// isTrained to true to start from these weights instead of random ones
	void trainedWeights() {
		weightUpdate.clear();
	}

	/** Prints ready-to-paste weightUpdate.put(...) lines for every connection. */
	public void printWeightUpdate() {
		System.out.println("printWeightUpdate, put this in trainedWeights() and set isTrained to true");

		// weights for the hidden layer
		printLayerWeightUpdate(hiddenLayer1);
		printLayerWeightUpdate(hiddenLayer2);

		// weights for the output layer
		printLayerWeightUpdate(outputLayer);

		System.out.println();
	}

	/** Prints one weightUpdate.put(...) line per incoming connection of the layer. */
	private void printLayerWeightUpdate(List<Neuron> layer) {
		for (Neuron n : layer) {
			for (Connection con : n.getAllInConnections()) {
				String w = df.format(con.getWeight());
				System.out.println("weightUpdate.put(weightKey(" + n.id + ", " + con.id + "), " + w + ");");
			}
		}
	}

	/**
	 * Writes the training parameters and every connection weight to
	 * {@code result/<networkName>_weights.txt}, echoing the weights to stdout.
	 */
	public void writeAllWeights() {
		Path path = Paths.get("result/" + networkName + "_weights.txt");

		try {
			// Create "result/" on demand; previously a missing directory made
			// newBufferedWriter fail with NoSuchFileException.
			Files.createDirectories(path.getParent());

			try (BufferedWriter writer = Files.newBufferedWriter(path, ENCODING)) {
				System.out.println("writeAllWeights");

				writer.write("Random weight multiplier: " + this.randomWeightMultiplier + "\n");
				writer.write("Learning rate: " + this.learningRate + "\n");
				writer.write("Momentum: " + this.momentum + "\n");
				writer.write("Active threshold: " + this.activeThreshold + "\n");
				writer.write("\n");
				writer.write("Max steps: " + this.maxSteps + "\n");
				writer.write("Min error condition: " + this.minError + "\n");
				writer.write("\n");

				// weights for the hidden layer
				writer.write("### Hidden Layer 1 ###\n");
				writeLayerWeights(writer, hiddenLayer1);

				writer.write("\n");

				writer.write("### Hidden Layer 2 ###\n");
				writeLayerWeights(writer, hiddenLayer2);

				writer.write("\n");

				// weights for the output layer
				writer.write("### Output Layer ###\n");
				writeLayerWeights(writer, outputLayer);

				System.out.println();
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/** Writes one line per incoming connection of the layer, echoing each to stdout. */
	private void writeLayerWeights(BufferedWriter writer, List<Neuron> layer) throws IOException {
		for (Neuron n : layer) {
			for (Connection con : n.getAllInConnections()) {
				double w = con.getWeight();
				String line = "n=" + n.id + " c=" + con.id + " f=" + con.leftNeuron.id + " w=" + w + " mix="
						+ n.getMixDetect();
				System.out.println(line);
				writer.write(line + "\n");
			}
		}
	}

	/**
	 * Writes the error history as CSV (Epoch,SquaredError,BinaryError) to
	 * {@code result/<networkName>_errHist.csv}.
	 *
	 * @param errHist records of {epoch, squaredError, binaryError}
	 */
	public void writeErrorHistory(List<Double[]> errHist) {
		Path path = Paths.get("result/" + networkName + "_errHist.csv");
		try {
			// Create "result/" on demand so the write cannot fail on a
			// missing directory.
			Files.createDirectories(path.getParent());

			try (BufferedWriter writer = Files.newBufferedWriter(path, ENCODING)) {
				writer.write("Epoch,SquaredError,BinaryError\n");
				for (Double[] record : errHist) {
					String line = record[0] + "," + record[1] + "," + record[2];
					writer.write(line);
					writer.newLine();
				}
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}
