package projects;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Random;
import java.util.Scanner;

public class NeuralNet extends SupervisedLearner {

	/**
	 * Trains the network with backpropagation.
	 *
	 * <p>Shuffles the data, holds out the first 10% of rows as a validation
	 * set, then trains one epoch at a time, stopping once validation accuracy
	 * has failed to improve for more than 15 consecutive epochs or after
	 * 10000 epochs. Per-epoch statistics are appended to the CSV writer.
	 *
	 * <p>Layer layout throughout this class: {@code neuralNet[0]} is the
	 * OUTPUT layer and {@code neuralNet[neuralNet.length - 1]} is the INPUT
	 * layer (plus one bias node); rows in between are hidden layers.
	 *
	 * @param features training feature matrix (one row per instance)
	 * @param labels   training labels; column 0 holds the nominal class
	 * @throws Exception propagated from I/O or prediction failures
	 */
	public void train(Matrix features, Matrix labels) throws Exception {
		epochNum = 0;
		misclassification = 0;
		sse = 0;
		try {
			writer.write("Train Set MSE, Test Set MSE, " +
					"Validation Set Misclassification, Epochs, Mean Squared Error, Avg. Misclassification\n");
		} catch (IOException e) { e.printStackTrace(); }

		// NOTE(review): featureToStartWith is 0 on both branches, so this
		// sub-matrix currently spans every column; the copy only matters if a
		// nonzero starting column is ever configured for the vowel data set.
		vowelSet = features.cols() > 4;
		featureToStartWith = 0;
		if (vowelSet) {
			features = new Matrix(features,
					0, featureToStartWith, features.rows(), features.cols() - featureToStartWith);
		}

		neuralNet = initNeuralNet(features, labels);

		// Shuffle features and labels identically by replaying the same seed.
		int seed = random.nextInt();
		random.setSeed(seed);
		features.shuffle(random);
		random.setSeed(seed);
		labels.shuffle(random);

		// Hold out the first 10% of rows as the validation set.
		int rowCutoff = features.rows() / 10;

		Matrix trainFeatures = new Matrix(features,
				rowCutoff, 0, features.rows() - rowCutoff, features.cols());
		Matrix trainLabels = new Matrix(labels,
				rowCutoff, 0, labels.rows() - rowCutoff, labels.cols());

		Matrix validationFeatures = new Matrix(features,
				0, 0, rowCutoff, features.cols());
		Matrix validationLabels = new Matrix(labels,
				0, 0, rowCutoff, labels.cols());

		double accuracy = 0;
		int count = 0; // consecutive epochs without validation improvement
		int numEpochs = 0;
		boolean validationSetIsImproving = true;
		while (validationSetIsImproving && numEpochs++ < 10000) {
			// BUG FIX: train on the held-out training split, not the full
			// set. The original trained on `features`, which leaked the
			// validation rows into training and made the per-epoch reshuffle
			// of trainFeatures/trainLabels below dead code.
			trainEpoch(trainFeatures, trainLabels);

			double trainAccuracy = measureAccuracy(trainFeatures, trainLabels, null);
			writer.write(String.format("%.4f,", 1 - trainAccuracy));

			double testAccuracy = measureAccuracy(validationFeatures, validationLabels, null);
			writer.write(String.format("%.4f,", 1 - testAccuracy));

			// Validation accuracy: predict() overwrites labels[0] with its
			// prediction, so save the true target first and restore it after
			// comparing.
			double numCorrect = 0;
			for (int i = 0; i < validationFeatures.rows(); i++) {
				double target = validationLabels.row(i)[0];
				predict(i, validationFeatures.row(i), validationLabels.row(i));
				if (validationLabels.row(i)[0] == target)
					numCorrect += 1;

				validationLabels.row(i)[0] = target;

				assert (validationLabels.row(i)[0] == target);
			}
			double prevAccuracy = accuracy;
			accuracy = numCorrect / validationFeatures.rows();
			if (accuracy <= prevAccuracy) {
				count++;
				if (count > 15)
					validationSetIsImproving = false;
			}
			else {
				count = 0;
			}

			// BUG FIX: misclassification rate is errors / total rows. The
			// original divided by numCorrect, which both inflated the rate
			// and divided by zero when nothing was classified correctly.
			valSetError = (validationFeatures.rows() - numCorrect) / validationFeatures.rows();
			writer.write(String.format("%.2f,", valSetError));

			// Reshuffle the training split (features and labels in lockstep)
			// for the next epoch.
			seed = random.nextInt();
			random.setSeed(seed);
			trainFeatures.shuffle(random);
			random.setSeed(seed);
			trainLabels.shuffle(random);
		}
		System.out.println(numEpochs);

		printNeuralNet();
		writer.close();
	}

	/**
	 * Runs one training epoch over every row of {@code features}, updating
	 * the running misclassification count and sum-squared error, then appends
	 * the epoch number, MSE, and misclassification rate to the CSV output.
	 */
	private void trainEpoch(Matrix features, Matrix labels) {
		misclassification = 0;
		sse = 0;
		for (int r = 0; r < features.rows(); r++) {
			trainRow(features.row(r), labels.row(r));
			backpropagate();

			// The predicted class is the output node with the largest
			// activation. (Sigmoid outputs are strictly positive, but seed
			// with -infinity rather than 0 for safety.)
			double maxValue = Double.NEGATIVE_INFINITY;
			double maxClass = 0;
			for (int i = 0; i < numClassesFromLabel; i++) {
				if (neuralNet[0][i].getOutValue() > maxValue) {
					maxValue = neuralNet[0][i].getOutValue();
					maxClass = i;
				}
			}
			if (maxClass != labels.row(r)[0])
				misclassification++;

			// Accumulate squared error across all output nodes.
			for (int i = 0; i < neuralNet[0].length; i++) {
				double target = neuralNet[0][i].getTargetValue();
				double predicted = neuralNet[0][i].getOutValue();
				sse += Math.pow(target - predicted, 2);
			}
		}
		double mse = sse / features.rows();

		try {
			writer.write(String.format("%d,", ++epochNum));
			writer.write(String.format("%.2f,", mse));
			writer.write(String.format("%.2f\n", misclassification / features.rows()));
		} catch (IOException e) { e.printStackTrace(); }
	}

	/**
	 * Forward-propagates one row through the network: one-hot encodes the
	 * target class on the output layer (1 for the node whose index matches
	 * {@code labels[0]}, 0 otherwise), loads the features plus bias into the
	 * input layer, and computes sigmoid activations layer by layer toward the
	 * output layer.
	 */
	private void trainRow(double[] features, double[] labels) {
		// One-hot encode the target class on the output layer.
		for (int i = 0; i < numClassesFromLabel; i++) {
			if ((double) i == labels[0])
				neuralNet[0][i].setTargetValue(1);
			else
				neuralNet[0][i].setTargetValue(0);
		}

		// Clear all previous netValues.
		for (int i = 0; i < neuralNet.length; i++)
			for (int j = 0; j < neuralNet[i].length; j++)
				neuralNet[i][j].setNetValue(0);

		// Walk from the input layer (last row) toward the output layer.
		for (int row = neuralNet.length - 1; row > 0; row--) {

			// Load the feature values (and the bias constant) into the input
			// layer on the first pass only.
			Node[] nodeRow = neuralNet[row];
			if (row == neuralNet.length - 1) {
				for (int i = 0; i < nodeRow.length - 1; i++)
					nodeRow[i].setOutValue(features[i]);
				nodeRow[nodeRow.length - 1].setOutValue(BIAS);
			}

			// Accumulate weighted outputs into the next row's net values
			// (dot product of this row's outputs with the outgoing weights).
			for (int i = 0; i < nodeRow.length; i++) {
				Node node = nodeRow[i];
				Node[] outNodes = node.getOutNodes();
				double[] outWeights = node.getOutWeights();
				for (int j = 0; j < outNodes.length; j++) {
					outNodes[j].addToNetValue(outWeights[j] * node.getOutValue());
				}
			}

			// Apply the logistic activation to the next row's net values.
			Node[] aboveRowNodes = nodeRow[0].getOutNodes();
			for (int i = 0; i < aboveRowNodes.length; i++) {
				Node aboveRowNode = aboveRowNodes[i];
				double netValue = aboveRowNode.getNetValue();
				aboveRowNode.setOutValue(1.0 / (1.0 + Math.exp(-netValue)));
			}
		}
	}

	/**
	 * Backpropagates the error: computes deltas for the output and hidden
	 * layers (top-down), then updates the outgoing weights of the layer
	 * feeding into the row whose deltas were just computed.
	 */
	private void backpropagate() {
		for (int i = 0; i < neuralNet.length - 1; i++) { // -1 skips deltas for the input row
			for (int j = 0; j < neuralNet[i].length; j++)
				computeDelta(i, j);
			computeOutWeights(i + 1);
		}
	}

	/**
	 * Computes the error term (delta) for node (i, j).
	 * Output nodes: (target - out) * out * (1 - out).
	 * Hidden nodes: out * (1 - out) * sum over downstream (delta * weight).
	 */
	private void computeDelta(int i, int j) {
		Node node = neuralNet[i][j];
		double output = node.getOutValue();
		double delta = 0.0;

		if (i == 0) { // output node
			double target = node.getTargetValue();
			delta = (target - output) * output * (1 - output);
		}
		else { // hidden node
			Node[] outNodes = node.getOutNodes();
			double[] outWeights = node.getOutWeights();
			for (int k = 0; k < outNodes.length; k++)
				delta += outNodes[k].getDelta() * outWeights[k];
			delta *= output * (1 - output); // sigmoid derivative
		}
		node.setDelta(delta);
	}

	/**
	 * Applies the gradient-descent update to every outgoing weight of layer
	 * {@code i}: w += LEARN_RATE * downstreamDelta * out (+ momentum).
	 *
	 * <p>NOTE(review): the momentum term carries the PREVIOUS weight's update
	 * within the same node, not each weight's own previous update across
	 * iterations — unusual, but preserved as-is to avoid changing the
	 * training trajectory; confirm against the intended momentum formulation.
	 */
	private void computeOutWeights(int i) {
		Node[] nodes = neuralNet[i];
		for (int k = 0; k < nodes.length; k++) {

			Node node = nodes[k];
			double[] outWeights = node.getOutWeights();
			Node[] outNodes = node.getOutNodes();

			double momentum = 0;
			for (int l = 0; l < outWeights.length; l++) {
				outWeights[l] += LEARN_RATE * outNodes[l].getDelta() * node.getOutValue() + momentum;
				momentum = momentumTerm * LEARN_RATE * outNodes[l].getDelta() * node.getOutValue();
			}

			node.setOutWeights(outWeights);
		}
	}

	/**
	 * Predicts the class for one row by forward-propagating it and writing
	 * the index of the most active output node into {@code labels[0]}.
	 */
	public void predict(int row, double[] features, double[] labels) throws Exception {
		// If the caller supplies more features than the network was trained
		// on, drop the same leading columns that train() dropped.
		// BUG FIX: compare against the INPUT layer size (last row, minus the
		// bias node); the original compared against the output layer size.
		// (Behavior is unchanged today because featureToStartWith is 0, so
		// the copy is an identity copy either way.)
		int numInputs = neuralNet[neuralNet.length - 1].length - 1;
		if (numInputs != features.length) {
			double[] newFeatures = new double[features.length - featureToStartWith];
			for (int i = 0; i < newFeatures.length; i++)
				newFeatures[i] = features[i + featureToStartWith];
			features = newFeatures;
		}

		trainRow(features, labels);

		double maxValue = Double.NEGATIVE_INFINITY;
		double maxClass = 0;
		for (int i = 0; i < numClassesFromLabel; i++) {
			if (neuralNet[0][i].getOutValue() > maxValue) {
				maxValue = neuralNet[0][i].getOutValue();
				maxClass = i;
			}
		}
		labels[0] = maxClass;
	}

	/**
	 * Builds the network topology. Prompts on stdin for the hidden-layer
	 * width, the number of hidden layers, and the momentum term; counts the
	 * label's classes via {@code Matrix.attrValue}; then wires every node to
	 * the row above it (bias nodes receive no incoming connections) with
	 * random initial weights.
	 *
	 * <p>Row 0 is the output layer; the last row is the input layer. Every
	 * row except the output row gets one extra bias node.
	 */
	private Node[][] initNeuralNet(Matrix features, Matrix labels) {

		Scanner scanner = new Scanner(System.in); // deliberately not closed: closing would close System.in
		System.out.println("Enter the number of hidden nodes per layer: ");
		int numNodesPerHiddenLayer = scanner.nextInt();
		System.out.println("Enter the number of hidden layers: ");
		int numHiddenLayers = scanner.nextInt();

		System.out.println("Momentum term: ");
		momentumTerm = scanner.nextDouble();

		// Count how many classes the output label can take (attrValue returns
		// null once z passes the last declared value).
		int z = 0;
		while (labels.attrValue(0, z++) != null)
			numClassesFromLabel = z;

		int numInputs = features.cols();
		int numOutputs = numClassesFromLabel;

		// Allocate the layers: [output][hidden...][input].
		Node[][] neuralNet = new Node[1 + numHiddenLayers + 1][];
		neuralNet[0] = new Node[numOutputs];
		neuralNet[neuralNet.length - 1] = new Node[numInputs + 1]; // +1 for bias
		for (int i = 1; i < neuralNet.length - 1; i++)
			neuralNet[i] = new Node[numNodesPerHiddenLayer + 1]; // +1 for bias

		// Create a Node for every slot in the 2D array.
		for (int i = 0; i < neuralNet.length; i++)
			for (int j = 0; j < neuralNet[i].length; j++)
				neuralNet[i][j] = new Node(i, j);

		// Wire each node to the row above it. Rows feeding the output layer
		// connect to every output node; other rows skip the bias node above,
		// since a bias node takes no inputs.
		for (int i = 1; i < neuralNet.length; i++)
			for (int j = 0; j < neuralNet[i].length; j++) {
				if (i == 1) {
					neuralNet[i][j].setOutNodes(neuralNet[i - 1]);
				}
				else {
					Node[] outputNodes = new Node[neuralNet[i - 1].length - 1];
					for (int a = 0; a < outputNodes.length; a++)
						outputNodes[a] = neuralNet[i - 1][a];
					neuralNet[i][j].setOutNodes(outputNodes);
				}

				double[] outputWeights = new double[neuralNet[i][j].getOutNodes().length];
				for (int k = 0; k < outputWeights.length; k++)
					outputWeights[k] = Tools.computeRandomWeight(random);
				neuralNet[i][j].setOutWeights(outputWeights);
			}

		return neuralNet;
	}

	/** Writes a textual dump of every node in the network to the CSV writer. */
	public void printNeuralNet() {
		// StringBuilder avoids O(n^2) string concatenation in the loop.
		StringBuilder neuralNetString = new StringBuilder("neuralNet:\n");
		for (int i = 0; i < neuralNet.length; i++)
			for (int j = 0; j < neuralNet[i].length; j++)
				neuralNetString.append(neuralNet[i][j].toString());

		try {
			writer.write(neuralNetString.toString());
		}
		catch (IOException e) { e.printStackTrace(); }
	}

	/**
	 * @param r source of randomness for shuffling and weight initialization.
	 * NOTE(review): if the output file cannot be opened, writer stays null
	 * and train() will later throw a NullPointerException — confirm whether
	 * failing fast here would be preferable.
	 */
	public NeuralNet(Random r) {
		random = r;
		try { writer = new BufferedWriter(new FileWriter("output/" + FILE_NAME)); }
		catch (IOException e) { e.printStackTrace(); }
	}

	private Node[][] neuralNet;           // [0] = output layer, last row = input layer
	private Random random;
	private static final double LEARN_RATE = .05;
	private static final double BIAS = 1; // constant output of each bias node
	private static final String FILE_NAME = "out.csv";
	private BufferedWriter writer;        // per-epoch CSV statistics sink
	private int numClassesFromLabel;      // size of the output layer
	private double momentumTerm;          // read from stdin in initNeuralNet
	private int featureToStartWith;       // first feature column used (currently always 0)
	private boolean vowelSet;             // true when features.cols() > 4
	private double sse;                   // sum squared error for the current epoch
	private int epochNum = 0;
	private double misclassification = 0; // misclassified rows in the current epoch
	private double valSetError = 0;       // validation misclassification rate
}
