package backproped;

import gui.Filesystem;
import gui.NeuralNetworkSet;

import java.io.FileReader;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.ArrayBlockingQueue;

public class MomentumNeuronLayer {
	// The perceptrons making up this layer; one output per perceptron
	private ArrayList<ContinuousPerceptron> neurons;
	// Learning-rate constant shared by every perceptron in the layer
	private double learningRate;
	// The layer feeding into this one (null for the first layer)
	private MomentumNeuronLayer previousLayer;
	// The layer this one feeds (null for the output layer)
	private MomentumNeuronLayer nextLayer;

	// 1 weight per input per perceptron
	double[][] lastWeight;
	// weights from the previous update step, read by the momentum term
	double[][] previousWeight;
	// 1 output per perceptron
	private double[] lastOutput;
	// the input is given to all perceptrons, only one copy
	private double[] lastInput;

	/*
	 * Build a layer of numOfNeurons perceptrons. Row i of weights becomes
	 * the weight vector of perceptron i; c is the shared learning-rate
	 * constant.
	 */
	public MomentumNeuronLayer(int numOfNeurons, double[][] weights, double c) {
		lastWeight = weights;
		learningRate = c;
		neurons = new ArrayList<ContinuousPerceptron>(numOfNeurons);
		for (int n = 0; n < numOfNeurons; n++) {
			neurons.add(new ContinuousPerceptron(weights[n], learningRate));
		}
	}

	/*
	 * Connect one layer to its previous, feeding, layer. Must be mirrored
	 * by a setNextLayer call on cnl for the links to be consistent.
	 */
	public void setPreviousLayer(MomentumNeuronLayer cnl) {
		previousLayer = cnl;
	}

	/*
	 * Connect one layer to its next layer. Must be mirrored by a
	 * setPreviousLayer call on cnl for the links to be consistent.
	 */
	public void setNextLayer(MomentumNeuronLayer cnl) {
		nextLayer = cnl;
	}

	/*
	 * Collect the current weight matrix for all neurons on this layer.
	 * Returns the live array, not a copy.
	 */
	public double[][] getLastWeights() {
		return lastWeight;
	}

	// Weight matrix from the previous update step (may be null before the
	// first training pass). Returns the live array, not a copy.
	public double[][] getPreviousWeights() {
		return previousWeight;
	}

	/*
	 * Find the number of neurons in this layer.
	 */
	public int getNumOfNeurons() {
		return neurons.size();
	}

	/*
	 * Feed input forward through this layer and, recursively, every layer
	 * after it; the final layer's raw outputs are pushed through the TLU
	 * threshold before being returned. No weights are updated, so this is
	 * safe for validation and testing.
	 */
	public double[] classifyInput(double[] input) {
		// the output layer expects a trailing -1 bias entry on its input
		if (nextLayer == null && previousLayer != null) {
			double[] augmented = new double[input.length + 1];
			System.arraycopy(input, 0, augmented, 0, input.length);
			augmented[augmented.length - 1] = -1; // bias
			input = augmented;
		}
		// one output per perceptron
		double[] output = new double[neurons.size()];
		for (int n = 0; n < neurons.size(); n++) {
			output[n] = neurons.get(n).calcOutput(input);
		}
		if (nextLayer != null) {
			// hidden layer: hand our outputs on to the next layer
			return nextLayer.classifyInput(output);
		}
		// final layer: threshold each raw output
		for (int n = 0; n < output.length; n++) {
			output[n] = this.TLU(output[n]);
		}
		return output;
	}

	/*
	 * Feed input forward through this layer and, recursively, every layer
	 * after it, returning the raw (un-thresholded) outputs of the final
	 * layer. No weights are updated, so this is safe for validation and
	 * testing.
	 * 
	 * TODO: Combine this with the TLU-based classifyInput method
	 */
	public double[] testOutput(double[] input) {
		// the output layer expects a trailing -1 bias entry on its input
		if (nextLayer == null && previousLayer != null) {
			double[] augmented = new double[input.length + 1];
			System.arraycopy(input, 0, augmented, 0, input.length);
			augmented[augmented.length - 1] = -1; // bias
			input = augmented;
		}
		// one output per perceptron
		double[] output = new double[neurons.size()];
		for (int n = 0; n < neurons.size(); n++) {
			output[n] = neurons.get(n).calcOutput(input);
		}
		// recurse until the output layer is reached
		return nextLayer == null ? output : nextLayer.testOutput(output);
	}

	/*
	 * This is used to train the layer based upon the input and the desired
	 * matrices This will work using feed forward back propagation using the
	 * Delta Learning Rule
	 * 
	 * This returns the ERROR SIGNAL, which is required for recursion.
	 */
	public double[] trainLayer(double[] input, double[] desired) {
		double[] output = new double[neurons.size()];
		int i = 0;
		// account for the need for bias inputs
		// (only the output layer augments its input with a trailing -1 bias)
		if (nextLayer == null && previousLayer != null) {
			double[] temp = new double[input.length + 1];
			// copy over the original input
			for (i = 0; i < input.length; i++) {
				temp[i] = input[i];
			}
			// add the bias
			temp[temp.length - 1] = -1;
			input = temp;
			i = 0;
		}
		// forward pass: one output per perceptron
		for (ContinuousPerceptron cp : neurons) {
			output[i] = cp.calcOutput(input);
			i++;
		}

		// remember the activations; updateWeightings reads these
		lastInput = input;
		lastOutput = output;

		double[] errorSignal = null;
		// if there is a next layer, propagate to it
		if (nextLayer != null) {
			// the error signal comes back already scaled by the output
			// layer's activation derivative (see the else branch below)
			errorSignal = nextLayer.trainLayer(output, desired);
			// and then update our weights
			this.updateWeightings(errorSignal);
		} // else if we are on the final layer
		else {
			errorSignal = calculateErrorSignal(output, desired);
			this.updateWeightings(errorSignal);
			// scale the error signal by the activation derivative
			// dz = 0.5 * (1 - z^2) before handing it back to the caller
			for (i = 0; i < output.length; i++) {
				errorSignal[i] = errorSignal[i]
						* (0.5 * (1 - Math.pow(output[i], 2)));
			}
		}

		return errorSignal;
	}

	/*
	 * Raw error signal: element-wise (desired - output).
	 * 
	 * TODO: See if the activation-derivative factor 0.5 * (1 - z^2) can be
	 * folded in here rather than being applied afterwards in trainLayer
	 */
	private double[] calculateErrorSignal(double[] output, double[] desired) {
		int len = output.length;
		double[] signal = new double[len];
		for (int n = 0; n < len; n++) {
			signal[n] = desired[n] - output[n];
		}
		return signal;
	}

	/*
	 * Apply the delta-learning-rule weight update with back propagation.
	 * 
	 * Hidden layers scale each neuron's update by the next layer's error
	 * signal propagated back through that layer's weights; the output
	 * layer uses each neuron's own (desired - output) error directly:
	 *   w(ij)* = w(ij) + n * 0.5*(1 - z(i)^2) * x(j) * errSig(i)
	 *            + momentum * (w(ij) - wPrev(ij))
	 * 
	 * Reads lastOutput / lastInput, which trainLayer stores before calling
	 * this method.
	 */
	private void updateWeightings(double[] errorSignal) {
		double[][] newWeighting = new double[neurons.size()][lastWeight[0].length];
		// NOTE: momentum is currently 0, so the momentum term is inert
		double momentum = 0;
		// Seed previousWeight with a DEEP COPY on first use. The old code
		// aliased previousWeight = lastWeight, which made the two arrays
		// permanently share rows and forced the momentum term to zero even
		// when a non-zero momentum coefficient was configured.
		if (previousWeight == null) {
			previousWeight = new double[lastWeight.length][];
			for (int r = 0; r < lastWeight.length; r++) {
				previousWeight[r] = lastWeight[r].clone();
			}
		}
		int i = 0;
		if (nextLayer != null) {
			// we know that we're a hidden layer if we have a next layer:
			// w(ji)* = w(ji) + n*(0.5*(1 - y(i)^2)*x(i)*(error sig'*weighting)
			// don't worry about bias neurons for the moment

			// grab the weights between the hidden layer and the output layer
			double[][] nextLayerWeights = nextLayer.getLastWeights();
			int numberOfOutputs = nextLayer.getNumOfNeurons();
			for (ContinuousPerceptron cp : neurons) {
				// back-propagate: sum each output's error through the
				// weight connecting it to this neuron
				double errorSignalSum = 0;
				for (int k = 0; k < numberOfOutputs; k++) {
					errorSignalSum += errorSignal[k] * nextLayerWeights[k][i];
				}

				for (int j = 0; j < lastWeight[0].length; j++) {
					newWeighting[i][j] = lastWeight[i][j]
							+ learningRate * (0.5 * (1 - Math.pow(lastOutput[i], 2)) * lastInput[j] * errorSignalSum)
							+ momentum * (lastWeight[i][j] - previousWeight[i][j]);
				}
				cp.setWeights(newWeighting[i]);
				// save the previous weight row before swapping in the new one
				previousWeight[i] = lastWeight[i];
				lastWeight[i] = newWeighting[i];
				i++;
			}
		} else {
			// output layer: regular delta learning rule
			// w* = w + n(d-z)*0.5(1-z^2)*y
			// Each neuron uses its OWN error errorSignal[i]; the old code
			// summed the whole error vector and applied that single sum to
			// every neuron, which breaks multi-output training (it matched
			// the per-neuron rule only for single-output networks).
			for (ContinuousPerceptron cp : neurons) {
				for (int j = 0; j < lastWeight[0].length; j++) {
					newWeighting[i][j] = lastWeight[i][j] + learningRate
							* errorSignal[i] * 0.5 * (1 - Math.pow(lastOutput[i], 2)) * lastInput[j]
							+ momentum * (lastWeight[i][j] - previousWeight[i][j]);
				}
				cp.setWeights(newWeighting[i]);
				// save the previous weight row before swapping in the new one
				previousWeight[i] = lastWeight[i];
				lastWeight[i] = newWeighting[i];
				i++;
			}
		}
	}

	/*
	 * Threshold logic unit: any strictly positive value maps to 1,
	 * everything else (including 0) maps to -1. Note this is NOT the same
	 * as Math.signum, which returns 0 for 0.
	 */
	public int TLU(double input) {
		return input > 0 ? 1 : -1;
	}

	/*
	 * Return the index of the largest value in the array.
	 * 
	 * The old version seeded the running maximum with the sentinel -999999,
	 * so an array whose entries were all below that value reported index 0
	 * regardless of where the true maximum was. Seeding with
	 * NEGATIVE_INFINITY handles any finite input; an empty array still
	 * yields 0, as before.
	 */
	public static int max(double[] input) {
		int maxNum = 0;
		double max = Double.NEGATIVE_INFINITY;
		for (int i = 0; i < input.length; i++) {
			if (input[i] > max) {
				max = input[i];
				maxNum = i;
			}
		}
		return maxNum;
	}
	/*
	 * Generate a length-num array of random starting weights with
	 * alternating signs: even indices get a value in [0, 1), odd indices a
	 * value in [-1, 0). (The old comment claimed every weight was between
	 * 0 and 1, which the code has never done for the odd slots.)
	 */
	public static double[] generateRandomWeights(int num) {
		double[] weights = new double[num];

		for (int i = 0; i < num; i++) {
			if (i % 2 == 0)
				weights[i] = Math.random();
			else
				weights[i] = -1 + Math.random();
		}

		return weights;
	}

	/*
	 * This is strictly for the creation of a 2 layer neuron network, 1 hidden
	 * layer, 1 output
	 * 
	 * Inputs: There are n inputs, x long Desired: There are n desired outputs,
	 * x long
	 * 
	 * Both layers are built with a learning constant of 0.05 (the old
	 * comment said 0.2 — NOTE(review): load2LayerNetwork still uses 0.2,
	 * confirm which rate is intended). Weights are randomly initialised.
	 */
	public static MomentumNeuronLayer create2LayerNetwork(int numOfInput,int numOfOutput, int numOfHidden) {
		// the number of desired is the number of output neurons

		// if no numOfHidden, we should find the ideal size, this is a bit of
		// work atm
		double[][] hiddenWeights = new double[numOfHidden][numOfInput];
		for (int i = 0; i < numOfHidden; i++) {
			hiddenWeights[i] = generateRandomWeights(numOfInput);
		}
		// num of weights on hidden layer = length of first input size
		MomentumNeuronLayer hiddenLayer = new MomentumNeuronLayer(numOfHidden,hiddenWeights, 0.05);

		double[][] outputWeights = new double[numOfOutput][numOfHidden + 1];
		// +1 because we have a bias input as well

		for (int i = 0; i < numOfOutput; i++) {
			outputWeights[i] = generateRandomWeights(numOfHidden + 1);
		}

		MomentumNeuronLayer outputLayer = new MomentumNeuronLayer(numOfOutput,outputWeights, 0.05);

		// link them up
		hiddenLayer.setNextLayer(outputLayer);
		outputLayer.setPreviousLayer(hiddenLayer);

		return hiddenLayer;
	}

	/*
	 * This allows you to load saved weights in a matrix: builds a 2 layer
	 * network from pre-existing weight matrices and wires the layers
	 * together. Returns the hidden (first) layer.
	 * 
	 * TODO: Generalise this out
	 */
	public static MomentumNeuronLayer load2LayerNetwork(double[][] hiddenLayerWeights, double[][] outputLayerWeights) {
		// NOTE(review): uses learning constant 0.2 while
		// create2LayerNetwork uses 0.05 — confirm which is intended
		MomentumNeuronLayer hiddenLayer = new MomentumNeuronLayer(hiddenLayerWeights.length, hiddenLayerWeights, 0.2);
		MomentumNeuronLayer outputLayer = new MomentumNeuronLayer(outputLayerWeights.length, outputLayerWeights, 0.2);

		hiddenLayer.setNextLayer(outputLayer);
		outputLayer.setPreviousLayer(hiddenLayer);

		return hiddenLayer;
	}

	/*
	 * This will load up the weights from a filename. There are actually 2
	 * files that share the name pattern: filename points at the
	 * "-output.txt" file and the matching "-hidden.txt" file (same prefix
	 * up to the last '-') is loaded as well. Each file is a comma
	 * separated list with one matrix row per line (trailing comma
	 * included), exactly the format saveWeights writes.
	 * 
	 * On any read/parse failure the exception is printed and null weight
	 * matrices are passed on, matching the old behaviour.
	 */
	public static MomentumNeuronLayer loadWeights(String filename) {
		double[][] hiddenLayerWeights = null;
		double[][] outputLayerWeights = null;

		try {
			outputLayerWeights = readWeightMatrix(filename);
			// derive the hidden-layer file name from the output one
			filename = filename.substring(0, filename.lastIndexOf('-'));
			filename += "-hidden.txt";
			hiddenLayerWeights = readWeightMatrix(filename);
		} catch (Exception e) {
			System.out.println(e);
		}
		return load2LayerNetwork(hiddenLayerWeights, outputLayerWeights);
	}

	/*
	 * Parse one comma separated weight file into a matrix. ',' ends a
	 * value, '\n' ends a row. The reader is always closed, even when
	 * parsing fails (the old inline version leaked it on error).
	 */
	private static double[][] readWeightMatrix(String filename) throws Exception {
		ArrayList<ArrayList<Double>> rows = new ArrayList<ArrayList<Double>>();
		ArrayList<Double> row = new ArrayList<Double>();
		String token = "";

		FileReader reader = new FileReader(filename);
		try {
			int read;
			while ((read = reader.read()) != -1) {
				if (read == (int) (',')) {
					row.add(Double.valueOf(token));
					token = ""; // reset this
				} else if (read == (int) ('\n')) {
					rows.add(row);
					row = new ArrayList<Double>();
					token = ""; // reset this
				} else {
					token += (char) read;
				}
			}
		} finally {
			reader.close();
		}

		// translate from the array list to the array
		double[][] matrix = new double[rows.size()][rows.get(0).size()];
		for (int i = 0; i < rows.size(); i++) {
			for (int j = 0; j < rows.get(0).size(); j++) {
				matrix[i][j] = rows.get(i).get(j);
			}
		}
		return matrix;
	}

	// TODO::Peter. You can see here how I am saving the weights, with the
	// inputs being a 1 or -1 it can be even more simplistic. The format is
	// a comma separated list, with a line break when a matrix row finishes.
	/*
	 * This will save both the hidden and output layer weights to files
	 * named nnWeights-<suffix>-output.txt and nnWeights-<suffix>-hidden.txt
	 * (suffix defaults to the current time in millis when null). The format
	 * matches what loadWeights expects.
	 * 
	 * Returns true on success; on failure the exception is printed and
	 * false is returned.
	 */
	public boolean saveWeights(String suffix) {
		boolean success = false;
		// the output layer's weights live on the next layer; the hidden
		// layer's weights are this object's own lastWeight
		double[][] outputLayerWeights = this.nextLayer.getLastWeights();

		// write to a file, generate the name auto-magically
		String filename = "c:\\nnWeights-";
		if (suffix == null)
			filename += String.valueOf(System.currentTimeMillis());
		else
			filename += suffix;

		try {
			writeWeightMatrix(filename + "-output.txt", outputLayerWeights);
			writeWeightMatrix(filename + "-hidden.txt", lastWeight);
			success = true;
		} catch (Exception e) {
			System.out.println(e);
		}
		return success;
	}

	/*
	 * Write one weight matrix as comma separated rows. The writer is
	 * always closed, even on error (the old inline version leaked it).
	 */
	private static void writeWeightMatrix(String filename, double[][] weights) throws Exception {
		FileWriter writer = new FileWriter(filename);
		try {
			for (double[] weightArray : weights) {
				for (double weight : weightArray) {
					writer.write(weight + ",");
				}
				writer.write("\n");
			}
		} finally {
			writer.close();
		}
	}

	/*
	 * Train with a crude early-stopping scheme: run training cycles until
	 * the stop condition fires (see errorUp below) or 2500 cycles elapse
	 * (then -1 is returned). Tracks the cycle that classified the most
	 * training+validation inputs and restores those weights at the end.
	 * Error curves and weights are dumped to disk via writeErrorCycle /
	 * saveWeights.
	 * 
	 * NOTE(review): the printed "/20 = x*5%" summaries assume exactly 20
	 * inputs, and the (j % 10) check assumes inputs are ordered by digit
	 * class in blocks of 10 — confirm against the callers.
	 */
	public int trainTillGoodEnough(double[][] trainingInputs, double[][] desired, double[][] validationInputs) {
		// there are n inputs
		int numOfInputs = trainingInputs.length;
		int trainingCycleNum = 0;

		double[] outputValidation;
		double[] outputTraining;
		boolean success = false;
		//This is the map of the following
		//	TrainingCycleNum ->
		//				Input Number ->
		//					Error On Each Pt in the Matrix
		//	We should simplify this
		//	TrainingCycleNum ->
		//				Error on each Input
		//This would be a HashMap<Integer, ArrayList<Double>>
		//We don't actually care about the individual points
		HashMap<Integer, ArrayList<Double>> cycleErrorTraining = new HashMap<Integer, ArrayList<Double>>();
		HashMap<Integer, ArrayList<Double>> cycleErrorValidation = new HashMap<Integer, ArrayList<Double>>();

		ArrayBlockingQueue<Double[][]> weightsHidden = new ArrayBlockingQueue<Double[][]>(4); // save the past 4 weights
		ArrayBlockingQueue<Double[][]> weightsOutput = new ArrayBlockingQueue<Double[][]>(4);

		int mostClassified = 0;
		int mostClassifiedCycleNum = 0;
		double[][] mostClassifiedWeightsHidden = new double[1][1];
		double[][] mostClassifiedWeightsOutput = new double[1][1];
		// run until the validation error is increasing for 3 consecutive
		// periods
		// when this condition occurs, go back and grab the weights from the
		// last period before it was increasing
		// make these the current weights in the neural net
		// return the cycle num
		while (!success) {
			// train for all inputs
			for (int i = 0; i < numOfInputs; i++) {
				this.trainLayer(trainingInputs[i], desired[i]);
			}
			trainingCycleNum++;
			ArrayList<Double> tempErrorTraining = new ArrayList<Double>();
			ArrayList<Double> tempErrorValidation = new ArrayList<Double>();

			// now try to go through all the inputs and classify them
			for (int i = 0; i < numOfInputs; i++) {
				outputValidation = this.testOutput(validationInputs[i]);
				outputTraining = this.testOutput(trainingInputs[i]);
				double cycleError = 0;

				for (int j = 0; j < outputTraining.length; j++) {
					cycleError += desired[i][j] - outputTraining[j];
				}
				//Cycle Error now contains the error for that input for this cycle
				tempErrorTraining.add(cycleError);

				cycleError = 0;
				for (int j = 0; j < outputTraining.length; j++) {
					cycleError += desired[i][j] - outputValidation[j];
				}
				tempErrorValidation.add(cycleError);
			}
			cycleErrorTraining.put(trainingCycleNum, tempErrorTraining);
			cycleErrorValidation.put(trainingCycleNum, tempErrorValidation);

			if (trainingCycleNum > 4) {
				// squared-error summary of the last 4 validation cycles
				double[] cycleErrorSum = new double[4];

				for (int j = 0; j < 4; j++) {
					cycleErrorSum[j] = 0;
					ArrayList<Double> cycleError = cycleErrorValidation.get(trainingCycleNum-j); //grab the last 4
					for (Double d : cycleError) { //sum up all the errors for this cycle
							cycleErrorSum[j] += d;
					}
					cycleErrorSum[j] = Math.pow(cycleErrorSum[j], 2) * 0.5;
					//System.out.println("Cycle #"+(trainingCycleNum-j)+" has error " + cycleErrorSum[j]);
				}
				// NOTE(review): despite the name, errorUp ends up true only
				// when EVERY training input classifies correctly (see loop
				// below) — the commented-out block was the original
				// error-trend check. Confirm the intended stop condition.
				boolean errorUp = true;
/*				boolean[] lazyErrorUp = new boolean[3];
				for (int j = 2; j > 0; j--) {
					if (cycleErrorSum[j] < cycleErrorSum[j+1])
						lazyErrorUp[j] = true;
					else
						lazyErrorUp[j] = false;
				}
				if (lazyErrorUp[0] && lazyErrorUp[1] && lazyErrorUp[2])
					errorUp = true;*/
				int numClassified = 0;
				int totalClassified = 0;
				for (int j = 0; j < numOfInputs; j++) {
					int max = max(this.testOutput(trainingInputs[j]));
					if (max == (j % 10)) {
						errorUp = errorUp && true;
						numClassified++;
					}
					else
						errorUp = false;
				}
				totalClassified = numClassified;
				System.out.print("CycleNum #"+trainingCycleNum+" numClassified="+numClassified+"/20 = "+(numClassified*5)+"%");
				numClassified = 0;
				for (int j = 0; j < numOfInputs; j++) {
					int max = max(this.testOutput(validationInputs[j]));
					if (max == (j % 10))
						numClassified++;
				}
				System.out.println(" numClassified="+numClassified+"/20 = "+(numClassified*5)+"%");
				totalClassified += numClassified;

				if (totalClassified > mostClassified) { //we have a new max!
					// DEEP-COPY the snapshot. The old code aliased the live
					// lastWeight arrays, whose rows keep being reassigned as
					// training continues, so the "best" weights silently
					// drifted and the restore at the end was a no-op.
					mostClassifiedWeightsHidden = fromDouble(toDouble(this.lastWeight));
					mostClassifiedWeightsOutput = fromDouble(toDouble(this.nextLayer.lastWeight));
					mostClassifiedCycleNum = trainingCycleNum;
					mostClassified = totalClassified;
				}

				if (errorUp) {
					//System.out.println("Error is increasing, stop and reset the weights");
					success = true;
					Double[][] weights = null;
					// drain the queue; the LAST element taken (the newest
					// snapshot) is the one restored
					while (weightsHidden.size() > 0) {
						try {
							weights = weightsHidden.take();
						} catch (InterruptedException e) {
							e.printStackTrace();
						}
					}
					this.lastWeight = fromDouble(weights);
					while (weightsOutput.size() > 0) {
						try {
							weights = weightsOutput.take();
						} catch (InterruptedException e) {
							e.printStackTrace();
						}
					}
					this.nextLayer.lastWeight = fromDouble(weights);
				} else {
					//System.out.println("Error is not increasing, continue and save current weights");
					// drop the oldest snapshot, push the current weights
					weightsHidden.remove();
					weightsHidden.add(toDouble(this.getLastWeights()));
					weightsOutput.remove();
					weightsOutput.add(toDouble(this.nextLayer.getLastWeights()));
				}
			} else {
				// first 4 cycles: just fill the snapshot queues
				weightsHidden.add(toDouble(this.getLastWeights()));
				weightsOutput.add(toDouble(this.nextLayer.getLastWeights()));
			}
			// additional check to make sure we don't loop for ever
			if (trainingCycleNum > 2500) {
				success = true;
				trainingCycleNum = -1;
			}
		}
		//write out the error info first
		//we have the following data structure HashMap<Integer,ArrayList<double>>>
		//we need to make this into a double[][]
		Double[][] errorCurveTraining = new Double[cycleErrorTraining.size()][1];
		Double[][] errorCurveValidation = new Double[cycleErrorTraining.size()][1];
		int i = 0;
		Double[] tempArray = new Double[1];
		for (ArrayList<Double> values : cycleErrorTraining.values()) {
			errorCurveTraining[i] = values.toArray(tempArray);
			i++;
		}
		i = 0;
		for (ArrayList<Double> values : cycleErrorValidation.values()) {
			errorCurveValidation[i] = values.toArray(tempArray);
			i++;
		}
		writeErrorCycle(fromDouble(errorCurveTraining));
		writeErrorCycle(fromDouble(errorCurveValidation));

		this.saveWeights(null);

		//now overwrite the weights with the best snapshot and save them
		this.lastWeight = mostClassifiedWeightsHidden;
		this.nextLayer.lastWeight = mostClassifiedWeightsOutput;

		this.saveWeights(null);

		System.out.println("Classified the most ("+mostClassified+") on Cycle#"+mostClassifiedCycleNum);
		return trainingCycleNum;
	}

	/*
	 * Box a primitive matrix into its Double[][] equivalent (assumes a
	 * rectangular, non-empty matrix).
	 */
	private static Double[][] toDouble(double[][] array) {
		int rows = array.length;
		int cols = array[0].length;
		Double[][] boxed = new Double[rows][cols];
		for (int r = 0; r < rows; r++) {
			for (int c = 0; c < cols; c++) {
				boxed[r][c] = Double.valueOf(array[r][c]);
			}
		}
		return boxed;
	}

	/*
	 * Unbox a Double[][] matrix back into primitives (assumes a
	 * rectangular, non-empty matrix with no null elements).
	 */
	private static double[][] fromDouble(Double[][] array) {
		int rows = array.length;
		int cols = array[0].length;
		double[][] unboxed = new double[rows][cols];
		for (int r = 0; r < rows; r++) {
			for (int c = 0; c < cols; c++) {
				unboxed[r][c] = array[r][c]; // auto-unbox
			}
		}
		return unboxed;
	}

	/*
	 * Train until every training AND validation input is classified
	 * exactly, returning the number of training cycles used. Returns -1 if
	 * more than 25000 cycles pass without full classification.
	 * 
	 * To then run tests on the trained network, use the
	 * testOutput(double[] input) method.
	 */
	public int trainTillClassifed(double[][] trainingInputs,
			double[][] desired, double[][] validationInputs) {
		// there are n inputs
		int numOfInputs = trainingInputs.length;
		int trainingCycleNum = 0;
		boolean success = false;

		while (!success) {
			// one full training pass over all inputs
			for (int i = 0; i < numOfInputs; i++) {
				this.trainLayer(trainingInputs[i], desired[i]);
			}
			trainingCycleNum++;

			// check whether every input now classifies exactly
			boolean allClassified = true;
			for (int i = 0; i < numOfInputs; i++) {
				double[] outValidation = this.classifyInput(validationInputs[i]);
				double[] outTraining = this.classifyInput(trainingInputs[i]);

				// a single mismatching component fails the whole input;
				// break replaces the old "j = length" loop-exit hack
				for (int j = 0; j < outTraining.length; j++) {
					if (desired[i][j] - outTraining[j] != 0) {
						allClassified = false;
						break;
					}
				}
				for (int j = 0; j < outTraining.length; j++) {
					if (desired[i][j] - outValidation[j] != 0) {
						allClassified = false;
						break;
					}
				}
			}

			if (allClassified) {
				success = true;
			} else if (trainingCycleNum > 25000) {
				// additional check to make sure we don't loop for ever
				success = true;
				trainingCycleNum = -1;
			}
		}

		return trainingCycleNum;
	}

	/*
	 * Dump a cycle-error matrix to c:\nnError-<millis>-output.txt as comma
	 * separated rows (same layout as the weight files). Errors are printed
	 * and otherwise swallowed, matching the other save helpers.
	 */
	public static void writeErrorCycle(double[][] cycleError) {
		// the old code carried a suffix variable that was always null, so
		// the timestamp branch was the only one ever taken
		String filename = "c:\\nnError-" + String.valueOf(System.currentTimeMillis());

		try {
			FileWriter writer = new FileWriter(filename + "-output.txt");
			try {
				for (int i = 0; i < cycleError.length; i++) {
					for (int j = 0; j < cycleError[0].length; j++) {
						writer.write(cycleError[i][j] + ",");
					}
					writer.write("\n");
				}
			} finally {
				// always release the file handle (the old code leaked it
				// when a write threw)
				writer.close();
			}
		} catch (Exception e) {
			System.out.println(e);
		}
	}

	/*
	 * The main function contains references to several different test
	 * methods; uncomment the experiment you want to run. Only
	 * trainWith5Inputs is active at the moment. (NOTE(review): several of
	 * the commented-out methods, e.g. testAssign2 / getCycleError, are not
	 * defined in this file — they may live elsewhere or be stale.)
	 */
	public static void main(String args[]) {
		//MomentumNeuronLayer.question_3_7_randomWeights();
		//MomentumNeuronLayer.testAssign2();
		//MomentumNeuronLayer.getCycleError();
		MomentumNeuronLayer.trainWith5Inputs();
		//MomentumNeuronLayer.question_3_5();
		//MomentumNeuronLayer.run1LayerTest();
	}

	/*
	 * Quick function to make sure that it saves and loads weights: builds
	 * a fixed network, saves it, reloads it from disk, and runs the same
	 * experiment on both — the printed outputs should be identical.
	 */
	public static void testInputOutput() {
		String fileSuffix = "TEST";
		// must match the name saveWeights generates for this suffix
		String filename = "c:\\nnWeights-TEST-output.txt";

		double[][] weights_layer1 = new double[][] { { -6.9938, 6.6736, 1.5555 }, { -4.2812, 3.9127, 3.6233 } };
		double[][] weights_layer2 = new double[][] { { -0.8568, 0.3998, -1.0702 } };

		MomentumNeuronLayer neuralNet = load2LayerNetwork(weights_layer1,
				weights_layer2);

		neuralNet.saveWeights(fileSuffix);

		MomentumNeuronLayer loadedNet = loadWeights(filename);

		System.out.println("Save/Load Test Complete");
		System.out
				.println("Should have identical output on the first net, and the one that had its weights loaded");
		neuralNet.question_3_7();
		loadedNet.question_3_7();

	}

	/*
	 * Quick function to make sure the trainTillClassifed function works:
	 * trains a fixed 2-layer net on the augmented XOR set (using the
	 * training set as its own validation set) and prints the resulting
	 * classifications.
	 */
	public static void testTrainerFn() {
		double[][] weights_layer1 = new double[][] { { -6.9938, 6.6736, 1.5555 }, { -4.2812, 3.9127, 3.6233 } };
		double[][] weights_layer2 = new double[][] { { -0.8568, 0.3998, -1.0702 } };

		double[][] z = new double[4][1];
		// inputs are already augmented with a trailing -1 bias
		double[][] in = new double[][] { { 0, 0, -1 }, { 0, 1, -1 },
				{ 1, 1, -1 }, { 1, 0, -1 } };
		double[][] desired = new double[][] { { -1 }, { 1 }, { -1 }, { 1 } };
		MomentumNeuronLayer neuralNet = load2LayerNetwork(weights_layer1,weights_layer2);

		int numCycles = neuralNet.trainTillClassifed(in, desired, in);

		System.out.println("Trained for " + numCycles + " till validated");
		// Prove validation
		for (int i = 0; i < 4; i++) {
			System.out.println("Input # " + i);
			z[i] = neuralNet.classifyInput(in[i]);
			System.out.println(z[i][0]);
		}

	}

	/*
	 * Taken from the lecture notes, used for most testing. Trains on the
	 * augmented XOR-style set for 260 presentations (65 epochs), prints a
	 * per-cycle error trace, then prints the final thresholded
	 * classifications.
	 */
	public void question_3_7() {
		// input has been augmented
		double[][] in = new double[][] { { 0, 0, -1 }, { 0, 1, -1 },
				{ 1, 1, -1 }, { 1, 0, -1 } };
		double[][] desired = new double[][] { { -1 }, { 1 }, { -1 }, { 1 } };
		double[][] z = new double[4][1];
		double[] cycleError = new double[250];
		int cycleNum = 0;
		for (int i = 0; i < 260; i++) {
			// NOTE(review): trainLayer returns the ERROR SIGNAL, not the
			// network output, so this "error" trace is desired minus error
			// signal — confirm that is what was intended
			z[i % 4] = this.trainLayer(in[i % 4], desired[i % 4]);
			if (i % 4 == 0 && i != 0) {
				for (int j = 0; j < 4; j++) {
					cycleError[cycleNum] += desired[j][0] - z[j][0];
				}
				cycleNum++;
			}
		}
		for (double error : cycleError) {
			System.out.print(error + ",");
		}
		System.out.println();

		// see if it can correctly classify them
		for (int i = 0; i < 4; i++) {
			System.out.println("Input # " + i);
			z[i] = this.classifyInput(in[i]);
			System.out.println(z[i][0]);
		}
	}

	/*
	 * Taken from the lecture notes, this time with random weights. Should
	 * converge eventually. Uses the training set as its own validation set
	 * and prints the final classifications.
	 */
	public static void question_3_7_randomWeights() {
		// inputs are already augmented with a trailing -1 bias
		double[][] in = new double[][] { { 0, 0, -1 }, { 0, 1, -1 },
				{ 1, 1, -1 }, { 1, 0, -1 } };
		double[][] desired = new double[][] { { -1 }, { 1 }, { -1 }, { 1 } };
		double[][] z = new double[4][1];

		// 2 hidden neurons, 1 output neuron, 3 weights each
		double[][] weights_layer1 = new double[2][];
		double[][] weights_layer2 = new double[1][];
		weights_layer1[0] = generateRandomWeights(3);
		weights_layer1[1] = generateRandomWeights(3);

		weights_layer2[0] = generateRandomWeights(3);

		MomentumNeuronLayer neuralNet = load2LayerNetwork(weights_layer1,weights_layer2);

		int numCycles = neuralNet.trainTillGoodEnough(in, desired, in);

		System.out.println("Trained for " + numCycles + " till validated");
		// Prove validation
		for (int i = 0; i < 4; i++) {
			System.out.println("Input # " + i);
			z[i] = neuralNet.classifyInput(in[i]);
			System.out.println(z[i][0]);
		}

	}

	/*
	 * Load 20x16 digit grids from c:\nninputs\train-<digit>-<sample> via
	 * Filesystem.readData (samples 0-1 for training, 2-3 for validation;
	 * NOTE(review): readData's file format is defined in the gui package —
	 * confirm there), append a -1 bias entry to each flattened grid, build
	 * the 10-class desired vectors, train with trainTillGoodEnough, and
	 * print where each output vector peaks for both sets.
	 */
	public static void trainWith5Inputs() {
		// 2 samples per digit, 10 digits; each input is a flattened 20x16
		// grid plus one bias slot
		double[][] trainingInputs = new double[10*2][20 * 16 +1];
		double[][] desiredOutputs = new double[10*2][10];
		double[][] validationInputs = new double[10*2][20 * 16 +1];
		//load in training inputs
		String fileLoc = "c:\\nninputs\\train-";
		int numTrain =0;
		int numVal = 0;
		for (int i = 0; i < 10; i++) {
			// samples 0 and 1 of each digit go to the training set
			for (int j = 0; j < 2; j++) {
				NeuralNetworkSet temp = Filesystem.readData(fileLoc+i+"-"+j);
				double[] input = new double[20 * 16 +1];
				int n = 0;
				// flatten the 2-D grid row by row
				for (int[] out : temp.getInput()) {
					for (int o : out) {
						input[n] = (double) o;
						n++;
					}
				}
				// trailing bias entry (index 320 = 20*16)
				input[320] = -1;
				trainingInputs[numTrain] = input;
				numTrain++;
			}
			// samples 2 and 3 of each digit go to the validation set
			for (int j = 2; j < 4; j++) {
				NeuralNetworkSet temp = Filesystem.readData(fileLoc+i+"-"+j);
				double[] input = new double[20 * 16 +1];
				int n = 0;
				for (int[] out : temp.getInput()) {
					for (int o : out) {
						input[n] = (double) o;
						n++;
					}
				}
				input[320] = -1;
				validationInputs[numVal] = input;
				numVal++;
			}
		}
		// with the desired, we know what to expect for each input:
		// +1 in the slot matching the digit (i % 10), -1 elsewhere
		for (int i = 0; i < desiredOutputs.length; i++) {
			for (int j = 0; j < desiredOutputs[0].length; j++) {
				if (i%10 == j)
					desiredOutputs[i][j] = 1;
				else
					desiredOutputs[i][j] = -1;
			}
		}

		// 321 inputs, 10 outputs, 30 hidden neurons
		MomentumNeuronLayer neuralNet = MomentumNeuronLayer.create2LayerNetwork((20 * 16 +1), 10, 30);
		//should probably write a fn to dump the starting weights
		//to provide like comparisons
		int cycleNum = 0;

		cycleNum = neuralNet.trainTillGoodEnough(trainingInputs, desiredOutputs, validationInputs);

		System.out.println("NumOfCycles= "+cycleNum);
		double[][] z = new double[10][3];
		System.out.println("ON INPUT SET");
		// print the winning output index for both samples of each digit
		for (int i = 0; i < 10; i++) {
			System.out.print("Input # " + i);
			z[i] = neuralNet.testOutput(trainingInputs[i]);
			int max = max(z[i]);
			System.out.print(" MAX at: " + max);
			z[i] = neuralNet.testOutput(trainingInputs[i+10]);
			max = max(z[i]);
			System.out.println(" MAX at: " + max);
		}
		System.out.println("FINISHED");

		System.out.println("ON VALIDATION SET");
		for (int i = 0; i < 10; i++) {
			System.out.print("Input # " + i);
			z[i] = neuralNet.testOutput(validationInputs[i]);
			int max = max(z[i]);
			System.out.print(" MAX at: " + max);
			z[i] = neuralNet.testOutput(validationInputs[i+10]);
			max = max(z[i]);
			System.out.println(" MAX at: " + max);
		}
		System.out.println("FINISHED");

	}
	public static void firstTest() {
		// First end-to-end smoke test: load one training and one validation
		// bitmap per digit, train a 2-layer net for a fixed number of
		// presentations, then print the raw outputs per training pattern.
		final int inputSize = 20 * 16;
		double[][] trainingInputs = new double[10][inputSize];
		double[][] desiredOutputs = new double[10][10];
		double[][] validationInputs = new double[10][inputSize];
		double[] sum = new double[10];
		// Files live in a fixed directory with a fixed naming scheme:
		// nn-training-0.txt, nn-training-1.txt, nn-validation-0.txt, ...

		// Desired output: +1 on the neuron matching the digit, -1 elsewhere.
		for (int row = 0; row < desiredOutputs.length; row++) {
			for (int col = 0; col < desiredOutputs[0].length; col++) {
				desiredOutputs[row][col] = (row == col) ? 1 : -1;
			}
		}
		for (int digit = 0; digit < 10; digit++) {
			NeuralNetworkSet set = Filesystem.readData("c:\\nninputs\\nn-training-" + digit + ".txt");
			double[] pattern = new double[inputSize];
			int idx = 0;
			for (int[] row : set.getInput()) {
				for (int pixel : row) {
					pattern[idx] = (double) pixel;
					sum[digit] += pixel;
					idx++;
				}
			}
			trainingInputs[digit] = pattern;
		}
		System.out.println("Training inputs loaded");
		// Per-digit pixel sums: a quick sanity check that patterns differ.
		for (double s : sum) {
			System.out.print(s + ",");
		}
		System.out.println();
		for (int digit = 0; digit < 10; digit++) {
			NeuralNetworkSet set = Filesystem.readData("c:\\nninputs\\nn-validation-" + digit + ".txt");
			double[] pattern = new double[inputSize];
			int idx = 0;
			for (int[] row : set.getInput()) {
				for (int pixel : row) {
					pattern[idx] = (double) pixel;
					idx++;
				}
			}
			validationInputs[digit] = pattern;
		}
		System.out.println("Validation inputs loaded");
		MomentumNeuronLayer neuralNet = MomentumNeuronLayer.create2LayerNetwork(inputSize, 10, 6);
		// Fixed-length training: 100000 presentations cycling over the digits.
		for (int iter = 0; iter < 100000; iter++) {
			neuralNet.trainLayer(trainingInputs[iter % 10], desiredOutputs[iter % 10]);
		}
		double[][] z = new double[10][10];
		for (int digit = 0; digit < 10; digit++) {
			System.out.println("Input #" + digit);
			z[digit] = neuralNet.testOutput(trainingInputs[digit]);
			for (double out : z[digit]) {
				System.out.print(out + ",");
			}
			System.out.println();
		}
	}

	public static void selectWeights() {
		// Repeatedly trains freshly initialised networks until one converges
		// (trainTillClassifed returns a cycle count instead of -1), printing
		// each net's classification outputs and saving the converged weights.
		final int inputSize = 20 * 16;
		double[][] trainingInputs = new double[10][inputSize];
		double[][] desiredOutputs = new double[10][10];
		double[][] validationInputs = new double[10][inputSize];
		// Files live under h:\nninputs with names nn-training-N.txt /
		// nn-validation-N.txt, one file per digit.

		// Desired output: +1 on the neuron matching the digit, -1 elsewhere.
		for (int row = 0; row < desiredOutputs.length; row++) {
			for (int col = 0; col < desiredOutputs[0].length; col++) {
				desiredOutputs[row][col] = (row == col) ? 1 : -1;
			}
		}
		for (int digit = 0; digit < 10; digit++) {
			NeuralNetworkSet set = Filesystem.readData("h:\\nninputs\\nn-training-" + digit + ".txt");
			double[] pattern = new double[inputSize];
			int idx = 0;
			for (int[] row : set.getInput()) {
				for (int pixel : row) {
					pattern[idx++] = (double) pixel;
				}
			}
			trainingInputs[digit] = pattern;
		}

		for (int digit = 0; digit < 10; digit++) {
			NeuralNetworkSet set = Filesystem.readData("h:\\nninputs\\nn-validation-" + digit + ".txt");
			double[] pattern = new double[inputSize];
			int idx = 0;
			for (int[] row : set.getInput()) {
				for (int pixel : row) {
					pattern[idx++] = (double) pixel;
				}
			}
			validationInputs[digit] = pattern;
		}
		MomentumNeuronLayer neuralNet = null;
		int numCycles = -1;
		int numNetworks = 0;
		while (numCycles == -1) {
			neuralNet = MomentumNeuronLayer.create2LayerNetwork(inputSize, 10, 10);
			numCycles = neuralNet.trainTillClassifed(trainingInputs,
					desiredOutputs, validationInputs);
			System.out.println("Net #" + numNetworks + " gave numCycles="
					+ numCycles);
			double[] z = new double[10];
			for (int digit = 0; digit < 10; digit++) {
				z = neuralNet.classifyInput(trainingInputs[digit]);
				System.out.print("Input #" + digit + " ");
				for (int col = 0; col < 10; col++)
					System.out.print(z[col] + ",");
				System.out.println("");
			}

			// A non-negative cycle count means this net converged; keep it.
			if (numCycles > -1) {
				neuralNet.saveWeights(null);
			}
			numNetworks++;
		}

	}

	public static void getCycleError() {
		// Trains a single 2-layer network with trainTillGoodEnough and then
		// reports, for the first six digits, which output neuron fires
		// strongest on the training and validation patterns.
		final int inputSize = 20 * 16;
		double[][] trainingInputs = new double[10][inputSize];
		double[][] desiredOutputs = new double[10][10];
		double[][] validationInputs = new double[10][inputSize];

		// Files live under c:\nninputs with names nn-training-N.txt /
		// nn-validation-N.txt, one file per digit.

		// Desired output: +1 on the neuron matching the digit, -1 elsewhere.
		// (This duplicates the hard-coded `desired` matrix below, which is
		// the one actually handed to the trainer.)
		for (int row = 0; row < desiredOutputs.length; row++) {
			for (int col = 0; col < desiredOutputs[0].length; col++) {
				desiredOutputs[row][col] = (row == col) ? 1 : -1;
			}
		}
		for (int digit = 0; digit < 10; digit++) {
			NeuralNetworkSet set = Filesystem.readData("c:\\nninputs\\nn-training-" + digit + ".txt");
			double[] pattern = new double[inputSize];
			int idx = 0;
			for (int[] row : set.getInput()) {
				for (int pixel : row) {
					pattern[idx++] = (double) pixel;
				}
			}
			trainingInputs[digit] = pattern;
		}

		for (int digit = 0; digit < 10; digit++) {
			NeuralNetworkSet set = Filesystem.readData("c:\\nninputs\\nn-validation-" + digit + ".txt");
			double[] pattern = new double[inputSize];
			int idx = 0;
			for (int[] row : set.getInput()) {
				for (int pixel : row) {
					pattern[idx++] = (double) pixel;
				}
			}
			validationInputs[digit] = pattern;
		}
		// Identity-style target matrix: row d is +1 at column d, -1 elsewhere.
		double[][] desired = new double[][] {
				{1,-1,-1,-1,-1,-1,-1,-1,-1,-1},
				{-1,1,-1,-1,-1,-1,-1,-1,-1,-1},
				{-1,-1,1,-1,-1,-1,-1,-1,-1,-1},
				{-1,-1,-1,1,-1,-1,-1,-1,-1,-1},
				{-1,-1,-1,-1,1,-1,-1,-1,-1,-1},
				{-1,-1,-1,-1,-1,1,-1,-1,-1,-1},
				{-1,-1,-1,-1,-1,-1,1,-1,-1,-1},
				{-1,-1,-1,-1,-1,-1,-1,1,-1,-1},
				{-1,-1,-1,-1,-1,-1,-1,-1,1,-1},
				{-1,-1,-1,-1,-1,-1,-1,-1,-1,1}};

		MomentumNeuronLayer neuralNet = MomentumNeuronLayer.create2LayerNetwork(inputSize, 10, 10);
		int cycleNum = neuralNet.trainTillGoodEnough(trainingInputs, desired, validationInputs);

		System.out.println("NumOfCycles= "+cycleNum);
		double[][] z = new double[6][3];
		System.out.println("ON INPUT SET");
		for (int i = 0; i < 6; i++) {
			System.out.println("Input # " + i);
			z[i] = neuralNet.testOutput(trainingInputs[i]);
			System.out.println(" MAX at: " + max(z[i]));
		}
		System.out.println("ON VALIDATION SET");
		for (int i = 0; i < 6; i++) {
			System.out.println("Validation # " + (i));
			z[i] = neuralNet.testOutput(validationInputs[i]);
			System.out.println("MAX at: " + max(z[i]));
		}
		System.out.println("FINISHED");

	}

	public void question_3_9() {
		// Exercise 3.9: four 2-bit patterns with a trailing -1 bias input
		// (the input rows are already augmented), one output each.
		double[][] in = { { 0, 0, -1 }, { 0, 1, -1 }, { 1, 1, -1 }, { 1, 0, -1 } };
		double[][] desired = { { -1 }, { 1 }, { -1 }, { 1 } };

		int cycleNum = this.trainTillClassifed(in, desired, in);
		System.out.println(cycleNum);

		// Print the single output value the trained net gives each pattern.
		double[][] z = new double[4][1];
		for (int i = 0; i < 4; i++) {
			System.out.println("Input # " + i);
			z[i] = this.classifyInput(in[i]);
			System.out.println(z[i][0]);
		}
	}

	/*
	 * Builds a tiny 2-layer net from fixed starting weights and runs the
	 * question 3.9 exercise on it.
	 */
	public static void testMomentum() {
		double[][] weights_layer1 = {
				{ -6.9938, 6.6736, 1.5555 },
				{ -4.2812, 3.9127, 3.6233 } };
		double[][] weights_layer2 = { { -0.8568, 0.3998, -1.0702 } };

		load2LayerNetwork(weights_layer1, weights_layer2).question_3_9();
	}

	public static void testTrainTillGoodEnough() {
		// Same fixed starting weights as testMomentum, but exercises the
		// trainTillGoodEnough path via question_3_9_mod.
		double[][] weights_layer1 = {
				{ -6.9938, 6.6736, 1.5555 },
				{ -4.2812, 3.9127, 3.6233 } };
		double[][] weights_layer2 = { { -0.8568, 0.3998, -1.0702 } };

		load2LayerNetwork(weights_layer1, weights_layer2).question_3_9_mod();
	}

	public void question_3_9_mod() {
		// Variant of question_3_9 that trains with trainTillGoodEnough
		// instead of trainTillClassifed; inputs carry a trailing -1 bias.
		double[][] in = { { 0, 0, -1 }, { 0, 1, -1 }, { 1, 1, -1 }, { 1, 0, -1 } };
		double[][] desired = { { -1 }, { 1 }, { -1 }, { 1 } };

		int cycleNum = this.trainTillGoodEnough(in, desired, in);
		System.out.println(cycleNum);

		// Print the single output value the trained net gives each pattern.
		double[][] z = new double[4][1];
		for (int i = 0; i < 4; i++) {
			System.out.println("Input # " + i);
			z[i] = this.classifyInput(in[i]);
			System.out.println(z[i][0]);
		}
	}
	
	public static void testMomentumAndTrainer() {
		// Fixed starting weights: 3 hidden neurons with 10 inputs each and
		// 3 output neurons with 4 inputs each (presumably the 3 hidden
		// outputs plus a bias term - matches the augmentation used elsewhere).
		double[][] weights_layer1 = {
				{ -0.0963, -0.7669, 0.0939, 0.1987, 0.3748, -0.7527, -0.8398, 0.8681, 0.5382, 0.3438}, 
				{ -0.4828, -0.5809, -0.9887, -0.1362, -0.2494, 0.9467, -0.0115, -0.4997, -0.0001, 0.3633},
				{ 0.3310, -0.7726, -0.6481, -0.8350, 0.6724, -0.9407, 0.5389, -0.2807, 0.4985, 0.5135}};

		double[][] weights_layer2 = { 
				{ 0.9081, 0.9485, -0.1513, -0.5108},
				{-0.2862, -0.4820, 0.6261, 0.5212},
				{ 0.6762, -0.4969, 0.7958, 0.3897}};

		load2LayerNetwork(weights_layer1, weights_layer2).question_3_10();
	}
	
	public void question_3_10() {
		// Exercise 3.10: three 10-element patterns (input rows already
		// augmented per the original note), one target class each, plus a
		// separate three-pattern validation set for early stopping.
		double[][] in = { 
				{ 1,1,1,1,-1,-1,1,1,1,-1}, 
				{ -1,1,-1,-1,1,-1,-1,1,-1,-1},
				{ 1,1,1,-1,1,-1,-1,1,-1,-1}};
		double[][] desired = { 
				{ 1, -1, -1 }, 
				{ -1, 1, -1}, 
				{ -1, -1, 1}};
		double[][] validation = {
				{ 1, 1, -1, 1, -1, -1, 1, 1, -1,-1},
				{ 1, -1, -1, 1, -1, -1, 1, -1, -1,-1},
				{-1, -1, -1, 1, 1, 1, -1, 1, -1,-1}};

		int cycleNum = this.trainTillGoodEnough(in, desired, validation);
		System.out.println(cycleNum);

		// Print the three raw outputs for each training pattern ...
		double[][] z = new double[4][1];
		for (int i = 0; i < 3; i++) {
			System.out.println("Input # " + i);
			z[i] = this.classifyInput(in[i]);
			System.out.println(z[i][0]+","+z[i][1]+","+z[i][2]);
		}
		// ... and for each validation pattern.
		for (int i = 0; i < 3; i++) {
			System.out.println("Validation # " + i);
			z[i] = this.classifyInput(validation[i]);
			System.out.println(z[i][0]+","+z[i][1]+","+z[i][2]);
		}
	}
	
	/*
	 * Assignment question 1, part 1: trains on six bias-augmented
	 * 17-element patterns (three classes, two examples each) and reports
	 * the winning output neuron for both the training and validation sets.
	 */
	public void question1_part1() {
		// Training patterns: 16 pixel values plus a trailing -1 bias input.
		double[][] in = new double[][] { 
				{1,1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1}, 
				{1,1,1,-1,1,-1,-1,-1,1,1,1,-1,1,1,1,-1,-1}, 
				{1,1,1,-1,1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,-1},
				{-1,1,1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1}, 
				{-1,1,1,1,-1,1,-1,-1,-1,1,1,1,-1,1,1,1,-1}, 
				{-1,1,1,1,-1,1,1,-1,-1,1,-1,-1,-1,1,-1,-1,-1}};

		// +1/-1 one-hot targets: rows 0 & 3 are class 0, 1 & 4 class 1,
		// 2 & 5 class 2.
		double[][] desired = new double[][] {
				{1,-1,-1},
				{-1,1,-1},
				{-1,-1,1},
				{1,-1,-1},
				{-1,1,-1},
				{-1,-1,1}};

		// Validation patterns, augmented with the same trailing -1 bias
		// input as the training set.
		// Fix: these rows previously had only 16 elements, which did not
		// match the 17 weights per first-layer neuron (see testAssign2).
		double[][] validation = new double[][] {
				{1,1,1,1,-1,-1,1,-1,-1,-1,1,-1,-1,1,1,1,-1},
				{1,1,1,1,1,-1,-1,-1,1,1,-1,1,1,1,1,1,-1},
				{1,1,1,-1,1,-1,-1,-1,1,1,-1,-1,1,-1,-1,-1,-1},
				{-1,-1,-1,-1,1,1,1,1,-1,1,-1,-1,-1,1,-1,-1,-1},
				{1,1,1,1,1,-1,-1,-1,1,-1,-1,1,1,1,1,1,-1},
				{1,1,1,1,1,1,1,-1,1,-1,-1,-1,1,-1,-1,-1,-1}};

		// The training set doubles as the early-stopping validation
		// argument; the `validation` patterns are only used for reporting.
		int cycleNum = this.trainTillGoodEnough(in, desired, in);

		System.out.println("NumOfCycles= "+cycleNum);
		double[][] z = new double[6][3];
		System.out.println("ON INPUT SET");
		for (int i = 0; i < 6; i++) {
			System.out.println("Input # " + i);
			z[i] = this.testOutput(in[i]);
			int max = max(z[i]);
			System.out.println(z[i][0]+","+z[i][1]+","+z[i][2]+" MAX at: " + max);
		}
		System.out.println("ON VALIDATION SET");
		for (int i = 0; i < 6; i++) {
			System.out.println("Validation # " + (i));
			z[i] = this.testOutput(validation[i]);
			int max = max(z[i]);
			System.out.println(z[i][0]+","+z[i][1]+","+z[i][2]+" MAX at: " + max);
		}
		System.out.println("FINISHED");

	}
	
	public static void testAssign2() {
		// Fixed starting weights for the assignment network: 2 hidden
		// neurons with 17 inputs each, 3 output neurons with 3 inputs each.
		double[][] w1 = {
				{-0.7402, -0.0612, -0.3258, 0.5886, 0.0571, 0.2040, 0.3082, 0.4963, -0.8324, 0.8267, 0.6516, 0.9923, -0.1146, 0.9238, 0.5498, 0.7374, -0.2004},
				{0.1376, -0.9762, -0.6756, -0.3776, -0.6687, -0.4741, 0.3784, -0.0989, -0.5420, -0.6952, 0.0767, -0.8436, -0.7867, -0.9907, 0.6346, -0.8311, -0.4803}};

		double[][] w2 = {
				{0.5144, 0.1356, 0.0616},
				{0.5075,-0.8483,0.5583},
				{-0.2391,-0.8921,0.8680}};

		// To use random starting weights instead, swap in:
		// MomentumNeuronLayer neuralNet = create2LayerNetwork(17, 3, 3);
		load2LayerNetwork(w1, w2).question1_part1();
	}
	
	/*
	 * Builds a single-layer network of 10 output neurons (one per digit
	 * class), each with "size" randomly initialised weights, using a
	 * learning rate of 0.3.
	 *
	 * Fix: the size parameter was previously ignored and the weight count
	 * hard-coded to 320, which silently mismatched callers that pass
	 * bias-augmented (321-element) inputs. Existing callers passing 320
	 * are unaffected.
	 */
	public static MomentumNeuronLayer create1LayerNetwork(int size) {
		double[][] weights = new double[10][size];
		for (int i = 0; i < 10; i++) {
			weights[i] = generateRandomWeights(size);
		}

		return new MomentumNeuronLayer(10, weights, 0.3);
	}
	
	/*
	 * Exercise 3.5. Note: doesn't work yet - the exercise assumes a
	 * different learning rule (fixed correlation) than the delta rule this
	 * layer trains with.
	 */
	public static void question_3_5() {
		double[][] w1 = { {1,-2,0}, {0,-1,2}, {1,3,-1} };
		double[][] inputSet = { {10,2,-1}, {2,-5,-1}, {-5,5,-1} };
		double[][] desiredSet = { {1,-1,-1}, {-1,1,-1}, {-1,-1,1} };
		// Learning rate 1; cycles through the three patterns 40 times each.
		MomentumNeuronLayer neuralNet = new MomentumNeuronLayer(3, w1, 1);
		for (int i = 0; i < 120; i++) {
			neuralNet.trainLayer(inputSet[i % 3], desiredSet[i % 3]);
		}

		// Classify each training pattern and report the winning neuron.
		double[][] z = new double[3][3];
		System.out.println("ON INPUT SET");
		for (int i = 0; i < 3; i++) {
			System.out.println("Input # " + i);
			z[i] = neuralNet.testOutput(inputSet[i]);
			System.out.println(" MAX at: " + max(z[i]));
		}
	}
	
	/*
	 * End-to-end test of the single-layer trainer: loads two training and
	 * two validation samples per digit (each bias-augmented to 321
	 * elements), trains with oneLayerTrainer, and prints the winning
	 * output neuron for every pattern.
	 */
	public static void run1LayerTest() {
		double[][] trainingInputs = new double[10*2][20 * 16 + 1];
		double[][] desiredOutputs = new double[10*2][10];
		double[][] validationInputs = new double[10*2][20 * 16 + 1];
		// Samples live in a fixed directory: train-D-0 / train-D-1 are the
		// training pair for digit D, train-D-2 / train-D-3 the validation pair.
		String fileLoc = "c:\\nninputs\\train-";
		for (int i = 0; i < 10; i++) {
			for (int j = 0; j < 2; j++) {
				NeuralNetworkSet temp = Filesystem.readData(fileLoc+i+"-"+j);
				double[] input = new double[20 * 16 + 1];
				int n = 0;
				for (int[] out : temp.getInput()) {
					for (int o : out) {
						input[n] = (double) o;
						n++;
					}
				}
				// Trailing -1 bias input.
				input[320] = -1;
				// Fix: store copy 0 of digit i at index i and copy 1 at
				// i+10 so that digit == index % 10, as assumed by
				// desiredOutputs below, oneLayerTrainer's (j % 10) check,
				// and the [i]/[i+10] reporting loops. (Previously samples
				// were stored pairwise at 2i, 2i+1, mislabelling half the
				// set.)
				trainingInputs[i + j * 10] = input;
			}
			for (int j = 2; j < 4; j++) {
				NeuralNetworkSet temp = Filesystem.readData(fileLoc+i+"-"+j);
				double[] input = new double[20 * 16 + 1];
				int n = 0;
				for (int[] out : temp.getInput()) {
					for (int o : out) {
						input[n] = (double) o;
						n++;
					}
				}
				// Trailing -1 bias input.
				input[320] = -1;
				// Same index layout as the training set.
				validationInputs[i + (j - 2) * 10] = input;
			}
		}
		// Desired output: +1 on the neuron matching the digit (index % 10),
		// -1 elsewhere.
		for (int i = 0; i < desiredOutputs.length; i++) {
			for (int j = 0; j < desiredOutputs[0].length; j++) {
				if (i%10 == j)
					desiredOutputs[i][j] = 1;
				else
					desiredOutputs[i][j] = -1;
			}
		}

		// Fix: inputs are bias-augmented to 321 elements, so the layer
		// needs 321 weights per neuron (was 20 * 16 = 320).
		MomentumNeuronLayer neuralNet = MomentumNeuronLayer.create1LayerNetwork(20 * 16 + 1);
		int cycleNum = neuralNet.oneLayerTrainer(trainingInputs, desiredOutputs, validationInputs);

		System.out.println("NumOfCycles= "+cycleNum);
		double[][] z = new double[10][3];
		System.out.println("ON INPUT SET");
		for (int i = 0; i < 10; i++) {
			System.out.print("Input # " + i);
			// First copy of digit i ...
			z[i] = neuralNet.testOutput(trainingInputs[i]);
			int max = max(z[i]);
			System.out.print(" MAX at: " + max);
			// ... then the second copy of the same digit.
			z[i] = neuralNet.testOutput(trainingInputs[i+10]);
			max = max(z[i]);
			System.out.println(" MAX at: " + max);
		}
		System.out.println("FINISHED");

		System.out.println("ON VALIDATION SET");
		for (int i = 0; i < 10; i++) {
			System.out.print("Input # " + i);
			z[i] = neuralNet.testOutput(validationInputs[i]);
			int max = max(z[i]);
			System.out.print(" MAX at: " + max);
			z[i] = neuralNet.testOutput(validationInputs[i+10]);
			max = max(z[i]);
			System.out.println(" MAX at: " + max);
		}
		System.out.println("FINISHED");

	}
	
	/*
	 * Trains this layer on the given patterns, one full pass over all
	 * inputs per cycle, until the stop condition fires or 200 cycles have
	 * elapsed (in which case -1 is returned). Per-cycle error lists for
	 * the training and validation sets are written out via
	 * writeErrorCycle before returning.
	 *
	 * NOTE(review): despite the comments below about stopping on rising
	 * validation error, the errorUp flag as written is "every training
	 * input is classified correctly (winning index == input index % 10)"
	 * - confirm which behaviour is intended.
	 */
	public int oneLayerTrainer(double[][] trainingInputs, double[][] desired, double[][] validationInputs) {
		// there are n inputs
		int numOfInputs = trainingInputs.length;
		int trainingCycleNum = 0;

		double[] outputValidation;
		double[] outputTraining;
		boolean success = false;
		//This is the map of the following
		//	TrainingCycleNum ->
		//				Input Number ->
		//					Error On Each Pt in the Matrix
		//	We should simplify this
		//	TrainingCycleNum ->
		//				Error on each Input
		//This would be a HashMap<Integer, ArrayList<Double>>
		//We don't actually care about the individual points
		// NOTE(review): HashMap iteration order is unspecified, so the
		// error-curve rows written at the end may not be in cycle order -
		// a LinkedHashMap/TreeMap would guarantee it; verify.
		HashMap<Integer, ArrayList<Double>> cycleErrorTraining = new HashMap<Integer, ArrayList<Double>>();		
		HashMap<Integer, ArrayList<Double>> cycleErrorValidation = new HashMap<Integer, ArrayList<Double>>();
		
		ArrayBlockingQueue<Double[][]> weightsHidden = new ArrayBlockingQueue<Double[][]>(4); // save the past 4 weights
		// NOTE(review): nothing below ever adds to weightsOutput, so the
		// drain loop in the success branch is a no-op - verify intent.
		ArrayBlockingQueue<Double[][]> weightsOutput = new ArrayBlockingQueue<Double[][]>(4);
		// run until the validation error is increasing for 3 consecutive
		// periods
		// when this condition occurs, go back and grab the weights from the
		// last period before it was increasing
		// make these the current weights in the neural net
		// return the cycle num
		while (!success) {
			// train for all inputs
			for (int i = 0; i < numOfInputs; i++) {
				this.trainLayer(trainingInputs[i], desired[i]);
			}
			trainingCycleNum++;
			// Per-input signed error totals for this cycle.
			ArrayList<Double> tempErrorTraining = new ArrayList<Double>();
			ArrayList<Double> tempErrorValidation = new ArrayList<Double>();
			
			// now try to go through all the inputs and classify them
			for (int i = 0; i < numOfInputs; i++) {
				outputValidation = this.testOutput(validationInputs[i]);
				outputTraining = this.testOutput(trainingInputs[i]);
				double cycleError = 0;
				
				// Sum of signed per-neuron errors (not squared) for input i.
				for (int j = 0; j < outputTraining.length; j++) {
					cycleError += desired[i][j] - outputTraining[j];
				}
				//Cycle Error now contains the error for that input for this cycle
				tempErrorTraining.add(cycleError);
				
				cycleError = 0;				
				// NOTE(review): loop bound uses outputTraining.length while
				// summing validation error - same length in practice, but
				// confirm.
				for (int j = 0; j < outputTraining.length; j++) {
					cycleError += desired[i][j] - outputValidation[j];
				}
				tempErrorValidation.add(cycleError);	
			}
			cycleErrorTraining.put(trainingCycleNum, tempErrorTraining);
			cycleErrorValidation.put(trainingCycleNum, tempErrorValidation);
			
			// Once 4 cycles of history exist, compute the squared-error
			// summaries and evaluate the stop condition.
			if (trainingCycleNum > 4) {
				double[] cycleErrorSum = new double[4];
				
				// 0.5 * (sum of per-input errors)^2 for each of the last 4
				// cycles; computed but only used by the commented-out
				// rising-error test below.
				for (int j = 0; j < 4; j++) {
					cycleErrorSum[j] = 0;
					ArrayList<Double> cycleError = cycleErrorValidation.get(trainingCycleNum-j); //grab the last 4
					for (Double d : cycleError) { //sum up all the errors for this cycle
							cycleErrorSum[j] += d;
					}
					cycleErrorSum[j] = Math.pow(cycleErrorSum[j], 2) * 0.5; 
					//System.out.println("Cycle #"+(trainingCycleNum-j)+" has error " + cycleErrorSum[j]);
				}
				boolean errorUp = true;
/*				boolean[] lazyErrorUp = new boolean[3];
				for (int j = 2; j > 0; j--) {
					if (cycleErrorSum[j] < cycleErrorSum[j+1])
						lazyErrorUp[j] = true;
					else
						lazyErrorUp[j] = false;
				}
				if (lazyErrorUp[0] && lazyErrorUp[1] && lazyErrorUp[2])
					errorUp = true;*/
				// errorUp stays true only if every training input's winning
				// neuron equals its digit (input index mod 10).
				int numClassified = 0;
				for (int j = 0; j < numOfInputs; j++) {
					int max = max(this.testOutput(trainingInputs[j]));
					if (max == (j % 10)) {
						errorUp = errorUp && true;
						numClassified++;
					}
					else
						errorUp = false;
				}
				System.out.print("CycleNum #"+trainingCycleNum+" numClassified="+numClassified+"/20 = "+(numClassified*5)+"%");	
				numClassified = 0;
				// Validation accuracy is reported but does not affect the
				// stop condition.
				for (int j = 0; j < numOfInputs; j++) {
					int max = max(this.testOutput(validationInputs[j]));
					if (max == (j % 10)) 
						numClassified++;	
				}
				System.out.println(" numClassified="+numClassified+"/20 = "+(numClassified*5)+"%");
				
				if (errorUp) {
					//System.out.println("Error is increasing, stop and reset the weights");
					success = true;
					// Drain the saved-weights queue; `weights` ends up as
					// the most recently saved hidden-layer snapshot.
					Double[][] weights = null;
					while (weightsHidden.size() > 0) {
						try {
							weights = weightsHidden.take();
						} catch (InterruptedException e) {
							e.printStackTrace();
						}
					}
					this.lastWeight = fromDouble(weights);
					// NOTE(review): weightsOutput is always empty (never
					// populated), so this loop does not run and `weights`
					// still holds the hidden-layer snapshot when assigned
					// to nextLayer below; also, for a 1-layer network
					// nextLayer may be null (NPE) - verify.
					while (weightsOutput.size() > 0) {
						try {
							weights = weightsOutput.take();
						} catch (InterruptedException e) {
							e.printStackTrace();
						}
					}
					this.nextLayer.lastWeight = fromDouble(weights);
				} else {
					//System.out.println("Error is not increasing, continue and save current weights");
					// Evict the oldest snapshot to make room for this cycle's.
					weightsHidden.remove();
					weightsHidden.add(toDouble(this.getLastWeights()));
				
				
				}
			} else {
				// First 4 cycles: just accumulate weight snapshots.
				weightsHidden.add(toDouble(this.getLastWeights()));
				
			}
			// additional check to make sure we don't loop for ever
			if (trainingCycleNum > 200) {
				success = true;
				trainingCycleNum = -1;
			}
		}
		//write out the error info first
		//we have the following data structure HashMap<Integer,ArrayList<double>>>
		//we need to make this into a double[][]
		Double[][] errorCurveTraining = new Double[cycleErrorTraining.size()][1];
		Double[][] errorCurveValidation = new Double[cycleErrorTraining.size()][1];
		int i = 0;
		Double[] tempArray = new Double[1];
		for (ArrayList<Double> values : cycleErrorTraining.values()) {
			errorCurveTraining[i] = values.toArray(tempArray);
			i++;
		}
		i = 0;
		for (ArrayList<Double> values : cycleErrorValidation.values()) {
			errorCurveValidation[i] = values.toArray(tempArray); 
			i++;
		}
		writeErrorCycle(fromDouble(errorCurveTraining));
		writeErrorCycle(fromDouble(errorCurveValidation));
		
		return trainingCycleNum;
	}
	

}
