package backproped;

import gui.Debug;
import gui.Filesystem;
import gui.NeuralNetworkSet;
import info.monitorenter.gui.chart.Chart2D;
import info.monitorenter.gui.chart.ITrace2D;
import info.monitorenter.gui.chart.traces.Trace2DSimple;

import java.awt.Color;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Vector;

import javax.swing.JFrame;

/**
 * A single-hidden-layer neural network (tanh hidden units, linear output)
 * trained by backpropagation to classify digit patterns (classes 0-9).
 *
 * Pattern layout in {@link #trainInputs} / {@link #trainOutput}:
 *   indices 0 .. numPatterns-1          : training set   (files containing "train")
 *   indices numPatterns .. numPatterns+59 : validation set (files containing "test")
 * NOTE(review): the files named "*test*" are used as the validation set here;
 * files named "*validation*" are skipped entirely — confirm this is intended.
 *
 * All state is held in public static fields (kept for backward compatibility
 * with existing callers); this class is therefore not thread-safe.
 */
public class NeuralNetwork
{

	//user defined variables
	public static int numEpochs = 120; //number of training cycles
	public static int numInputs  = 320+1; //number of inputs - this includes the input bias
	public static int numHidden  = 6; //number of hidden units
	public static int numPatterns = 110; //number of training patterns
	public static double learningRate_inputToHidden = 0.0002; //learning rate for the input->hidden layer
	public static double learningRate_hiddenToOutput = 0.0004; //learning rate for the hidden->output layer

	//process variables

	//per-class bookkeeping: incremented once per loaded training pattern,
	//decremented once per training presentation — if every class is trained
	//on equally, the leftover counts are equal across all ten classes
	public static int desiredPatterns[] = new int[10];

	public static int patNum;       //index of the pattern currently presented to the net
	public static double errThisPat; //signed error (prediction - target) for the current pattern
	public static double outPred;    //linear output of the net for the current pattern

	//last RMS error for training and validation sets
	public static double RMSerror;
	public static double RMSValError;
	//complete log of RMS error for training and validation sets, one entry per epoch
	public static double[] RMSerrorlog = new double[numEpochs+1];
	public static double[] RMSValErrorLog = new double[numEpochs+1];

	//inputs: rows 0..numPatterns-1 are the training set,
	//rows numPatterns..numPatterns+59 are the validation set
	public static double[][] trainInputs  = new double[numPatterns+60][numInputs];
	//expected output per pattern: the digit class 0 - 9
	public static double[] trainOutput = new double[numPatterns+60];

	//the outputs of the hidden neurons
	public static double[] hiddenVal  = new double[numHidden];

	//the weights for the input -> hidden and hidden -> output respectively
	public static double[][] weightsIH = new double[numInputs][numHidden];
	public static double[] weightsHO = new double[numHidden];

	/**
	 * Standalone entry point.
	 * Initialises random weights, loads the data sets, trains for
	 * {@code numEpochs} epochs over the whole training set (in order),
	 * then reports classification results and plots both error curves.
	 */
	public static void main(String[] args)
	{
		//initiate the weights
		initWeights();

		//load in the training and validation data
		loadData();

		//train the network
		for(int epoch = 0; epoch <= numEpochs; epoch++)
		{
			for(int i = 0; i < numPatterns; i++)
			{
				//present every training pattern once per epoch, in order
				patNum = i;

				//calculate the current output and error for this pattern
				calcNet();

				//record that this pattern's class has been trained on.
				//Patterns are loaded one class at a time, so class == index % 10.
				//(was "% 9", which mis-binned class-9 patterns — the counter
				//array has ten slots, one per digit class)
				desiredPatterns[patNum % 10]--;

				//update weights
				updateHiddenToOutputWeights();
				updateInputToHiddenWeights();
			}

			// Once the full training set has been passed through, calculate the overall error points
			calcOverallError();
			calcOverallValError();
			System.out.println("epoch = " + epoch + "  RMS Error = " + RMSerror + " VAL RMS Error = " + RMSValError);
			RMSerrorlog[epoch] = RMSerror;
			RMSValErrorLog[epoch] = RMSValError;
		}

		//training has finished once all the epochs have been run through
		//display the results
		displayResults();
		System.out.println("Validation");
		displayValidationResults();

		//find the epoch with the lowest validation error (early-stopping point)
		double min = Double.MAX_VALUE;
		int lowestPt = 0;
		for (int e = 0; e < RMSValErrorLog.length; e++) {
			if (RMSValErrorLog[e] < min) {
				min = RMSValErrorLog[e];
				lowestPt = e;
			}
		}
		System.out.println("Lowest VAL ERROR = " + min + " at epoch = " + lowestPt);

		// Create a chart with one trace per error curve
		Chart2D chart = new Chart2D();
		ITrace2D errorCurve = new Trace2DSimple();
		ITrace2D validationCurve = new Trace2DSimple();

		// Add all points, as it is static
		for(int i = 0; i < RMSerrorlog.length; i++){
			errorCurve.addPoint(i, RMSerrorlog[i]);
		}
		for(int i = 0; i < RMSValErrorLog.length; i++){
			validationCurve.addPoint(i, RMSValErrorLog[i]);
		}

		validationCurve.setName("Validation Curve");
		validationCurve.setColor(new Color(58,131,220));

		errorCurve.setName("Error Curve");
		errorCurve.setColor(new Color(255,175,50));

		// Add the traces to the chart
		chart.addTrace(errorCurve);
		chart.addTrace(validationCurve);

		System.out.println("Desired Patterns Leftover:\t"+ Debug.printArray(desiredPatterns));

		// Show the chart in a frame titled with the current time
		Date time = new Date();
		JFrame frame = new JFrame(time.toString());
		frame.getContentPane().add(chart);
		frame.setSize(400,300);
		// Enable the termination button [cross on the upper right edge]
		frame.addWindowListener(
				new WindowAdapter(){
					public void windowClosing(WindowEvent e){
						System.exit(0);
					}
				}
		);
		//was frame.show(), deprecated since Java 1.1
		frame.setVisible(true);
	}

	/**
	 * Trains a network on construction. Unlike {@link #main}, patterns are
	 * presented in random order (with replacement) within each epoch, and no
	 * results are displayed afterwards.
	 */
	public NeuralNetwork() {
		//initiate the weights
		initWeights();

		//load in the training and validation data
		loadData();

		//train the network
		for(int epoch = 0; epoch <= numEpochs; epoch++)
		{
			for(int i = 0; i < numPatterns; i++)
			{
				//select a pattern at random (the -0.001 keeps the upper
				//bound strictly below numPatterns; the cast truncates to 0..numPatterns-1)
				patNum = (int)((Math.random()*numPatterns)-0.001);

				//calculate the current output and error for this pattern
				calcNet();

				//update weights
				updateHiddenToOutputWeights();
				updateInputToHiddenWeights();
			}

			// Once the full training set has been passed through, calculate the overall error points
			calcOverallError();
			calcOverallValError();
			System.out.println("epoch = " + epoch + "  RMS Error = " + RMSerror + " VAL RMS Error = " + RMSValError);
			RMSerrorlog[epoch] = RMSerror;
			RMSValErrorLog[epoch] = RMSValError;
		}
	}

	/**
	 * Forward pass for pattern {@link #patNum}: computes the tanh outputs of
	 * the hidden neurons, the linear network output {@link #outPred}, and the
	 * signed error {@link #errThisPat} = prediction - target.
	 */
	public static void calcNet()
	{
		//calculate the outputs of the hidden neurons (tanh activation)
		for(int i = 0; i < numHidden; i++)
		{
			double sum = 0.0;
			for(int j = 0; j < numInputs; j++)
				sum += trainInputs[patNum][j] * weightsIH[j][i];
			hiddenVal[i] = tanh(sum);
		}

		//calculate the output of the network (the output neuron is linear)
		outPred = 0.0;
		for(int i = 0; i < numHidden; i++)
			outPred += hiddenVal[i] * weightsHO[i];

		//calculate the error
		errThisPat = outPred - trainOutput[patNum];
	}


	/**
	 * Delta-rule update for the hidden->output weights:
	 * delta_w = learning rate * error * hidden activation.
	 * Weights are clamped to [-5, 5] as a crude regularisation.
	 */
	public static void updateHiddenToOutputWeights()
	{
		for(int k = 0; k < numHidden; k++)
		{
			double weightChange = learningRate_hiddenToOutput * errThisPat * hiddenVal[k];
			weightsHO[k] -= weightChange;

			//regularisation on the output weights
			if (weightsHO[k] < -5)
				weightsHO[k] = -5;
			else if (weightsHO[k] > 5)
				weightsHO[k] = 5;
		}
	}


	/**
	 * Backpropagated delta-rule update for the input->hidden weights:
	 * delta_w = (1 - hidden^2) * outputWeight * error * learning rate * input,
	 * where (1 - hidden^2) is the derivative of tanh at the hidden activation.
	 */
	public static void updateInputToHiddenWeights()
	{
		for(int i = 0; i < numHidden; i++)
		{
			//gradient through this hidden unit, constant across its inputs
			double delta = (1 - (hiddenVal[i] * hiddenVal[i]))
					* weightsHO[i] * errThisPat * learningRate_inputToHidden;
			for(int k = 0; k < numInputs; k++)
			{
				weightsIH[k][i] -= delta * trainInputs[patNum][k];
			}
		}
	}


	//************************************
	/**
	 * Initialises all weights to small random values:
	 * hidden->output in [-0.25, 0.25), input->hidden in [-0.1, 0.1).
	 */
	public static void initWeights()
	{
		for(int j = 0; j < numHidden; j++)
		{
			weightsHO[j] = (Math.random() - 0.5)/2;
			for(int i = 0; i < numInputs; i++)
				weightsIH[i][j] = (Math.random() - 0.5)/5;
		}
	}

	//************************************
	/**
	 * Loads all sample files from {user.dir}/src/samples into
	 * {@link #trainInputs}/{@link #trainOutput}. Files containing "train"
	 * become the training set (11 files per class), files containing "test"
	 * become the validation set (6 files per class); files containing
	 * "validation" are deliberately skipped.
	 */
	public static void loadData()
	{
		List<String> filesTraining = new ArrayList<String>();
		List<String> filesTesting = new ArrayList<String>();

		System.out.println("loading data");

		//platform-independent path (was hard-coded with "\\" separators)
		File samplesDir = new File(System.getProperty("user.dir"),
				"src" + File.separator + "samples");
		String[] entries = samplesDir.list();
		if (entries == null) {
			//list() returns null when the directory does not exist or cannot be read
			System.out.println("sample directory not found: " + samplesDir);
			return;
		}

		for (String entry : entries) {
			File file = new File(samplesDir, entry);
			if (file.isDirectory()) {
				// Do nothing if a Directory
			} else if (file.getName().contains("train")) {
				filesTraining.add(file.getAbsolutePath());
			} else if (file.getName().contains("test")) {
				filesTesting.add(file.getAbsolutePath());
			} else if (file.getName().contains("validation")) {
				//validation-named files are intentionally unused
			} else {
				System.out.println("unknown input set encountered");
			}
		}

		//training set: 11 files per class, read one class per step so that
		//consecutive patterns cycle through the classes 0..9
		int totalPatterns = loadPatternSet(filesTraining, 11, 0, true);
		//validation set: 6 files per class, appended after the training block
		loadPatternSet(filesTesting, 6, totalPatterns, false);

		System.out.println("Inputs loaded");
	}

	/**
	 * Reads one pattern set into {@link #trainInputs}/{@link #trainOutput}.
	 * Files are assumed grouped per class (classIdx * filesPerClass + cycle),
	 * and are read one class at a time so classes interleave.
	 *
	 * @param fileNames     file paths, grouped by class
	 * @param filesPerClass number of files per digit class
	 * @param firstPattern  row index at which to start writing patterns
	 * @param trackDesired  whether to count loaded classes in {@link #desiredPatterns}
	 * @return the next free pattern row index
	 */
	private static int loadPatternSet(List<String> fileNames, int filesPerClass,
			int firstPattern, boolean trackDesired)
	{
		int pattern = firstPattern;
		for (int cycle = 0; cycle < filesPerClass; cycle++) {
			for (int classIdx = 0; classIdx < 10; classIdx++) {
				//read pattern data from file
				NeuralNetworkSet patternData =
						Filesystem.readData(fileNames.get(classIdx * filesPerClass + cycle));

				int inputNum = 0;
				for (int[] row : patternData.getInput()) {
					for (int x : row) {
						trainInputs[pattern][inputNum++] = x;
					}
				}
				trainInputs[pattern][inputNum] = 1; //Bias

				trainOutput[pattern] = patternData.getDesired();
				if (trackDesired) {
					desiredPatterns[patternData.getDesired()]++;
				}
				pattern++;
			}
		}
		return pattern;
	}


	//************************************
	/**
	 * Hyperbolic tangent. Delegates to {@link Math#tanh}, which is
	 * numerically equivalent to the previous hand-rolled exp-based version
	 * (including its saturation to +/-1 beyond |x| = 20).
	 */
	public static double tanh(double x)
	{
		return Math.tanh(x);
	}


	//************************************
	/** Prints classification accuracy over the training set. */
	public static void displayResults()
	{
		displayClassificationReport(0, numPatterns, "training");
	}

	/** Prints classification accuracy over the validation set. */
	public static void displayValidationResults()
	{
		displayClassificationReport(numPatterns, numPatterns + 60, "validation");
	}

	/** Prints classification accuracy over the test set. */
	public static void displayTestResults()
	{
		//NOTE(review): currently the same pattern range as the validation
		//set — there is no separate held-out test set loaded
		displayClassificationReport(numPatterns, numPatterns + 60, "test");
	}

	/**
	 * Runs the net over patterns [first, lastExclusive), counts predictions
	 * (rounded, floored at 0) that match the target class, and prints the
	 * overall accuracy plus a per-class match report.
	 */
	private static void displayClassificationReport(int first, int lastExclusive, String setName)
	{
		int[] matchesPerClass = new int[10];
		int matchesFound = 0;
		int samples = 0;

		for (int i = first; i < lastExclusive; i++) {
			patNum = i;
			calcNet();
			//round the linear output to the nearest class, clamped below at 0
			long predicted = Math.max(0, Math.round(outPred));
			if (trainOutput[patNum] == predicted) {
				matchesFound++;
				//targets are 0..9, so a match implies predicted is in range
				matchesPerClass[(int) predicted]++;
			}
			samples++;
		}

		System.out.println(matchesFound + " / " + samples + " ( "
				+ ((double) matchesFound / samples * 100)
				+ "% ) correctly classified samples found in the " + setName + " set");

		StringBuilder report = new StringBuilder("Matches Found Report:");
		for (int c = 0; c < 10; c++) {
			report.append("\n  ").append(c).append(" : \t").append(matchesPerClass[c]);
		}
		System.out.println(report);
	}

	//************************************
	/** Computes the RMS error over the whole training set into {@link #RMSerror}. */
	public static void calcOverallError() {
		RMSerror = 0.0;
		for(int i = 0; i < numPatterns; i++)
		{
			patNum = i;
			calcNet();
			RMSerror += errThisPat * errThisPat;
		}
		RMSerror = Math.sqrt(RMSerror / numPatterns);
	}

	/**
	 * Computes the RMS error over the validation set into {@link #RMSValError}.
	 * The validation patterns occupy rows numPatterns .. numPatterns+59.
	 * (Was i = 100..149 with a divisor of 10, which mixed in training
	 * patterns 100-109, skipped validation patterns 150-169, and did not
	 * divide by the number of summed patterns — so it was not an RMS.)
	 */
	public static void calcOverallValError() {
		RMSValError = 0.0;
		int valPatterns = 0;
		for(int i = numPatterns; i < numPatterns + 60; i++)
		{
			patNum = i;
			calcNet();
			RMSValError += errThisPat * errThisPat;
			valPatterns++;
		}
		RMSValError = Math.sqrt(RMSValError / valPatterns);
	}

}