package gui;



import java.awt.Color;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.ClipboardOwner;
import java.awt.datatransfer.StringSelection;
import java.awt.datatransfer.Transferable;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.*;
import java.util.Arrays;
import java.util.Date;
import java.util.Random;
import java.util.Vector;

import javax.swing.JFrame;

import info.monitorenter.gui.chart.Chart2D;
import info.monitorenter.gui.chart.ITrace2D;
import info.monitorenter.gui.chart.traces.Trace2DSimple;

// (leftover from the original Python source: "import string")

// Leftover from the original Python source ("random.seed(0)"): the Java port's
// RNG below is unseeded, so weight initialisation differs between runs.
public class bpnn implements Serializable, ClipboardOwner
{ 
  /** Shared RNG for weight initialisation (unseeded, so runs are not reproducible). */
  static Random generator = new Random();
  /** Training patterns; populated by loadInputs(). */
  static bpnn.pattern[] trainingSet = null;
  /** Validation patterns; populated by loadInputs(). */
  static bpnn.pattern[] validationSet = null;
  
  /** Per-epoch RMS training error, written by train(), plotted by graphResults(). */
  static double[] RMSerrorlog;
  /** Per-epoch RMS validation error, written by train(), plotted by graphResults(). */
  static double[] RMSValErrorLog;
  // Network topology and state

  int ni;            // number of input nodes (includes the +1 bias node)
  int nh;            // number of hidden nodes
  int no;            // number of output nodes
  double N,M;        // N = learning rate, M = momentum factor
  double[] ai;       // input-layer activations
  double[] ah;       // hidden-layer activations
  double[] ao;       // output-layer activations
  public double[][] wi;  // input->hidden weights, [ni][nh]
  public double[][] wo;  // hidden->output weights, [nh][no]
  double[][] ci;     // previous input-weight deltas (momentum term)
  double[][] co;     // previous output-weight deltas (momentum term)
  
  
  
  
  
  /**
   * A single supervised-learning sample: one input vector paired with the
   * desired (target) output vector.
   */
  public static class pattern
  {
    double[] input;   // network input activations for this sample
    double[] target;  // desired output activations for this sample

    pattern(double[] in, double[] out)
    {
      this.input = in;
      this.target = out;
    }
  }
  
 
/**
 * Returns a uniformly distributed random double in the half-open
 * interval [a, b).
 */
public static double rand(double a, double b)
{
    // Scale a uniform [0, 1) draw into [a, b).
    return a + (b - a) * generator.nextDouble();
}

/**
 * Allocates an I-by-J matrix; Java zero-initialises every element.
 */
public static double[][] makeMatrix(int I, int J)
{
    return new double[I][J];
}

/**
 * Hyperbolic tangent.
 *
 * Bug fix: the previous hand-rolled form (e^x - e^-x)/(e^x + e^-x)
 * overflows for large |x|, yielding Infinity/Infinity = NaN.
 * Math.tanh saturates correctly to +/-1 and is more accurate near 0.
 */
public static double tanh(double x)
{
  return Math.tanh(x);
}
/**
 * Activation function used by the network. tanh is preferred here over the
 * standard logistic 1/(1+e^-x): its output range is (-1, 1).
 */
public static double sigmoid(double x)
{
  return tanh(x);
}

/**
 * Derivative of the activation function, expressed in terms of the
 * activation's OUTPUT: for y = tanh(x), d/dx tanh(x) = 1 - y^2.
 */
public static double dsigmoid(double y)
{
  return 1.0 - y * y;
}
  

/**
 * Builds a fully connected 3-layer network.
 *
 * @param ini number of input nodes (a bias node is added internally)
 * @param inh number of hidden nodes
 * @param ino number of output nodes
 */
bpnn(int ini, int inh, int ino)
{
    // Layer sizes; the extra input node is the bias.
    ni = ini + 1;
    nh = inh;
    no = ino;

    // Default hyper-parameters; train() overwrites them.
    N = 0.5; // learning rate (0.5 works well)
    M = 0.1; // momentum factor (0.1 works well)

    // Activation vectors, all starting at 1.0.
    ai = new double[ni];
    ah = new double[nh];
    ao = new double[no];
    java.util.Arrays.fill(ai, 1.0);
    java.util.Arrays.fill(ah, 1.0);
    java.util.Arrays.fill(ao, 1.0);

    // Weight matrices with random initial values in [-1.5, 1.5).
    wi = new double[ni][nh];
    wo = new double[nh][no];
    for (int in = 0; in < ni; in++) {
        for (int hid = 0; hid < nh; hid++) {
            wi[in][hid] = rand(-1.5, 1.5);
        }
    }
    for (int hid = 0; hid < nh; hid++) {
        for (int out = 0; out < no; out++) {
            wo[hid][out] = rand(-1.5, 1.5);
        }
    }

    // Previous weight changes for the momentum term (zero-initialised).
    ci = new double[ni][nh];
    co = new double[nh][no];
}
    
    /**
     * Forward pass: propagates an input vector through the network.
     *
     * @param inputs activation values for the input layer; must supply at
     *               least ni-1 values (the bias node keeps its fixed value).
     *               Extra trailing elements are ignored — the existing
     *               loaders pass 321 values to a 320-input network.
     * @return the output activation array {@code ao} (the live internal
     *         array, not a copy)
     * @throws IllegalArgumentException if fewer than ni-1 inputs are given
     */
    public double[] update(double[] inputs)
    {
        // Restores the length check the Python original performed
        // ("raise ValueError, 'wrong number of inputs'"), made lenient so
        // callers may pass more values than needed.
        if (inputs.length < ni - 1) {
            throw new IllegalArgumentException(
                "wrong number of inputs: got " + inputs.length + ", need " + (ni - 1));
        }

        // Input activations (the last node is the bias and is left untouched).
        for (int i = 0; i < ni - 1; i++) {
            ai[i] = inputs[i];
        }

        // Hidden activations.
        for (int j = 0; j < nh; j++) {
            double sum = 0.0;
            for (int i = 0; i < ni; i++) {
                sum += ai[i] * wi[i][j];
            }
            ah[j] = sigmoid(sum);
        }

        // Output activations.
        for (int k = 0; k < no; k++) {
            double sum = 0.0;
            for (int j = 0; j < nh; j++) {
                sum += ah[j] * wo[j][k];
            }
            ao[k] = sigmoid(sum);
        }

        return ao;
    }

    /**
     * Backward pass: computes error deltas for the most recent update()
     * call and (optionally) adjusts the weights.
     *
     * @param targets       desired output activations (length no)
     * @param updateWeights when true, weights are adjusted using learning
     *                      rate N plus momentum M; when false only the
     *                      error is computed (used for validation passes)
     * @return the half summed squared error 0.5 * sum((target - output)^2)
     */
    public double backPropagate(double[] targets, boolean updateWeights)
    {
        // Error terms for the output layer.
        double[] outputDeltas = new double[no];
        for (int k = 0; k < no; k++) {
            double err = targets[k] - ao[k];
            outputDeltas[k] = dsigmoid(ao[k]) * err;
        }

        // Error terms for the hidden layer, back-propagated through wo.
        double[] hiddenDeltas = new double[nh];
        for (int j = 0; j < nh; j++) {
            double err = 0.0;
            for (int k = 0; k < no; k++) {
                err += outputDeltas[k] * wo[j][k];
            }
            hiddenDeltas[j] = dsigmoid(ah[j]) * err;
        }

        // Update hidden->output weights; co holds the previous change for momentum.
        // (Was "if (updateWeights==true)" — redundant comparison removed.)
        for (int j = 0; j < nh; j++) {
            for (int k = 0; k < no; k++) {
                double change = outputDeltas[k] * ah[j];
                if (updateWeights) {
                    wo[j][k] += N * change + M * co[j][k];
                    co[j][k] = change;
                }
            }
        }

        // Update input->hidden weights; ci holds the previous change.
        for (int i = 0; i < ni; i++) {
            for (int j = 0; j < nh; j++) {
                double change = hiddenDeltas[j] * ai[i];
                if (updateWeights) {
                    wi[i][j] += N * change + M * ci[i][j];
                    ci[i][j] = change;
                }
            }
        }

        // Half summed squared error over the output layer. (The old
        // "please correct this ... must be x^2" comment was stale — the
        // term was already squared.)
        double error = 0.0;
        for (int k = 0; k < no; k++) {
            double diff = targets[k] - ao[k];
            error += 0.5 * diff * diff;
        }
        return error;
    }

    /**
     * Classifies each pattern (winner-takes-all over the output nodes) and
     * prints per-pattern results plus the overall accuracy.
     *
     * @param patterns labelled samples to evaluate; may be empty
     */
    public void test(pattern[] patterns)
    {
        int correctRecogs = 0;
        int incorrectRecogs = 0;
        for (int p = 0; p < patterns.length; p++) {
            double[] res = update(patterns[p].input);

            // Winner-takes-all: strongest output node vs. strongest target node.
            // Outputs come from tanh so they lie in (-1, 1); -2 is a safe floor.
            double testmax = -2;
            int testpos = -1;
            double desiredmax = -2;
            int desiredpos = -1;
            for (int j = 0; j < res.length; j++) {
                if (testmax < res[j]) {
                    testmax = res[j];
                    testpos = j;
                }
                if (desiredmax < patterns[p].target[j]) {
                    desiredmax = patterns[p].target[j];
                    desiredpos = j;
                }
            }
            if (testpos == desiredpos) {
                System.out.println("correctly classified " + desiredpos);
                correctRecogs++;
            } else {
                System.out.println("incorrectly classified " + desiredpos + " as " + testpos);
                incorrectRecogs++;
            }
        }
        int totalTested = correctRecogs + incorrectRecogs;
        // Bug fix: an empty pattern set previously divided 0/0 and reported "NaN%".
        double accuracy = totalTested == 0 ? 0.0 : (double) correctRecogs / totalTested * 100;
        System.out.println("Accuracy: " + correctRecogs + "/" + totalTested + "\t" + accuracy + "%");
    }
    
    
    
    /**
     * Dumps both weight matrices to stdout, one row per line.
     *
     * Bug fix: the previous code printed the row array references
     * (e.g. "[D@1a2b3c") — Arrays.toString renders the actual values.
     */
    public void weights()
    {
        System.out.println("Input weights:");
        for (int i = 0; i < ni; i++) {
            System.out.println(Arrays.toString(wi[i]));
        }
        System.out.println(" ");
        System.out.println("Output weights:");
        for (int i = 0; i < nh; i++) {
            System.out.println(Arrays.toString(wo[i]));
        }
    }
	
	/**
	 * One online-learning step: forward pass on the sample, then a single
	 * weight-updating backward pass.
	 *
	 * @param data the training sample to learn from
	 * @return the network's outputs for the sample (from the forward pass,
	 *         taken before the weight update)
	 */
	public double[] online(pattern data)
	{
		double[] response = update(data.input);
		backPropagate(data.target, true);
		return response;
	}
	
	/**
	 * Runs a forward pass only; no learning takes place.
	 * (Name kept as-is for existing callers, despite Java's
	 * lowerCamelCase convention for methods.)
	 */
	public double[] GetResponse(double[] input)
	{
		return update(input);
	}
	
    /**
     * Batch training loop: runs {@code iterations} epochs; each epoch does a
     * weight-updating backward pass per training pattern plus a read-only
     * (updateWeights=false) pass per validation pattern, recording the RMS of
     * each epoch's summed error in the static log arrays.
     *
     * NOTE(review): loadInputs() must run first — the static validationSet is
     * dereferenced unconditionally — and RMSerrorlog/RMSValErrorLog must hold
     * at least {@code iterations} entries (main() sizes them to epochs+1).
     * NOTE(review): both loops are bounded by the SMALLER of the two sets, so
     * when the training set is larger than the validation set the trailing
     * training patterns are never trained on — confirm this is intended.
     *
     * @param patterns   training patterns
     * @param iterations number of epochs
     * @param Lr         learning rate (overwrites field N)
     * @param Momenta    momentum factor (overwrites field M)
     */
    public  void train(pattern[] patterns,int iterations, double Lr,double  Momenta)
    {   
        N=Lr;
        M=Momenta;
        // N: learning rate 0.5 good
        // M: momentum factor 0.1 good
        int i,p;
        double error=0.0;
		double valError = 0.0;
		int length = 0;
        for(i=0;i<iterations;i++)
        {
            error = 0.0;
            valError = 0.0;
            // Pair training and validation samples by index; the shorter set
            // bounds this epoch's loop.
            if(validationSet.length<patterns.length)
            {
            	length = validationSet.length;
            } else {
            	length = patterns.length;
            }
            
            for(p=0;p<length;p++)
            {
                                
                update(patterns[p].input);                
                error += backPropagate(patterns[p].target, true);
                
                // Forward + error-only backward pass on the paired validation
                // sample (weights are not modified).
                update(validationSet[p].input);
                valError += backPropagate(validationSet[p].target,false);
                
            }
            // Progress print every 100 epochs (summed training error).
            if (i%100== 0) System.out.println(/*"error = "+*/ error /*+"\t\tepoch:"+i*/);
            RMSerrorlog[i] = Math.sqrt(error);
            RMSValErrorLog[i] = Math.sqrt(valError);
		 }

    }
    
    
    /**
     * Repeatedly trains on a single pattern.
     *
     * @param pat        the sample to train on
     * @param iterations number of forward/backward passes to run
     */
    public void singletrain(pattern pat, int iterations)
    {
        for (int i = 0; i < iterations; i++) {
            update(pat.input);
            // The returned pattern error is intentionally ignored; the
            // previous code accumulated it into a dead local variable.
            backPropagate(pat.target, true);
        }
    }


    /**
     * Scans {user.dir}/src/samples and fills the static trainingSet /
     * validationSet arrays.  Files are categorised by filename: names
     * containing "train" become training patterns, "validation" files
     * become validation patterns, and "test" files are collected but
     * currently unused.  Sub-directories are skipped.
     */
    public static void loadInputs()
    {
        boolean debug = false;

        Vector<String> filesTraining = new Vector<String>();
        Vector<String> filesValidation = new Vector<String>();
        Vector<String> filesTesting = new Vector<String>(); // collected but not consumed yet

        System.out.print("\tLoading Inputs");

        // Portability fix: build the path with File.separator — the previous
        // hard-coded "\\src\\samples" only resolved on Windows.
        File samplePath = new File(System.getProperty("user.dir"),
                "src" + File.separator + "samples");
        String[] sampleDirList = samplePath.list();

        if (sampleDirList == null) {
            // Robustness fix: previously an NPE when the directory was missing.
            System.err.println("sample directory not found: " + samplePath);
            trainingSet = new bpnn.pattern[0];
            validationSet = new bpnn.pattern[0];
            return;
        }

        for (int i = 0; i < sampleDirList.length; i++) {
            File file = new File(samplePath, sampleDirList[i]);
            if (file.isDirectory()) {
                continue;
            }
            // Order matters: "test" is checked before "validation", as before.
            if (file.getName().contains("train")) {
                filesTraining.add(file.getAbsolutePath());
                if (debug || Debug.DEBUG) System.out.println("\t" + file.getAbsolutePath());
            } else if (file.getName().contains("test")) {
                filesTesting.add(file.getAbsolutePath());
            } else if (file.getName().contains("validation")) {
                if (debug || Debug.DEBUG) System.out.println("\t" + file.getAbsolutePath());
                filesValidation.add(file.getAbsolutePath());
            } else {
                System.err.println("unknown input set encountered");
            }
        }

        trainingSet = new bpnn.pattern[filesTraining.size()];
        validationSet = new bpnn.pattern[filesValidation.size()];

        // The two loading loops below were previously duplicated inline;
        // the conversion logic now lives in readPattern().
        int numTrainingPatterns = 0;
        for (String trainingSampleLoc : filesTraining) {
            trainingSet[numTrainingPatterns++] = readPattern(trainingSampleLoc);
        }

        int numValidationPatterns = 0;
        for (String validationSampleLoc : filesValidation) {
            validationSet[numValidationPatterns++] = readPattern(validationSampleLoc);
        }

        System.out.println("...[Done]");
    }

    /**
     * Reads one sample file and converts it to a pattern: pixel values
     * flattened row-by-row into a 321-slot input vector, and a 10-element
     * one-vs-rest target vector (+1 for the labelled digit, -1 elsewhere).
     */
    private static pattern readPattern(String sampleLoc)
    {
        NeuralNetworkSet patternData = Filesystem.readData(sampleLoc);

        double[] tempInput = new double[321];
        int inputNum = 0;
        for (int[] row : patternData.getInput()) {
            for (int x : row) {
                tempInput[inputNum] = x;
                inputNum++;
            }
            // NOTE(review): this -1 sentinel is overwritten by the next row's
            // first pixel for every row but the last — kept as-is to match
            // the original behaviour exactly.
            tempInput[inputNum] = -1;
        }

        double[] tempDesired = new double[10];
        for (int desOutputNum = 0; desOutputNum < 10; desOutputNum++) {
            tempDesired[desOutputNum] = (patternData.getDesired() == desOutputNum) ? 1 : -1;
        }

        return new pattern(tempInput, tempDesired);
    }
    
    /**
     * Plots the RMS training-error and validation-error curves in a JChart2D
     * frame and echoes both logs to stdout.
     *
     * @param details text appended to the window title (run parameters)
     */
    public static void graphResults(String details)
    {
        Chart2D chart = new Chart2D();
        ITrace2D errorCurve = new Trace2DSimple();
        ITrace2D validationCurve = new Trace2DSimple();

        // NOTE(review): forClipboard is assembled but never handed to
        // setClipboard(); kept for parity with the original code.
        StringBuilder forClipboard = new StringBuilder();
        System.out.println("\n\nRMS Error Log");
        for (int i = 0; i < RMSerrorlog.length - 1; i++) {
            errorCurve.addPoint(i, RMSerrorlog[i]);
            System.out.println(RMSerrorlog[i]);
            forClipboard.append(RMSerrorlog[i]).append('\n');
        }

        System.out.println("\n\nRMS Validation Error Log");
        forClipboard.setLength(0);
        forClipboard.append('\n');
        for (int i = 0; i < RMSValErrorLog.length - 1; i++) {
            validationCurve.addPoint(i, RMSValErrorLog[i]);
            System.out.println(RMSValErrorLog[i]);
            // Bug fix: this loop previously appended RMSerrorlog[i] (the
            // training log) instead of the validation values it was printing.
            forClipboard.append(RMSValErrorLog[i]).append('\n');
        }

        validationCurve.setName("Validation Curve");
        validationCurve.setColor(new Color(58, 131, 220));
        errorCurve.setName("Error Curve");
        errorCurve.setColor(new Color(255, 175, 50));

        chart.addTrace(errorCurve);
        chart.addTrace(validationCurve);

        // Show the chart in its own frame, titled with timestamp + run details.
        Date time = new Date();
        JFrame frame = new JFrame(time.toString() + details);
        frame.getContentPane().add(chart);
        frame.setSize(400, 300);
        // Closing the window terminates the whole program.
        frame.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                System.exit(0);
            }
        });
        // setVisible replaces the deprecated Frame.show().
        frame.setVisible(true);
    }
    
    

    /**
     * Entry point: builds a 320-6-10 network, loads the digit sample sets,
     * trains for a fixed number of epochs, reports accuracy on both the
     * training and validation sets, then graphs the RMS error curves.
     */
    public static void main(String[] args)
    {
        final int epochs = 2000;
        final double learningRate = 0.00004;
        final double momentum = 0.001;
        final int numInputs = 320;
        final int numHiddenNodes = 6;
        final int numOutputs = 10;

        System.out.println("Epoch:\t"+ epochs + "\nLearningRate:\t"+learningRate+"\nmomentum:\t"+momentum +"\nHiddenNodes\t" + numHiddenNodes);

        // Error logs, one entry per epoch (sized epochs+1; graphResults
        // iterates up to length-1).
        RMSerrorlog = new double[epochs + 1];
        RMSValErrorLog = new double[epochs + 1];

        bpnn net = new bpnn(numInputs, numHiddenNodes, numOutputs);
        loadInputs();

        // Train, then evaluate on both sets.
        net.train(trainingSet, epochs, learningRate, momentum);
        net.test(trainingSet);
        System.out.println("\n\nTestValidationSet");
        net.test(validationSet);

        graphResults("  Epoch: "+ epochs + " LearningRate: "+learningRate+" momentum: "+momentum +" HiddenNodes: " + numHiddenNodes);
    }

	/**
	 * ClipboardOwner callback, required by the interface; intentionally a
	 * no-op — this class takes no action when another application claims
	 * clipboard ownership.
	 */
	@Override
	public void lostOwnership(Clipboard clipboard, Transferable contents) {
		// Intentional no-op.
		
	}
	
	/**
	 * Places the given text on the system clipboard, registering this
	 * object as the clipboard owner.
	 *
	 * @param aString the text to copy
	 */
	public void setClipboard(String aString)
	{
		StringSelection selection = new StringSelection(aString);
		Toolkit.getDefaultToolkit().getSystemClipboard().setContents(selection, this);
	}
}
