package mfccExtraction;

import java.io.*;
import java.util.LinkedList;

import android.util.Log;

public class Bkprop {

	/************************************************************ 
	;   Backpropagation with momentum                           * 
	;   by Andres Perez-Uribe                                   *
	;   HEIG-VD March/2008                                      * 
	;                                                           *
	;   Original version:                                       *
	;   C code by A. Perez-Uribe                                *
	;   Universidad del Valle, Cali, Colombia                   *
	;   sep/93                                                  *
	;                                                           *
	;   Email : andres DOT perez-uribe AT heig-vd.ch            * 
	;           REDS Institute @ HEIG-VD                        *
	;           University of Applied Sciences of Western       *
	;           Switzerland (HES-SO)                            *
	;************************************************************

	   References :
	   -  G. Hinton, "How neural networks learn from experience",
	      Scientific American, sep 1992.
	   -  P. Werbos,  "The Roots of Backpropagation: From ordered derivatives 
	      to Neural Networks and Political Forecasting", John Wiley and Sons, 
	      New York, 1994

	    Compile : javac Bkprop.java
	    Run : java Bkprop

	    There is no guarantee that the code will do what you
	    expect or that it is error free. It is simply meant
	    to provide a useful way to experiment with the
	    Backpropagation learning algorithm.

	    Update Oct 7/99...thanks to Stephane Pouyet <pouyet@nist.gov>

	    Last update March/2007 ... change of variable names for the sake of clarity
	    The original names came from Hinton's paper.  

	    First Java adaptation by Christophe Scalfo, HEIG-VD, September/2007
	    Second Java version by Andres Perez-Uribe, HEIG-VD, March/2008
	 */

	final int INPUTS = 104;    /* number of inputs */
	final int HIDDEN = 8 ;     /* number of hidden units */
	final double EPSILON = 0.1;  /* maximum Mean Square Error to stop training */
	final int NUMTEST = 1; /* number of test patterns */
	private double delta = 0.01;     /* learning rate */
	private double alfa = 0.14;      /* momentum */
	private final int nbEpoch = 1000; /* cap on training iterations (pattern presentations) */

	final int OUTPUTS;    /* number of outputs */
	int NUMTRAIN;    /* number of training patterns */
	final String trainfile;  /* path to the training database (tab-separated) */
	final String testfile;   /* path to the test database (tab-separated) */

	/* synaptic weights: input->hidden and hidden->output */
	private double[][] inhiddw;
	private double[][] hidoutw;

	/* delta_weights memorization for Backprop with momentum */
	private double[][] deltaihw;
	private double[][] deltahow;

	/* input, hidden and output vector values (activations of each layer) */
	private double[] x;
	private double[] y;
	private double[] z;


	/**
	 * Builds a network with INPUTS inputs, HIDDEN hidden units and nbOutputs
	 * outputs, and sizes every array from the training set found at
	 * trainingSet. Only the training-set SIZE is read here; the actual
	 * pattern values are loaded later by read_data().
	 *
	 * @param nbOutputs     number of output neurons
	 * @param trainingSet   path of the training database
	 * @param predictionSet path of the test database used by test()
	 */
	public Bkprop(int nbOutputs, String trainingSet, String predictionSet){



		this.OUTPUTS= nbOutputs;    /* number of outputs */

		trainfile = trainingSet;
		testfile = predictionSet;

		LinkedList<String> trainingCases;

		this.NUMTRAIN = 0; //Initialisation in case where try fail

		try {

			// Load the file only to count the lines; NUMTRAIN drives all array sizes.
			trainingCases = FileManager.chargerFichier(trainfile);

			this.NUMTRAIN = trainingCases.size();    /* number of training patterns */
		} catch (IOException e) {
			// NOTE(review): on failure NUMTRAIN stays 0, so all per-pattern
			// arrays below are allocated empty and training becomes a no-op.
			e.printStackTrace();

		}

		/* synaptic weights */
		inhiddw = new double[INPUTS][HIDDEN];
		hidoutw = new double[HIDDEN][OUTPUTS];

		/* delta_weights memorization for Backprop with momentum */
		deltaihw = new double[INPUTS][HIDDEN];
		deltahow = new double[HIDDEN][OUTPUTS];

		/* input, hidden and output vector values */
		x= new double[INPUTS];
		y= new double[HIDDEN];
		z= new double[OUTPUTS];


		patterns = new double[NUMTRAIN][INPUTS];
		/* desired outputs */
		desout= new double[NUMTRAIN][OUTPUTS];

		/* delta error of the hidden neurons */
		ehid = new double[HIDDEN];
		/* delta error of the output neurons */
		eout = new double[OUTPUTS];
		/* learned patterns */
		lrnpatr = new boolean[NUMTRAIN];
		/* squared mean error */
		sme = new double[NUMTRAIN];

		matrizin = new double[NUMTEST][INPUTS];



	}

	/**
	 * Logistic (sigmoid) activation function: maps any real input into (0, 1).
	 *
	 * @param x weighted input sum of a neuron
	 * @return 1 / (1 + e^-x)
	 */
	public double sigm(double x){
		double negExp = Math.exp(-x);
		return 1 / (1 + negExp);
	}

	/**
	 * Derivative of the sigmoid, expressed in terms of its output:
	 * if y = sigm(x) then dy/dx = y * (1 - y).
	 *
	 * @param y a sigmoid OUTPUT value (not the raw input)
	 * @return the slope of the sigmoid at that output
	 */
	public double dxsigm(double y){
		return y * (1 - y);
	}

	/* training patterns: [NUMTRAIN][INPUTS], filled by read_data() */
	private double[][] patterns;
	/* desired outputs: [NUMTRAIN][OUTPUTS], filled by read_data() */
	private double[][]   desout;
	/* delta error of the hidden neurons */
	private double[] ehid;
	/* delta error of the output neurons */
	private double[] eout;
	/* learned patterns: true once a pattern's error drops below EPSILON */
	private boolean[] lrnpatr;
	/* squared mean error per training pattern, refreshed by error() */
	private double[] sme;
	/* buffer for the test patterns read in test(): [NUMTEST][INPUTS] */
	private double[][] matrizin;



	/**
	 * Serializes the synaptic weights to a binary file — input-to-hidden
	 * matrix first, then hidden-to-output, both row-major — so a trained
	 * network can be restored by InitializeNetworkFromFile(String),
	 * which reads in exactly the same order.
	 *
	 * @param path destination file; overwritten if it already exists
	 */
	public void saveNetwork(String path)
	{
		DataOutputStream dos = null;
		try
		{
			dos = new DataOutputStream(new FileOutputStream(path));

			//Export synaptic weights input - hidden
			for(int input = 0; input < INPUTS; input++)
			{
				for(int hidden = 0; hidden < HIDDEN; hidden++)
				{
					dos.writeDouble(inhiddw[input][hidden]);
				}
			}
			//Export synaptic weights hidden - outputs
			for(int hidden = 0; hidden < HIDDEN; hidden++)
			{
				for(int output = 0; output < OUTPUTS; output++)
				{
					dos.writeDouble(hidoutw[hidden][output]);
				}
			}
		}
		catch (IOException e)
		{
			System.out.println("IOException : " + e);
		}
		finally
		{
			// Always release the stream; the original skipped close() when a
			// write threw, leaking the file handle.
			if (dos != null)
			{
				try { dos.close(); } catch (IOException ignored) { /* best effort */ }
			}
		}
	}

	/**
	 * Restores the synaptic weights from a binary file previously written by
	 * saveNetwork(String): input-to-hidden matrix first, then
	 * hidden-to-output, both row-major.
	 *
	 * @param path source file produced by saveNetwork
	 */
	public void InitializeNetworkFromFile(String path)
	{
		DataInputStream dis = null;
		try
		{
			dis = new DataInputStream(new FileInputStream(path));

			//Import synaptic weights input - hidden
			for(int input = 0; input < INPUTS; input++)
			{
				for(int hidden = 0; hidden < HIDDEN; hidden++)
				{
					inhiddw[input][hidden] = dis.readDouble();
				}
			}
			//Import synaptic weights hidden - outputs
			for(int hidden = 0; hidden < HIDDEN; hidden++)
			{
				for(int output = 0; output < OUTPUTS; output++)
				{
					hidoutw[hidden][output] = dis.readDouble();
				}
			}
		}
		catch (IOException e)
		{
			System.out.println("IOException : " + e);
		}
		finally
		{
			// Always release the stream; the original skipped close() when a
			// read threw, leaking the file handle.
			if (dis != null)
			{
				try { dis.close(); } catch (IOException ignored) { /* best effort */ }
			}
		}
	}


	/**
	 * Loads the training database from trainfile into patterns and desout.
	 * Each line holds tab-separated values: the first (columns - OUTPUTS)
	 * entries are inputs, the trailing OUTPUTS entries are desired outputs
	 * (written in the file as integers).
	 */
	public void read_data(){
		BufferedReader lecteurAvecBuffer = null;
		String ligne;
		String[] trame;

		try
		{
			lecteurAvecBuffer = new BufferedReader(new FileReader(trainfile));
		}
		catch(FileNotFoundException exc)
		{
			System.out.println("Error loading the training database");
			// Bug fix: the original fell through with a null reader and crashed
			// with a NullPointerException in the loop below.
			return;
		}
		try
		{
			for(int i =0; i<NUMTRAIN; i++){
				ligne = lecteurAvecBuffer.readLine();
				trame = ligne.split("\t");
				System.out.println(ligne);

				/* input patterns */
				for(int j=0;j<trame.length-OUTPUTS;j++){
					patterns[i][j] = Double.parseDouble(trame[j]);
				}
				/* desired outputs: the last OUTPUTS columns, stored from index 0 */
				int kLocal = 0;
				for(int k=(trame.length-OUTPUTS); k<trame.length; k++){
					desout[i][kLocal]= Integer.parseInt(trame[k]);
					kLocal++;
				}
			}
		}
		catch (IOException e)
		{
			System.out.println("Error loading the training database");
		}
		finally
		{
			// Bug fix: the original leaked the reader when readLine/parsing threw.
			try { lecteurAvecBuffer.close(); } catch (IOException ignored) { /* best effort */ }
		}
	}

	/**
	 * Prepares the network for training: loads the training database, draws
	 * every synaptic weight uniformly from [-0.5, 0.5), clears the momentum
	 * terms, and marks every pattern as not yet learned.
	 */
	public void init()
	{
		read_data();

		// Input -> hidden weights and their momentum memory.
		for (int in = 0; in < INPUTS; in++) {
			for (int hid = 0; hid < HIDDEN; hid++) {
				inhiddw[in][hid] = -0.5 + Math.random();
				deltaihw[in][hid] = 0;
			}
		}

		// Hidden -> output weights and their momentum memory.
		for (int hid = 0; hid < HIDDEN; hid++) {
			for (int out = 0; out < OUTPUTS; out++) {
				hidoutw[hid][out] = -0.5 + Math.random();
				deltahow[hid][out] = 0;
			}
		}

		// No pattern is considered learned before training starts.
		for (int p = 0; p < NUMTRAIN; p++) {
			lrnpatr[p] = false;
		}
	}

	/**
	 * Trains the network by stochastic backpropagation: repeatedly picks a
	 * random not-yet-learned pattern, runs a forward pass, updates the weights,
	 * then recomputes the error of every pattern. Stops when all patterns have
	 * error below EPSILON or after nbEpoch pattern presentations.
	 */
	void training()
	{
		int i,j,t; //,rep;           
		boolean l;
		//float somme = 0;
		int iterations = 0;
		System.out.println("\nTRAINING\n");
		i=0; j=0;
		do {
			// Spin until a pattern that is not yet learned is drawn. The outer
			// condition guarantees at least one such pattern exists here.
			do {
				// select a random training pattern: i = (int)(NUMTRAIN*rnd), where 0<rnd<1 
				i = (int)(NUMTRAIN*Math.random());
				iterations++;
				if(iterations % 100 == 0) 
					System.out.println(iterations);
				// Defensive clamp; Math.random() < 1.0 so i should already be in range.
				if(i==NUMTRAIN)
					i--;
			} while(lrnpatr[i]);
			j++;
			// Forward pass on the selected pattern, then one weight update.
			netanswer(patterns[i]);  
			backprop(i);
			// Refresh sme[] for ALL patterns (full error sweep after each update).
			error();
			l = true;
			//somme = 0;

			/* test if a given pattern is properly recognized */
			for(t=0;t<NUMTRAIN;t++) {
				lrnpatr[t] = sme[t] < EPSILON;
				l = l && lrnpatr[t];  // l stays true only if every pattern is learned
			}
		} while(!(l || (iterations >= nbEpoch)));


		System.out.println("\n\n End of training\n");
		System.out.println(iterations+"\n");
	}

	/**
	 * Forward pass: computes the network output z for one input pattern.
	 * Copies the pattern into x, propagates through the hidden layer (y) and
	 * then the output layer (z), using sigmoid activations and no bias terms.
	 *
	 * @param afer input pattern of length INPUTS
	 */
	void netanswer(double[] afer)
	{
		for (int in = 0; in < INPUTS; in++) {
			x[in] = afer[in];
		}
		// Hidden layer: weighted sum of all inputs, squashed by the sigmoid.
		for (int hid = 0; hid < HIDDEN; hid++) {
			double totin = 0;
			for (int in = 0; in < INPUTS; in++) {
				totin = totin + x[in] * inhiddw[in][hid];
			}
			y[hid] = sigm(totin);
		}
		// Output layer: weighted sum of the hidden activations.
		for (int out = 0; out < OUTPUTS; out++) {
			double totin = 0;
			for (int hid = 0; hid < HIDDEN; hid++) {
				totin = totin + y[hid] * hidoutw[hid][out];
			}
			z[out] = sigm(totin);
		}
	}

	/**
	 * Squared-error measure between two vectors: 0.5 * sum((a[i] - b[i])^2).
	 *
	 * @param a    first vector (e.g. desired outputs)
	 * @param b    second vector (e.g. actual outputs)
	 * @param SIZE number of components to compare
	 * @return half the summed squared differences
	 */
	double se(double[] a,double[] b,int SIZE)
	{
		double acc = 0;
		for (int i = 0; i < SIZE; i++) {
			acc = acc + (a[i] - b[i]) * (a[i] - b[i]);
		}
		return 0.5 * acc;
	}

	/**
	 * Computes the delta error of each output neuron:
	 * eout[k] = (z[k] - target_k) * sigmoid'(z[k]).
	 * Assumes netanswer() has just been run, so z holds current activations.
	 *
	 * @param patternnum index of the current training pattern in desout
	 */
	void betaout(int patternnum)   /* error of the output neurons */
	{
		// Every slot is fully overwritten below, so the original's separate
		// zero-initialization loop was dead work and has been removed.
		for(int k=0;k<OUTPUTS;k++)
			eout[k] = (z[k] - desout[patternnum][k])*(z[k]*(1.0 - z[k]));
	}

	/**
	 * Back-propagates the output error to the hidden layer:
	 * ehid[j] = sigmoid'(y[j]) * sum_k hidoutw[j][k] * eout[k].
	 * Assumes betaout() has just filled eout.
	 */
	void betahid()   /* error of the hidden neurons */
	{
		for(int j=0;j<HIDDEN;j++) {
			// The sigmoid derivative y*(1-y) does not depend on k: hoisted out of
			// the inner loop. The grouping hidoutw * (dy * eout) matches the
			// original expression, so the floating-point result is bit-identical.
			double dy = y[j]*(1-y[j]);
			ehid[j] = 0;
			for(int k=0;k<OUTPUTS;k++)
				ehid[j] = ehid[j] + hidoutw[j][k]*(dy*eout[k]);
		}
	}

	/**
	 * One backpropagation step with momentum for the given training pattern:
	 * computes both layers' delta errors, then updates both weight matrices.
	 * Assumes netanswer() was already called for this pattern.
	 *
	 * @param patternnum index of the current training pattern
	 */
	void backprop(int patternnum)
	{
		betaout(patternnum);
		betahid();

		// Hidden -> output weights: gradient step plus momentum term.
		for (int hid = 0; hid < HIDDEN; hid++) {
			for (int out = 0; out < OUTPUTS; out++) {
				double step = -delta * y[hid] * eout[out];
				hidoutw[hid][out] = hidoutw[hid][out] + step + alfa * deltahow[hid][out];
				deltahow[hid][out] = step; // remembered for the next momentum term
			}
		}

		// Input -> hidden weights: same update rule.
		for (int in = 0; in < INPUTS; in++) {
			for (int hid = 0; hid < HIDDEN; hid++) {
				double step = -delta * x[in] * ehid[hid];
				inhiddw[in][hid] = inhiddw[in][hid] + step + alfa * deltaihw[in][hid];
				deltaihw[in][hid] = step;
			}
		}
	}

	/**
	 * Recomputes the squared error of every training pattern: runs one forward
	 * pass per pattern and stores the result in sme.
	 */
	void error()
	{
		for (int p = 0; p < NUMTRAIN; p++) {
			netanswer(patterns[p]);
			sme[p] = se(desout[p], z, OUTPUTS);
		}
	}

	/**
	 * Runs the trained network on the test database and returns, for each test
	 * pattern, a comma-separated string of the raw output activations.
	 *
	 * @return one CSV line of network outputs per test pattern (possibly
	 *         incomplete/empty if the test file cannot be read)
	 */
	public LinkedList<String> test()
	{

		LinkedList<String> results = new LinkedList<String>();
		BufferedReader lecteurAvecBuffer = null;
		String ligne;
		String[] trame;

		try
		{
			lecteurAvecBuffer = new BufferedReader(new FileReader(testfile));
		}
		catch(FileNotFoundException exc)
		{
			System.out.println("Error reading the test database");
			// Bug fix: the original fell through with a null reader and crashed
			// with a NullPointerException in the loop below.
			return results;
		}

		try
		{
			// Was a hard-coded 1; NUMTEST (=1) is what matrizin is sized for.
			for(int i = 0; i<NUMTEST; i++){
				ligne = lecteurAvecBuffer.readLine();
				trame = ligne.split("\t");
				for(int j=0;j<trame.length;j++){
					matrizin[i][j] = Double.parseDouble(trame[j]);
				}
				/* present a test pattern and compute the neural network output */
				netanswer(matrizin[i]);
				Log.v("LOL",i+": ");
				// Join the raw activations with commas (no thresholding is applied;
				// the original's "larger than 0.6" comment was stale).
				StringBuilder outputVector = new StringBuilder();
				for(int j=0;j<OUTPUTS;j++){
					if (j > 0) outputVector.append(',');
					outputVector.append(z[j]);
				}
				results.add(outputVector.toString());
			}
		}
		catch (IOException e)
		{
			System.out.println("Error reading the test database");
		}
		finally
		{
			// Bug fix: the original leaked the reader when readLine/parsing threw.
			try { lecteurAvecBuffer.close(); } catch (IOException ignored) { /* best effort */ }
		}

		return results;
	}
}
