package eg.edu.eelu.fyp2013.jdetector.core.classifier;

import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;

import org.neuroph.core.NeuralNetwork;
import org.neuroph.core.data.DataSet;
import org.neuroph.core.data.DataSetRow;
import org.neuroph.nnet.MultiLayerPerceptron;
import org.neuroph.nnet.learning.BackPropagation;
import org.neuroph.util.TransferFunctionType;

import eg.edu.eelu.fyp2013.jdetector.core.input_output.ClassiferData;
import FullKLDA.*;
/**
 * Trains, tests and applies a Neuroph multi-layer perceptron over reduced
 * feature vectors ({@link ClassiferData#Allreduceddata}).
 *
 * NOTE(review): the class name misspells "Neural"; it is kept as-is so
 * existing callers keep compiling.
 */
public class NerualNetworkClassifier {

	public NerualNetworkClassifier() {
	}

	/** Number of output neurons — one label value per sample. */
	final int outputSize = 1;
	/**
	 * Historical default input-layer size. The network is now sized from the
	 * actual feature-vector length in {@link #multiNNBuild(ClassiferData[])};
	 * previously this hard-coded 1 was passed to the network constructor even
	 * though each sample carries Allreduceddata.size() features.
	 */
	final int inputNeurons = 1;
	/** Hidden-layer neuron count. */
	final int hiddenNeurons = 3;
	/** Activation function used in every layer. */
	final TransferFunctionType transferFunction = TransferFunctionType.SIGMOID;

	ArrayList<Double[]> learnData = new ArrayList<Double[]>();
	ArrayList<Double[]> testData = new ArrayList<Double[]>();

	/** Desired (expected) labels of the most recent test set; filled by iterateTestDataset. */
	public double[] desiredlabels;
	/** Back-propagation hyper-parameters. */
	double learningRate = 0.5;
	double maxError = 0.000001;
	int maxIterations = 1000000;

	/**
	 * Converts labelled training samples into a supervised Neuroph DataSet.
	 *
	 * @param Learned   samples whose Allreduceddata holds the reduced feature vector
	 * @param inputSize length of each feature vector
	 * @return a DataSet with one labelled row per sample, in input order
	 */
	public DataSet iterateLearnDataset(ClassiferData[] Learned, int inputSize) {
		DataSet learningDataSet = new DataSet(inputSize, outputSize);
		for (int i = 0; i < Learned.length; i++) {
			double[] reducefeature = new double[Learned[i].Allreduceddata.size()];
			for (int j = 0; j < Learned[i].Allreduceddata.size(); j++) {
				reducefeature[j] = Learned[i].Allreduceddata.get(j);
			}
			double[] label = { Learned[i].label };
			learningDataSet.addRow(new DataSetRow(reducefeature, label));
		}
		return learningDataSet;
	}

	/**
	 * Converts labelled test samples into a supervised Neuroph DataSet and
	 * records each sample's expected label into {@link #desiredlabels} (used
	 * later by {@link #showdataintable(double[])}).
	 *
	 * @param Tested    samples to evaluate
	 * @param inputSize length of each feature vector
	 * @return a DataSet with one labelled row per sample, in input order
	 */
	public DataSet iterateTestDataset(ClassiferData[] Tested, int inputSize) {
		DataSet TestingDataSet = new DataSet(inputSize, outputSize);
		desiredlabels = new double[Tested.length];
		for (int i = 0; i < Tested.length; i++) {
			double[] reducefeature = new double[Tested[i].Allreduceddata.size()];
			for (int j = 0; j < Tested[i].Allreduceddata.size(); j++) {
				reducefeature[j] = Tested[i].Allreduceddata.get(j);
			}
			desiredlabels[i] = Tested[i].label;
			double[] label = { Tested[i].label };
			TestingDataSet.addRow(new DataSetRow(reducefeature, label));
		}
		return TestingDataSet;
	}

	/**
	 * Converts unlabelled samples into a DataSet for classification.
	 *
	 * The true labels are unknown here, so a 0.0 placeholder is stored for
	 * each row. (The previous code wrote {@code label[0] = (Double) null;},
	 * which unboxes null and always threw a NullPointerException.)
	 *
	 * @param classified samples to classify
	 * @param inputSize  length of each feature vector
	 * @return a DataSet with one row per sample, labels set to 0.0
	 */
	public DataSet iterateClassifyDataset(ClassiferData[] classified, int inputSize) {
		DataSet classifyDataSet = new DataSet(inputSize, outputSize);
		for (int i = 0; i < classified.length; i++) {
			double[] reducefeature = new double[classified[i].Allreduceddata.size()];
			for (int j = 0; j < classified[i].Allreduceddata.size(); j++) {
				reducefeature[j] = classified[i].Allreduceddata.get(j);
			}
			// Placeholder label: the real label is what the network will predict.
			double[] label = { 0.0 };
			classifyDataSet.addRow(new DataSetRow(reducefeature, label));
		}
		return classifyDataSet;
	}

	/**
	 * Builds and trains a multi-layer perceptron on the given samples, then
	 * saves it to "multiNeuralNetwork.nnet" in the working directory.
	 *
	 * @param Learned non-empty array of labelled training samples
	 */
	public void multiNNBuild(ClassiferData[] Learned) {
		int inputsize = Learned[0].Allreduceddata.size();

		// Size the input layer from the actual feature-vector length. The old
		// code always built a 1-input network (inputNeurons) regardless of the
		// data, so the topology never matched the training rows.
		MultiLayerPerceptron multiNeuralNetwork =
				new MultiLayerPerceptron(transferFunction, inputsize, hiddenNeurons, outputSize);

		DataSet learningDataSet = iterateLearnDataset(Learned, inputsize);

		BackPropagation learningRule = new BackPropagation();
		learningRule.setLearningRate(learningRate);
		learningRule.setMaxError(maxError);
		learningRule.setMaxIterations(maxIterations);
		multiNeuralNetwork.setLearningRule(learningRule);

		multiNeuralNetwork.learn(learningDataSet);

		// Persist the trained network for multiNNTest / multiNNclassify.
		multiNeuralNetwork.save("multiNeuralNetwork.nnet");
	}

	/**
	 * Loads the previously saved network from the working directory and runs
	 * the given labelled samples through it.
	 *
	 * @param Tested non-empty array of labelled test samples
	 * @return one formatted network output per sample (see calculateNeuralNetwork)
	 */
	public String[] multiNNTest(ClassiferData[] Tested) {
		int inputsize = Tested[0].Allreduceddata.size();
		DataSet testingDataSet = iterateTestDataset(Tested, inputsize);

		NeuralNetwork loadedMlPerceptron = NeuralNetwork.createFromFile("multiNeuralNetwork.nnet");
		return calculateNeuralNetwork(loadedMlPerceptron, testingDataSet);
	}

	/**
	 * Classifies unlabelled samples with the final network bundled on the
	 * classpath as "multiNeuralNetwork.nnet".
	 *
	 * @param classified non-empty array of samples to classify
	 * @return one formatted network output per sample
	 * @throws IOException if the bundled network resource cannot be found
	 */
	public String[] multiNNclassify(ClassiferData[] classified) throws IOException {
		int inputsize = classified[0].Allreduceddata.size();
		DataSet classifyDataSet = iterateClassifyDataset(classified, inputsize);

		NeuralNetwork loadedFinalMlPerceptron = loadBundledNetwork();
		return calculateNeuralNetwork(loadedFinalMlPerceptron, classifyDataSet);
	}

	/**
	 * Classifies raw feature values with the final network bundled on the
	 * classpath as "multiNeuralNetwork.nnet".
	 *
	 * @param testedvalues feature values; each value is fed to the network as
	 *                     a separate single-input sample (see the note on
	 *                     {@link #calculateNeuralNetwork(NeuralNetwork, double[])})
	 * @return one formatted network output per value
	 * @throws IOException if the bundled network resource cannot be found
	 */
	public String[] multiNNclassify(double[] testedvalues) throws IOException {
		NeuralNetwork loadedFinalMlPerceptron = loadBundledNetwork();
		return calculateNeuralNetwork(loadedFinalMlPerceptron, testedvalues);
	}

	/**
	 * Loads the serialized network from the classpath. Replaces the previous
	 * {@code new ClassLoader(){}} anonymous-subclass hack — this class's own
	 * loader delegates to the same parent chain, and a missing resource now
	 * fails with a clear IOException instead of an NPE inside Neuroph.
	 */
	private NeuralNetwork loadBundledNetwork() throws IOException {
		java.io.InputStream in = NerualNetworkClassifier.class.getClassLoader()
				.getResourceAsStream("multiNeuralNetwork.nnet");
		if (in == null) {
			throw new IOException("multiNeuralNetwork.nnet not found on the classpath");
		}
		return NeuralNetwork.load(in);
	}

	/**
	 * Runs every row of testSet through nnet and returns each network output
	 * formatted via Arrays.toString, in row order.
	 *
	 * @param nnet    the network to evaluate
	 * @param testSet rows to feed through the network
	 * @return formatted outputs, one entry per row
	 */
	public String[] calculateNeuralNetwork(NeuralNetwork nnet, DataSet testSet) {
		String[] output = new String[testSet.size()];
		int i = 0;
		for (DataSetRow dataRow : testSet.getRows()) {
			nnet.setInput(dataRow.getInput());
			nnet.calculate();
			output[i++] = Arrays.toString(nnet.getOutput());
		}
		return output;
	}

	/**
	 * Runs each value through the network as a separate single-input sample.
	 *
	 * NOTE(review): this feeds ONE scalar per calculation, which only matches
	 * a network with a single input neuron. If the intent was to classify one
	 * multi-feature vector, the whole array should be set at once — confirm
	 * against the callers before changing.
	 *
	 * @param nnet         the network to evaluate
	 * @param testedvalues scalar inputs, one per sample
	 * @return formatted outputs, one entry per value
	 */
	public String[] calculateNeuralNetwork(NeuralNetwork nnet, double[] testedvalues) {
		String[] output = new String[testedvalues.length];
		for (int i = 0; i < testedvalues.length; i++) {
			nnet.setInput(testedvalues[i]);
			nnet.calculate();
			output[i] = Arrays.toString(nnet.getOutput());
		}
		return output;
	}

	/**
	 * Tallies a confusion matrix by comparing actual network outputs against
	 * {@link #desiredlabels} (so iterateTestDataset must have run first).
	 *
	 * Result layout: [0]=TP, [1]=TN, [2]=FP, [3]=FN,
	 * [4]=sensitivity %, [5]=specificity %, [6]=accuracy %.
	 *
	 * Outputs in the open interval (0.2, 0.8) are treated as undecided and
	 * are not counted — same thresholds as before. The derived percentages
	 * now return 0 instead of NaN when a denominator is 0 (e.g. the test set
	 * contains no positive samples).
	 *
	 * @param actualoutputs network outputs, aligned with desiredlabels
	 * @return the seven statistics described above
	 */
	public double[] showdataintable(double[] actualoutputs) {
		double[] ANNresult = new double[7];

		for (int i = 0; i < actualoutputs.length; i++) {
			if (desiredlabels[i] == 1) {
				if (actualoutputs[i] <= 0.2) {
					ANNresult[3]++; // false negative
				}
				if (actualoutputs[i] >= 0.8) {
					ANNresult[0]++; // true positive
				}
			}
			if (desiredlabels[i] == 0) {
				if (actualoutputs[i] <= 0.2) {
					ANNresult[1]++; // true negative
				}
				if (actualoutputs[i] >= 0.8) {
					ANNresult[2]++; // false positive
				}
			}
		}

		// sensitivity = TP / (TP + FN), specificity = TN / (FP + TN),
		// accuracy = (TP + TN) / (TP + TN + FP + FN) — all as percentages.
		ANNresult[4] = percentage(ANNresult[0], ANNresult[0] + ANNresult[3]);
		ANNresult[5] = percentage(ANNresult[1], ANNresult[2] + ANNresult[1]);
		ANNresult[6] = percentage(ANNresult[0] + ANNresult[1],
				ANNresult[0] + ANNresult[1] + ANNresult[2] + ANNresult[3]);

		return ANNresult;
	}

	/** Returns 100 * numerator / denominator, or 0 when the denominator is 0 (avoids NaN). */
	private static double percentage(double numerator, double denominator) {
		return denominator == 0 ? 0 : (numerator / denominator) * 100;
	}

	/**
	 * Writes the learned feature vectors and labels to the file
	 * "neuralClassifiedLearneddata": first line is the sample count, then one
	 * line per sample of " ,"-separated feature values terminated by "=label".
	 *
	 * The original closed the FileWriter BEFORE the PrintWriter; PrintWriter
	 * swallows IOExceptions, so any buffered output was silently lost. The
	 * PrintWriter is now closed last (and only once — it closes the
	 * underlying FileWriter itself).
	 *
	 * @param learned samples to persist
	 * @throws IOException if the output file cannot be created
	 */
	public void Savelearnedfeaturesandlabel(ClassiferData[] learned) throws IOException {
		FileWriter File = new FileWriter("neuralClassifiedLearneddata");
		PrintWriter pw = new PrintWriter(File);
		try {
			pw.println(learned.length);
			for (int i = 0; i < learned.length; i++) {
				for (int j = 0; j < learned[i].Allreduceddata.size(); j++) {
					pw.print(learned[i].Allreduceddata.get(j) + " " + ",");
				}
				pw.print("=" + learned[i].label);
				pw.println();
			}
		} finally {
			pw.close(); // flushes, then closes the underlying FileWriter
		}
	}

}



