package ClassificationAlgorithm;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Random;

import AggregationMethod.AggregationMethodFactory;
import AggregationMethod.AggregationMethodInterface;
import Definitions.GraphClass;
import Definitions.NodeClass;
import Global.ConstantVariable;
import Global.GlobalClass;
import InputPreparer.InputPreparationMethodInterface;
import InputPreparer.InputPreparerFactory;
import LocalClassifier.LocalClassifierInterface;
import LocalClassifier.LocalClassifierTest;
import Result.EvaluationClass;
import Result.ResultClass;
import Result.ResultClassListClass;
import Result.ResultIdentificationClass;
import Sampling.SamplingAbstractClass;
import Sampling.SamplingFactory;
import Sampling.SamplingTests;
import Utility.FileUtilityClass;
/*****************************************************************
* Class Name:	ICAClass
* Aim:			Run the ICA (Iterative Classification Algorithm) with the given parameters
* Algorithms:	ICA
*****************************************************************/
public class ICAClass extends CCAClass {
	
	// Algorithm name; always set to the ICA constant (the constructor's name
	// argument is forwarded to the superclass only).
	String name;
	// Fraction of test nodes whose label must remain unchanged for an
	// iteration to be counted as "stable".
	double stabilityThreshold;
	// Number of consecutive stable iterations required before ICA terminates.
	int iterationForStability;

	/**
	 * Creates an ICA runner with default stability parameters
	 * (threshold 0.95, 20 consecutive stable iterations).
	 *
	 * @param graph      the graph the algorithm operates on
	 * @param global     global configuration (class list etc.)
	 * @param classifier the local classifier used to label nodes
	 * @param name       name forwarded to the CCA superclass
	 */
	public ICAClass(GraphClass graph, GlobalClass global,LocalClassifierInterface classifier, String name) 
	{
		super(graph, global,classifier, name);

		this.name=ConstantVariable.CCA_ConstantVariables.ICA;
		this.stabilityThreshold = 0.95; // redundant (double) cast removed
		this.iterationForStability=20;
	}

	/**
	 * Overrides the default stability parameters of the ICA loop.
	 *
	 * @param threshold fraction of unchanged labels required for an iteration
	 *                  to count as stable
	 * @param iteration number of consecutive stable iterations needed to stop
	 */
	public void setThreshold(double threshold, int iteration)
	{
		this.iterationForStability = iteration;
		this.stabilityThreshold = threshold;
	}
	
	/*****************************************************************
	* Function Name:			Run
	* Aim: 						To run the ICA algorithm with the given parameters
	* Inputs:
	*
	* SamplingAbstractClass currentSampling:	the train/test split to classify
	* ResultClass result:						collects the run's accuracies
	* Outputs:
	* The ICA algorithm is processed on the graph and the start/end
	* accuracies are stored in result.
	*
	* Data Structures:
	* Graph
	* Node
	*
	* Algorithms:
	* Local Classifier
	* Neighbourhood Functions 
	*****************************************************************/

	 /**
	  * Runs ICA on the given sampling: trains the local classifier, assigns
	  * random initial labels to the test nodes, performs a first labelling
	  * pass, then iteratively re-labels the test nodes in shuffled order until
	  * the labelling is stable (or a 100-iteration cap is hit).  Per-iteration
	  * accuracies and timings are written to a per-fold text file.
	  *
	  * @param currentSampling the train/test split to run ICA on
	  * @param result          receives start/end accuracy of the run
	  */
	 public void Run(SamplingAbstractClass currentSampling, ResultClass result)
	 {
		 //TODO Kadriye: Organization
		 BufferedWriter out = null;
		 try{
			// Per-fold log file recording the accuracy of every ICA iteration.
			FileWriter fstream = new FileWriter("ICA"+result.getIdentifier().getFold() +"_.txt");
			out = new BufferedWriter(fstream);

			out.write("ICA_Fold:"+ result.getIdentifier().getFold());
			out.newLine();

			// Visiting order of the test nodes; shuffled by reOrder each iteration.
			int[] orderArray = initializeOrderArray(currentSampling.getTestNodes().size());

			ArrayList<NodeClass> train1Nodes = GetTraning1Nodes(currentSampling);
			// One row per iteration: [0]=overall test accuracy, [1]=train1
			// accuracy, [2..]=estimated label of each train1 node.
			// Capacity is exactly 100 iterations.
			double iterationsResultsForTrain1Nodes[][] = new double[100][train1Nodes.size()+2];

			EvaluationClass Evaluation = new EvaluationClass(currentSampling);

			for(int i=0; i< orderArray.length; i++)
			{
				orderArray[i]=i;
			}

			localClassifierSelected.initialize(graph, result);
			// Train the local classifier (finds the W matrix) and time it.
			long startTimeTraining = System.currentTimeMillis();
			localClassifierSelected.train(graph);
			long endTimeTraining = System.currentTimeMillis();

			// Bootstrap: assign every test node a uniformly random class label.
			Random random = new Random();
			int sizeOfClass = global.classList.size();
			for(int i=0; i< currentSampling.getTestNodes().size() ;i++)
			{
				int randomClass= random.nextInt(sizeOfClass);
				currentSampling.setClassLabelEstimated(currentSampling.getTestNodes().get(i), randomClass);
			}
			// First labelling pass with the trained classifier ("start" mode).
			for(int i=0; i< currentSampling.getTestNodes().size() ;i++)
			{
				localClassifierSelected.evaluate(currentSampling.getTestNodes().get(i), ConstantVariable.Sampling_Constants.NODE_IS_IN_TEST_SET_FOR_THE_FOLD, "start");
			}

			long startTimeICA = System.currentTimeMillis();

			double start = Evaluation.findTotalAccuracy(Evaluation.CreateConfMat(currentSampling.getTestNodes()));
			out.write("start: "+start);
			out.newLine();

			// Iterative labelling: each test node is relabelled and the other
			// test nodes' neighbourhood relations change with respect to the
			// previous assignments.  Stops after iterationForStability
			// consecutive stable iterations.
			boolean isStable= false;
			int stability=0;
			int previousLabel[]= new int[currentSampling.getTestNodes().size()];
			double currentAccuracy;
			int iterationNumber=0;

			while(!isStable)  //check the stability of the label assignment
			{
				reOrder(orderArray);
				for(int a = 0 ; a < currentSampling.getTestNodes().size(); a++)
				{
					localClassifierSelected.evaluate( currentSampling.getTestNodes().get(orderArray[a]),ConstantVariable.Sampling_Constants.NODE_IS_IN_TEST_SET_FOR_THE_FOLD, "soft"); //use predict orders
				}
				// Count how many labels survived this iteration unchanged.
				int constant=0;
				for(int i=0;  i< currentSampling.getTestNodes().size(); i++)
				{
					if(previousLabel[i]== currentSampling.getClassLabelEstimated(currentSampling.getTestNodes().get(i)))
					{
						constant++;
					}
					previousLabel[i]=currentSampling.getClassLabelEstimated(currentSampling.getTestNodes().get(i));
				}

				if((double)constant/currentSampling.getTestNodes().size() > stabilityThreshold)
				{
					stability++;
				}
				else
					stability=0;
				// Enough consecutive stable iterations: converged, exit.
				if(stability >= iterationForStability)
					isStable= true;

				// BUGFIX: stop BEFORE iterationNumber reaches 100 — the results
				// matrix has exactly 100 rows, so the original ">100" guard let
				// row index 100 be written and threw
				// ArrayIndexOutOfBoundsException (skipping the result setters).
				if(iterationNumber>=100)
					break;

				currentAccuracy = Evaluation.findTotalAccuracy(Evaluation.CreateConfMat(currentSampling.getTestNodes()));

				iterationsResultsForTrain1Nodes[iterationNumber][0] = currentAccuracy;

				testAndOutputAccuraciesOfTrain1(currentSampling, train1Nodes, localClassifierSelected, iterationsResultsForTrain1Nodes, iterationNumber);

				FileUtilityClass.WriteMatrixToFile(iterationsResultsForTrain1Nodes, "Test_Train_EstimatedLabels_ICA_FOLD_"+  result.getIdentifier().getFold() + "_IterationResultsOdTrain1.txt");

				iterationNumber++;
				out.write("iteration:"+iterationNumber+" acc: "+currentAccuracy);
				out.newLine();
			}

			long endTimeICA = System.currentTimeMillis();
			/*TODO KADRIYEB generate matlab file */

			double end = Evaluation.findTotalAccuracy(Evaluation.CreateConfMat(currentSampling.getTestNodes()));

			result.getEvaluationResult().createConfMatAndPrepareTheResults();

			System.out.println("ICA Start: "+ start +"  End "+ end);

			out.write("ICA Start: "+ start +"  End "+ end);

			result.setAccuracy(end);
			result.setStartAccuracy(start);
			result.setEndAccuracy(end);

			// BUGFIX: elapsed time is end - start; the original subtraction was
			// reversed and always logged non-positive durations.
			long trainingTime = endTimeTraining-startTimeTraining;
			long icaTime = endTimeICA-startTimeICA;
			out.write("Training process: "+ trainingTime);
			out.newLine(); // BUGFIX: keep the two timing entries on separate lines
			out.write("ICA process: "+ icaTime);
			out.newLine();
		}catch (Exception e){//Catch exception if any
			System.err.println("Error: " + e.getMessage());
			// Report the full failure, not just the message.
			e.printStackTrace();
		}
		finally{
			// BUGFIX: the writer was only closed on the success path; close it
			// unconditionally so the log file handle is not leaked on error.
			if(out != null)
			{
				try { out.close(); } catch (Exception ignored) {}
			}
		}
	}

		
	 /**
	  * Re-evaluates every "train1" node (nodes with a neighbour in the test
	  * set), records each node's estimated label into
	  * {@code iterationResults[iteration][2..]} and the fraction of correctly
	  * labelled nodes into {@code iterationResults[iteration][1]}.
	  * Column 0 is reserved for the overall test accuracy set by the caller.
	  *
	  * @param currentSampling sampling used to read estimated labels
	  * @param nodeList        the train1 nodes to evaluate
	  * @param localClassifier classifier used for the "soft" evaluation
	  * @param iterationResults matrix of per-iteration results (written in place)
	  * @param iteration       row of the matrix to fill
	  */
	 private void testAndOutputAccuraciesOfTrain1(SamplingAbstractClass currentSampling, ArrayList<NodeClass> nodeList, LocalClassifierInterface localClassifier, double[][] iterationResults, int iteration)
	 {
		 int index = 2; // columns 0 and 1 hold accuracies; labels start at 2
		 int valid = 0;
		 for(NodeClass node : nodeList)
		 {
			 localClassifier.evaluate(node, ConstantVariable.Sampling_Constants.NODE_IS_IN_TEST_SET_FOR_THE_FOLD, "soft");
			 iterationResults[iteration][index] = currentSampling.getClassLabelEstimated(node);

			 if(iterationResults[iteration][index] == node.getClassOrder())
			 {
				 valid++;
			 }

			 index++;
		 }

		 // BUGFIX: use double division (the float casts needlessly lost
		 // precision in a double slot) and avoid recording NaN when the
		 // node list is empty.
		 iterationResults[iteration][1] = nodeList.isEmpty() ? 0.0 : (double)valid/(double)nodeList.size();
	 }

	 
	 /**
	  * Collects every node of the graph that has at least one neighbour inside
	  * the current fold's test set (the "training-1" border nodes).
	  *
	  * @param currentSampling the fold whose test set defines the border
	  * @return the list of border nodes, possibly empty
	  */
	 private ArrayList<NodeClass> GetTraning1Nodes(SamplingAbstractClass currentSampling)
	 {
		 ArrayList<NodeClass> borderNodes = new ArrayList<NodeClass>();

		 for(NodeClass candidate : graph.getDataSet())
		 {
			 if(!hasAnyNeigbourInTest(candidate, currentSampling))
			 {
				 continue;
			 }
			 borderNodes.add(candidate);
		 }

		 return borderNodes;
	 }
	 
	 /**
	  * Reports whether any 1-hop neighbour of the node belongs to the current
	  * fold's test set.
	  *
	  * @param node            the node whose neighbourhood is inspected
	  * @param currentSampling the fold providing the test-set membership check
	  * @return true as soon as one neighbour is found in the test set
	  */
	 private boolean hasAnyNeigbourInTest(NodeClass node, SamplingAbstractClass currentSampling)
	 {
		 ArrayList<NodeClass> adjacent = node.getNeighbours(0, 1);

		 for(int pos = 0; pos < adjacent.size(); pos++)
		 {
			 if(currentSampling.isTheNodeInTestSetForTheFold(adjacent.get(pos)))
			 {
				 return true;
			 }
		 }

		 return false;
	 }
	 
	 /**
	  * Stand-alone smoke test for ICA on the CORA dataset: builds a snowball
	  * sampling, content and link input preparers, a logistic-regression local
	  * classifier, then runs the algorithm for each fold and prints the
	  * per-class accuracies.
	  *
	  * @param graph         the graph to classify
	  * @param global        global configuration (class list etc.)
	  * @param numberOfFolds number of folds to execute
	  */
	 public static void Test(GraphClass graph, GlobalClass global, int numberOfFolds)
	 {
		 String algorithmName = ConstantVariable.CCA_ConstantVariables.ICA;
		 String samplingMethodName = ConstantVariable.Sampling_Constants.SNOWBALL_SAMPLING_METHOD;
		 String datasetName = "CORA";

		 /* Build the sampling and generate the initial split. */
		 SamplingFactory samplingBuilder = new SamplingFactory();
		 SamplingAbstractClass sampling = samplingBuilder.createSampling(SamplingTests.createDummyParametresForSampling(ConstantVariable.Sampling_Constants.SNOWBALL_SAMPLING_METHOD), graph, global);

		 EvaluationClass evaluator = new EvaluationClass(sampling);

		 sampling.generateSampling(graph);

		 /* Content-based feature preparer. */
		 InputPreparerFactory preparerFactory = new InputPreparerFactory();

		 ArrayList<Object> contentParams = new ArrayList<Object>();
		 contentParams.add(ConstantVariable.InputPreparer_Constants.CONTENT_INPUT);
		 InputPreparationMethodInterface contentPreparer = preparerFactory.createInputPreparer(contentParams, sampling, global);

		 /* Link-based feature preparer, aggregating 1-hop neighbours with SUM. */
		 ArrayList<Object> linkParams = new ArrayList<Object>();
		 linkParams.add(ConstantVariable.InputPreparer_Constants.LINK_INPUT);
		 linkParams.add(0);
		 linkParams.add(1);

		 AggregationMethodFactory aggregationFactory = new AggregationMethodFactory();
		 AggregationMethodInterface aggregation = aggregationFactory.createAggregationMethod(ConstantVariable.AggregationMethod_ConstantVariables.SUM, sampling);

		 linkParams.add(aggregation);
		 InputPreparationMethodInterface linkPreparer = preparerFactory.createInputPreparer(linkParams, sampling, global);

		 ArrayList<InputPreparationMethodInterface> preparers = new ArrayList<InputPreparationMethodInterface>();
		 preparers.add(contentPreparer);
		 preparers.add(linkPreparer);

		 LocalClassifierInterface localClassifier = LocalClassifierTest.createDummyClassifier(ConstantVariable.LocalClassifier_Constants.LOGISTIC_REGRESSION, preparers, sampling, global);

		 CollectiveClassificationAlgorithmFactory algorithmFactory = new CollectiveClassificationAlgorithmFactory(graph, global);
		 CollectiveClassificationAlgorithmClass algorithm = algorithmFactory.createCCA(algorithmName, ConstantVariable.LocalClassifier_Constants.LOGISTIC_REGRESSION, sampling);

		 for(int fold = 0; fold < numberOfFolds; fold++)
		 {
			 ResultIdentificationClass identifier = new ResultIdentificationClass();
			 identifier.setDataset(datasetName);
			 identifier.setCCA(algorithmName);
			 identifier.setSampling(samplingMethodName);
			 identifier.setFold(fold);
			 ResultClass result = new ResultClass(identifier);

			 sampling.generateSampling(graph, fold);
			 result.setSampling(sampling);
			 algorithm.Run(sampling, result);

			 double[] accuracy = evaluator.findClasifierAccuracyForAllClasses(evaluator.CreateConfMat(sampling.getTestNodes()));

			 for(int classIndex = 0; classIndex < global.classList.size(); classIndex++)
			 {
				 System.out.print("  ***Result:  "+ accuracy[classIndex] + "* *** ");
			 }
		 }
	 }

	 /**
	  * Runs ICA for the given number of folds with the named local classifier
	  * and collects the per-fold results.  When {@code prevSampling} is
	  * supplied it is reused as-is (no resampling per fold); otherwise a fresh
	  * snowball sampling is created and regenerated for every fold.
	  *
	  * @param graph               the graph to classify
	  * @param global              global configuration (class list etc.)
	  * @param numberOfFolds       number of folds to execute
	  * @param prevSampling        an existing sampling to reuse, or null
	  * @param localClassifierName name of the local classifier to build
	  * @return the accumulated results of all folds
	  */
	 public ResultClassListClass TestWithWeka(GraphClass graph, GlobalClass global, int numberOfFolds, SamplingAbstractClass prevSampling, String localClassifierName)
	 {
		 String algorithmName = ConstantVariable.CCA_ConstantVariables.ICA;
		 String samplingMethodName = ConstantVariable.Sampling_Constants.SNOWBALL_SAMPLING_METHOD;
		 String datasetName = ConstantVariable.DataSet_Constants.DATASET_NAMES[GlobalClass.RunningValues.executionFor];

		 ResultClassListClass resultsList = new ResultClassListClass();

		 /* Reuse the caller's sampling when present, otherwise build one. */
		 SamplingAbstractClass sampling = (prevSampling != null)
				 ? prevSampling
				 : new SamplingFactory().createSampling(SamplingTests.createDummyParametresForSampling(samplingMethodName), graph, global);

		 EvaluationClass evaluator = new EvaluationClass(sampling);

		 CollectiveClassificationAlgorithmFactory algorithmFactory = new CollectiveClassificationAlgorithmFactory(graph, global);
		 CollectiveClassificationAlgorithmClass algorithm = algorithmFactory.createCCA(algorithmName, localClassifierName, sampling);

		 for(int fold = 0; fold < numberOfFolds; fold++)
		 {
			 ResultIdentificationClass identifier = new ResultIdentificationClass();
			 identifier.setDataset(datasetName);
			 identifier.setCCA(algorithmName);
			 identifier.setSampling(samplingMethodName);
			 identifier.setFold(fold);
			 ResultClass result = new ResultClass(identifier);
			 System.out.println("Fold :  " + fold);

			 // Only regenerate the split when this method owns the sampling.
			 if(prevSampling == null)
				 sampling.generateSampling(graph, fold);

			 result.setSampling(sampling);

			 algorithm.Run(sampling, result);

			 double[] accuracy = evaluator.findClasifierAccuracyForAllClasses(evaluator.CreateConfMat(sampling.getTestNodes()));

			 for(int classIndex = 0; classIndex < global.classList.size(); classIndex++)
			 {
				 System.out.print("  ***Result:  "+ accuracy[classIndex] + "* *** ");
			 }

			 System.out.print("  ***Result: samplin name : "+ sampling.getName() +"  size: "+ sampling.getTestNodes().size() + "* *** ");
			 System.out.println();
			 resultsList.addToResultsList(result);
		 }
		 return resultsList;
	 }
	 
	 /**
	  * Static entry point for the parallel Weka test: builds an ICA instance
	  * through the CCA factory, installs the second-level classifier and
	  * delegates to {@link #TestWithWekaInParallel}.
	  *
	  * @return the accumulated results of all folds
	  */
	 public static ResultClassListClass TestWithWekaInParallelStart(GraphClass graph, GlobalClass global, int numberOfFolds, SamplingAbstractClass prevSampling, String localClassifierName, String secondLevelLocalClassifierName)
	 {
		 CollectiveClassificationAlgorithmFactory factory = new CollectiveClassificationAlgorithmFactory(graph, global);
		 CollectiveClassificationAlgorithmClass algorithm = factory.createCCA(ConstantVariable.CCA_ConstantVariables.ICA, localClassifierName, null);
		 algorithm.getLocalClassifierSelected().changeSecondLevelClassifierToBeUsed(secondLevelLocalClassifierName);
		 ICAClass icaInstance = (ICAClass) algorithm;
		 return icaInstance.TestWithWekaInParallel(graph, global, numberOfFolds, prevSampling, localClassifierName);
	 }
	 
	 /**
	  * Runs the folds in parallel batches, starting at most
	  * {@code GlobalClass.RunningValues.maximumNumberOfCPUCoresToBeUsedForTooMuchRamRequiringAndTimeConsumingTasks}
	  * worker threads at a time and joining each batch before starting the
	  * next.  Each worker processes exactly one fold.
	  *
	  * @param graph               the graph to classify
	  * @param global              global configuration
	  * @param numberOfFolds       total number of folds to process
	  * @param prevSampling        sampling handed to every worker thread
	  * @param localClassifierName name of the local classifier to build
	  * @return the results of all folds, in creation order
	  */
	 public ResultClassListClass TestWithWekaInParallel(GraphClass graph, GlobalClass global, int numberOfFolds, SamplingAbstractClass prevSampling, String localClassifierName)
	 {
		 System.out.println("ICA Weka Parallel Test Start ");

		 int numberOfCores = GlobalClass.RunningValues.maximumNumberOfCPUCoresToBeUsedForTooMuchRamRequiringAndTimeConsumingTasks;
		 ICATestWithWekaProcessorThreadClass[] ICATestWithWekaProcessorPool = new ICATestWithWekaProcessorThreadClass[numberOfCores];

		 String datasetName = ConstantVariable.DataSet_Constants.DATASET_NAMES[GlobalClass.RunningValues.executionFor];
		 String CCA_Name = ConstantVariable.CCA_ConstantVariables.ICA;

		 ResultClassListClass resultsList = new ResultClassListClass();

		 int numberOfCompletedFolds = 0;
		 while(numberOfCompletedFolds < numberOfFolds)
		 {
			 // Never start more threads than remaining folds or available cores.
			 int numberOfNeededThreads = Math.min(numberOfCores, numberOfFolds - numberOfCompletedFolds);

			 for(int i=0; i<numberOfNeededThreads; i++)
			 {
				 ResultIdentificationClass identifier = new ResultIdentificationClass();
				 identifier.setDataset(datasetName);
				 identifier.setCCA(CCA_Name);
				 identifier.setSampling("CO_TRAIN");
				 identifier.setFold(numberOfCompletedFolds+i);
				 ResultClass result = new ResultClass(identifier);

				 resultsList.addToResultsList(result);

				 ICATestWithWekaProcessorPool[i] = new ICATestWithWekaProcessorThreadClass(numberOfCompletedFolds+i, graph, global, result, localClassifierName, prevSampling);
				 ICATestWithWekaProcessorPool[i].start();
			 }

			 // Wait for the whole batch before scheduling the next one.
			 for(int i=0; i<numberOfNeededThreads; i++)
			 {
				 try
				 {
					 ICATestWithWekaProcessorPool[i].join();
				 }
				 catch(InterruptedException e)
				 {
					 // BUGFIX: restore the interrupt flag instead of
					 // swallowing the interruption.
					 Thread.currentThread().interrupt();
					 e.printStackTrace();
				 }
			 }
			 numberOfCompletedFolds += numberOfNeededThreads;
		 }
		 return resultsList;
	 }

	 /**
	  * Worker thread that runs one ICA fold end to end: generates the fold's
	  * sampling, builds a CCA instance for it, runs it and prints the
	  * per-class accuracies.
	  *
	  * NOTE(review): all workers started by TestWithWekaInParallel share the
	  * SAME sampling instance (prevSampling) and each calls
	  * generateSampling on it concurrently — confirm that
	  * SamplingAbstractClass is safe for this, otherwise folds may race.
	  */
	 private class ICATestWithWekaProcessorThreadClass extends Thread
	 {
		 int index;     // fold number this worker processes
		 GraphClass graph;
		 GlobalClass global;
		 EvaluationClass Evaluation;   // unused field; run() builds its own local evaluator
		 ResultClass result;           // receives this fold's accuracies
		 String localClassifierName;
		 SamplingAbstractClass sampling;
		 
		 ICATestWithWekaProcessorThreadClass(int index, GraphClass graph, GlobalClass global, ResultClass result, String localClassifierName, SamplingAbstractClass sampling)
		 {
			 this.index = index;
			 this.graph = graph;
			 this.global = global;             
			 this.result = result;
			 this.localClassifierName = localClassifierName; 
			 this.sampling = sampling;
		 }

		 // NOTE(review): this overload leaves 'sampling' null and run()
		 // dereferences it unconditionally, so any caller using it would hit a
		 // NullPointerException.  Only the 6-argument constructor is used today
		 // (see TestWithWekaInParallel).
		 ICATestWithWekaProcessorThreadClass(int index, GraphClass graph, GlobalClass global, ResultClass result, String localClassifierName)
		 {
			 this.index = index;
			 this.graph = graph;
			 this.global = global;             
			 this.result = result;
			 this.localClassifierName = localClassifierName; 
		 }        

		 /** Processes fold {@code index}: sample, build CCA, run, report. */
		 public void run() 
		 {
			 String CCA_Name = ConstantVariable.CCA_ConstantVariables.ICA;
			 /*Set Sampling */
			/*
			 SamplingFactory samplingFactory = new SamplingFactory();
			 
			 Object[] args = TestParameterManager.getSamplingParametres(ConstantVariable.Sampling_Constants.RANDOM_SAMPLING_METHOD);
			 SamplingAbstractClass sampling=samplingFactory.createSampling(graph, global, ConstantVariable.Sampling_Constants.RANDOM_SAMPLING_METHOD, args);
		  */
			 EvaluationClass Evaluation = new EvaluationClass(sampling);

			 System.out.print("ICA::  "+graph.getDataSet().size());
			 result.setSampling(sampling);
			 sampling.generateSampling(graph, index);

			 System.out.print(" name : "+ sampling.getName()+ "  size: "+ sampling.getTestNodes().size());
			 sampling.getSamplingResult();

			 CollectiveClassificationAlgorithmFactory CCA_Factory = new CollectiveClassificationAlgorithmFactory(graph, global);
			 CollectiveClassificationAlgorithmClass CCA = CCA_Factory.createCCA(CCA_Name, localClassifierName, sampling);


			 System.out.println("Fold :  " + index);
			 // NOTE(review): generateSampling(graph, index) and
			 // result.setSampling(sampling) were already invoked above; this
			 // repetition looks redundant — confirm whether regenerating after
			 // createCCA is intentional before removing it.
			 sampling.generateSampling(graph, index);
			 result.setSampling(sampling);
			 CCA.Run(sampling, result);

			 double accuracy[]= Evaluation.findClasifierAccuracyForAllClasses(Evaluation.CreateConfMat(sampling.getTestNodes()));

			 for(int j=0 ; j<global.classList.size() ;j++)
			 {
				 System.out.print("  ***Result:  "+ accuracy[j] + "* *** ");
			 }

			 System.out.print("  ***Result: sampling name : "+ sampling.getName() +"  size: "+ sampling.getTestNodes().size() + "* *** ");
			 System.out.println();			 
		 }
	 }

	 @Override
	public void initialize() {
		// Intentionally empty: all ICA setup is performed inside Run(...).
		
	}

	@Override
	public void Prepare() {
		// Intentionally empty: ICA needs no separate preparation step;
		// Run(...) performs training and labelling itself.
		
	}

	@Override
	public ResultClass TestAlgorithm() {
		// Not implemented for ICA — always returns null; use the static
		// Test(...) or TestWithWeka(...) entry points instead.
		return null;
	}

	@Override
	public void report() {
		// Intentionally empty: Run(...) writes its own per-fold log files.
		
	}
}

