package ClassificationAlgorithm;


import java.util.ArrayList;
import java.util.Random;

import Definitions.GraphClass;
import Global.ConstantVariable;
import Global.GlobalClass;
import LocalClassifier.LocalClassifierInterface;
import Result.EvaluationClass;
import Result.ResultClass;
import Result.ResultClassListClass;
import Result.ResultIdentificationClass;
import Sampling.SamplingAbstractClass;
import Sampling.SamplingFactory;
import Sampling.SamplingTests;

/*****************************************************************
* Class Name:	LinkOnlyClass
* Aim: 				Tests the Link Only collective classification algorithm with the given parameters
* Algorithms: Link Only (relational/link features only, no content features)
*****************************************************************/

public class LinkOnlyClass extends CCAClass{
	
	/**
	 * Creates a Link Only collective classification algorithm instance.
	 * All state handling (graph, global context, local classifier, algorithm
	 * name) is delegated to the CCAClass superclass.
	 *
	 * @param graph      graph the algorithm operates on
	 * @param global     shared global context (e.g. class list)
	 * @param classifier local classifier used for node labelling
	 * @param name       algorithm name used for identification/reporting
	 */
	LinkOnlyClass(GraphClass graph, GlobalClass global,LocalClassifierInterface classifier, String name) 
	{
		super(graph, global, classifier, name);		
	}

	/*****************************************************************
	 * Function Name:	Run
	 * Aim:				Executes the Link Only algorithm on one sampling fold:
	 *					trains the local classifier, bootstraps the test nodes
	 *					with random labels, then iteratively relabels them using
	 *					link-based evidence until the labelling stabilises.
	 * Inputs:
	 *	currentSampling	fold definition (train/validation/test node split)
	 *	result			result object filled with accuracies and the confusion matrix
	 * Outputs:
	 *	result receives start accuracy (after the first pass) and end accuracy
	 *	(after convergence); progress is printed to stdout.
	 *****************************************************************/
	public void Run(SamplingAbstractClass currentSampling, ResultClass result)
	{
		EvaluationClass Evaluation = new EvaluationClass(currentSampling);
		int[] orderArray = initializeOrderArray(currentSampling.getTestNodes().size());

		// Visit order starts as the identity permutation; reOrder() shuffles it
		// before every collective iteration.
		for(int i=0; i< orderArray.length; i++)
		{
			orderArray[i]=i;
		}

		localClassifierSelected.initialize(graph, result);
		// Trains on the labelled nodes and finds the weight matrix W.
		localClassifierSelected.train(graph);

		// Bootstrap: every test node gets a uniformly random class label so that
		// the relational (neighbourhood) features are defined on the first pass.
		Random random = new Random();
		int sizeOfClass = global.classList.size();
		for(int i=0; i< currentSampling.getTestNodes().size() ;i++)
		{
			int randomClass= random.nextInt(sizeOfClass);
			currentSampling.setClassLabelEstimated(currentSampling.getTestNodes().get(i), randomClass);
		}

		// First labelling pass ("start" mode) over all test nodes.
		for(int i=0; i< currentSampling.getTestNodes().size() ;i++)
		{
			localClassifierSelected.evaluate(currentSampling.getTestNodes().get(i), ConstantVariable.Sampling_Constants.NODE_IS_IN_TEST_SET_FOR_THE_FOLD, "start");
		}

		double start = Evaluation.findTotalAccuracy(Evaluation.CreateConfMat(currentSampling.getTestNodes()));

		// Iterative collective labelling: relabel each test node ("soft" mode) in a
		// shuffled order; the fold is considered stable once more than 90% of the
		// labels stay unchanged for 50 consecutive iterations.
		boolean isStable= false;
		int stability=0;
		int previousLabel[]= new int[currentSampling.getTestNodes().size()];

		while(!isStable)
		{
			reOrder(orderArray);
			for(int a = 0 ; a < currentSampling.getTestNodes().size(); a++)
			{
				localClassifierSelected.evaluate( currentSampling.getTestNodes().get(orderArray[a]),ConstantVariable.Sampling_Constants.NODE_IS_IN_TEST_SET_FOR_THE_FOLD, "soft");
			}
			// Count how many labels survived this iteration unchanged.
			int constant=0;
			for(int i=0;  i< currentSampling.getTestNodes().size(); i++)
			{
				if(previousLabel[i]== currentSampling.getClassLabelEstimated(currentSampling.getTestNodes().get(i)))
					constant++;
				previousLabel[i]=currentSampling.getClassLabelEstimated(currentSampling.getTestNodes().get(i));
			}
			if((double)constant/currentSampling.getTestNodes().size() > 0.9)
			{
				stability++;
			}
			else
				stability=0;
			if(stability >= 50)
				isStable= true;
		}

		double end = Evaluation.findTotalAccuracy(Evaluation.CreateConfMat(currentSampling.getTestNodes()));

		result.getEvaluationResult().createConfMatAndPrepareTheResults();

		System.out.println("Link Only Start: "+ start +"  End "+ end);

		result.setAccuracy(end);
		result.setStartAccuracy(start);
		result.setEndAccuracy(end);
	}
	
	/*****************************************************************
	 * Function Name:	BecomeStableForTheTest
	 * Aim:				Runs only the iterative (collective) labelling phase on the
	 *					test nodes until the labelling stabilises. Unlike Run(), no
	 *					training and no random bootstrap is performed, so the local
	 *					classifier is expected to be trained already.
	 * Inputs:
	 *	currentSampling	fold definition (test nodes to stabilise)
	 * Outputs:
	 *	estimated labels of the test nodes are updated in place; start/end
	 *	accuracies are printed to stdout.
	 *****************************************************************/
	public 	void BecomeStableForTheTest(SamplingAbstractClass currentSampling)
	{	
		System.out.println("Stability start...");
		EvaluationClass Evaluation = new EvaluationClass(currentSampling);
		int[] orderArray = initializeOrderArray(currentSampling.getTestNodes().size());

		// Identity permutation; reOrder() shuffles it before every iteration.
		for(int i=0; i< orderArray.length; i++)
		{
			orderArray[i]=i;
		}

		// First labelling pass ("start" mode) over all test nodes.
		for(int i=0; i< currentSampling.getTestNodes().size() ;i++)
		{
			localClassifierSelected.evaluate(currentSampling.getTestNodes().get(i), ConstantVariable.Sampling_Constants.NODE_IS_IN_TEST_SET_FOR_THE_FOLD, "start");
		}		

		double start = Evaluation.findTotalAccuracy(Evaluation.CreateConfMat(currentSampling.getTestNodes()));

		// Same convergence rule as Run(): stable once more than 90% of the labels
		// stay unchanged for 50 consecutive iterations.
		boolean isStable= false;
		int stability=0;
		int previousLabel[]= new int[currentSampling.getTestNodes().size()];

		while(!isStable)
		{
			reOrder(orderArray);
			for(int a = 0 ; a < currentSampling.getTestNodes().size(); a++)
			{
				localClassifierSelected.evaluate( currentSampling.getTestNodes().get(orderArray[a]),ConstantVariable.Sampling_Constants.NODE_IS_IN_TEST_SET_FOR_THE_FOLD, "soft");
			}
			// Count labels that survived this iteration unchanged.
			int constant=0;
			for(int i=0;  i< currentSampling.getTestNodes().size(); i++)
			{
				if(previousLabel[i]== currentSampling.getClassLabelEstimated(currentSampling.getTestNodes().get(i)))
					constant++;
				previousLabel[i]=currentSampling.getClassLabelEstimated(currentSampling.getTestNodes().get(i));
			}
			if((double)constant/currentSampling.getTestNodes().size() > 0.9)
			{
				stability++;
			}
			else
				stability=0;
			if(stability >= 50)
			{
				System.out.println("Threshold OK");
				isStable= true;
			}
		}

		double end = Evaluation.findTotalAccuracy(Evaluation.CreateConfMat(currentSampling.getTestNodes()));

		System.out.println("Link Only Start: "+ start +"  End "+ end);
	}


	/**
	 * Runs the Link Only algorithm sequentially over the given number of folds,
	 * using a freshly created snowball sampling and a logistic-regression local
	 * classifier, and prints per-class accuracies for every fold.
	 *
	 * @param graph         graph to classify
	 * @param global        shared global context
	 * @param numberOfFolds how many sampling folds to evaluate
	 */
	public void Test(GraphClass graph, GlobalClass global, int numberOfFolds)
	{		 			 
		String dataset= "CORA";//ConstantVariable.DataSet_Constants.CORA;

		ArrayList<ResultClass> foldResults = new ArrayList<ResultClass>();

		/* Build the sampling strategy once and regenerate it per fold. */
		SamplingFactory samplingFactory = new SamplingFactory();
		SamplingAbstractClass activeSampling = samplingFactory.createSampling(SamplingTests.createDummyParametresForSampling(ConstantVariable.Sampling_Constants.SNOWBALL_SAMPLING_METHOD), graph, global);

		EvaluationClass evaluator = new EvaluationClass(activeSampling);

		CollectiveClassificationAlgorithmFactory algorithmFactory = new CollectiveClassificationAlgorithmFactory(graph, global);
		CollectiveClassificationAlgorithmClass algorithm = algorithmFactory.createCCA(ConstantVariable.CCA_ConstantVariables.LINK_ONLY, ConstantVariable.LocalClassifier_Constants.LOGISTIC_REGRESSION, activeSampling);

		for(int fold = 0; fold < numberOfFolds; fold++)
		{
			ResultIdentificationClass identifier = new ResultIdentificationClass();
			identifier.setDataset(dataset);
			identifier.setFold(fold);
			identifier.setCCA(ConstantVariable.CCA_ConstantVariables.LINK_ONLY);
			ResultClass foldResult = new ResultClass(identifier);
			System.out.println("Fold :  " + fold);
			activeSampling.generateSampling(graph, fold);
			foldResult.setSampling(activeSampling);
			algorithm.Run(activeSampling, foldResult);

			double perClassAccuracy[] = evaluator.findClasifierAccuracyForAllClasses(evaluator.CreateConfMat(activeSampling.getTestNodes()));

			for(int c = 0; c < global.classList.size(); c++)
			{
				System.out.print("  ***Result:  "+ perClassAccuracy[c] + "* *** ");
			}

			System.out.println();
			foldResults.add(foldResult);
		}
	}
	
	/**
	 * Runs the Link Only test folds in parallel, scheduling batches of worker
	 * threads bounded by the configured CPU-core limit and joining each batch
	 * before starting the next one.
	 *
	 * @param graph         graph to classify
	 * @param global        shared global context
	 * @param classifier    local classifier instance used by the algorithm
	 * @param numberOfFolds total number of sampling folds to evaluate
	 */
	public static void TestInParallel(GraphClass graph, GlobalClass global, LocalClassifierInterface classifier, int numberOfFolds)
	{		 			 
		LinkOnlyClass LinkOnlyClassifier = new LinkOnlyClass(graph, global, classifier, ConstantVariable.CCA_ConstantVariables.LINK_ONLY);

		System.out.println("Link Only Test Start ");

		int numberOfCores = GlobalClass.RunningValues.maximumNumberOfCPUCoresToBeUsedForTooMuchRamRequiringAndTimeConsumingTasks;
		LinkOnlyTestProcessorThreadClass[] LinkOnlyTestProcessorPool = new LinkOnlyTestProcessorThreadClass[numberOfCores];

		int numberOfCompletedFolds = 0;
		// Never start more threads than folds remain or cores are allowed.
		int numberOfNeededThreads = Math.min(numberOfFolds, numberOfCores);

		String datasetName = "CORA";
		// BUGFIX: the identifier previously recorded CONTENT_ONLY although this is a Link Only test.
		String CCA_Name = ConstantVariable.CCA_ConstantVariables.LINK_ONLY;
		String samplingName = ConstantVariable.Sampling_Constants.SNOWBALL_SAMPLING_METHOD;

		while(numberOfCompletedFolds < numberOfFolds)
		{
			if(numberOfFolds - numberOfCompletedFolds < numberOfCores)
				numberOfNeededThreads = numberOfFolds - numberOfCompletedFolds;

			for(int i = 0; i < numberOfNeededThreads; i++)
			{
				ResultIdentificationClass identifier = new ResultIdentificationClass();
				identifier.setDataset(datasetName);
				identifier.setCCA(CCA_Name);
				identifier.setSampling(samplingName);
				// BUGFIX: record the global fold number (matches the worker's fold
				// index), not the batch-local loop index.
				identifier.setFold(numberOfCompletedFolds + i);

				ResultClass result = new ResultClass(identifier);

				LinkOnlyTestProcessorPool[i] = LinkOnlyClassifier.new LinkOnlyTestProcessorThreadClass(numberOfCompletedFolds + i, graph, global, result);
				LinkOnlyTestProcessorPool[i].start();		
			}

			// Join the whole batch before scheduling the next one.
			for(int i = 0; i < numberOfNeededThreads; i++)
			{				
				try
				{
					LinkOnlyTestProcessorPool[i].join();
				}
				catch(Exception e)
				{
					e.printStackTrace();
				}	
			}			
			numberOfCompletedFolds += numberOfNeededThreads;
		}		
	}
	
	/**
	 * Worker thread that evaluates one Link Only fold: (optionally) builds a
	 * default snowball sampling, generates the fold, runs the algorithm, and
	 * prints the per-class accuracies.
	 *
	 * NOTE: deliberately a non-static inner class — it is instantiated via
	 * {@code LinkOnlyClassifier.new ...} in TestInParallel().
	 */
	private class LinkOnlyTestProcessorThreadClass extends Thread
    {
        int index;                      // fold number this worker processes
        GraphClass graph;
        GlobalClass global;
        EvaluationClass Evaluation;
        ResultClass result;
        SamplingAbstractClass sampling; // may be null; created lazily in run()

        LinkOnlyTestProcessorThreadClass(int index, GraphClass graph, GlobalClass global, ResultClass result, SamplingAbstractClass sampling)
        {
             this.index = index;
             this.graph = graph;
             this.global = global;             
             this.result = result;
             this.sampling = sampling;
        }        

        LinkOnlyTestProcessorThreadClass(int index, GraphClass graph, GlobalClass global, ResultClass result)
        {
             this.index = index;
             this.graph = graph;
             this.global = global;             
             this.result = result;
        }        

        public void run() 
        {
			/* Build a default snowball sampling when none was injected. */
        	if(sampling == null)
        	{
        		SamplingFactory samplingFactory = new SamplingFactory();
        		this.sampling = samplingFactory.createSampling(SamplingTests.createDummyParametresForSampling(ConstantVariable.Sampling_Constants.SNOWBALL_SAMPLING_METHOD), graph, global);
        	}

			// BUGFIX: generateSampling() was called twice for the same fold; once is enough.
			sampling.generateSampling(graph, index);

			Evaluation = new EvaluationClass(sampling);  

			CollectiveClassificationAlgorithmFactory CCA_Factory = new CollectiveClassificationAlgorithmFactory(graph, global);
			CollectiveClassificationAlgorithmClass CCA = CCA_Factory.createCCA(ConstantVariable.CCA_ConstantVariables.LINK_ONLY, ConstantVariable.LocalClassifier_Constants.LOGISTIC_REGRESSION, sampling);

			System.out.println("Fold :  " + index);
			result.setSampling(sampling);
			CCA.Run(sampling, result);

			double accuracy[] = Evaluation.findClasifierAccuracyForAllClasses(Evaluation.CreateConfMat(sampling.getTestNodes()));

			for(int j = 0; j < global.classList.size(); j++)
			{
				System.out.print("  ***Result:  "+ accuracy[j] + "* *** ");
			}

			System.out.println();
        }
    }
	
	/**
	 * Runs the Link Only algorithm sequentially with a Weka-backed local
	 * classifier and collects one ResultClass per fold.
	 *
	 * @param graph               graph to classify
	 * @param global              shared global context
	 * @param numberOfFolds       number of sampling folds to evaluate
	 * @param prevSampling        reusable sampling; when null a fresh snowball sampling is created
	 * @param localClassifierName Weka classifier name; defaults to the generic Weka classifier when null
	 * @return list of per-fold results
	 */
	public ResultClassListClass TestWithWeka(GraphClass graph, GlobalClass global, int numberOfFolds, SamplingAbstractClass prevSampling, String localClassifierName)
	{		 			 
		String dataset= ConstantVariable.DataSet_Constants.DATASET_NAMES[GlobalClass.RunningValues.executionFor];//ConstantVariable.DataSet_Constants.CORA;
		String samplingName = ConstantVariable.Sampling_Constants.SNOWBALL_SAMPLING_METHOD;

		ResultClassListClass resultsList = new ResultClassListClass();

		if(localClassifierName == null)
			localClassifierName = ConstantVariable.WekaClassifiers.GENERIC_WEKA_CLASSIFIER; 

		/* Reuse the supplied sampling when available, otherwise build one. */
		SamplingAbstractClass activeSampling;
		if(prevSampling == null)
		{
			SamplingFactory samplingFactory = new SamplingFactory();
			activeSampling = samplingFactory.createSampling(SamplingTests.createDummyParametresForSampling(samplingName), graph, global);
		}
		else
		{
			activeSampling = prevSampling;
		}

		EvaluationClass evaluator = new EvaluationClass(activeSampling);

		CollectiveClassificationAlgorithmFactory algorithmFactory = new CollectiveClassificationAlgorithmFactory(graph, global);
		CollectiveClassificationAlgorithmClass algorithm = algorithmFactory.createCCA(ConstantVariable.CCA_ConstantVariables.LINK_ONLY, localClassifierName, activeSampling);

		for(int fold = 0; fold < numberOfFolds; fold++)
		{
			ResultIdentificationClass identifier = new ResultIdentificationClass();
			identifier.setDataset(dataset);
			identifier.setFold(fold);
			identifier.setCCA(ConstantVariable.CCA_ConstantVariables.LINK_ONLY);
			ResultClass foldResult = new ResultClass(identifier);
			System.out.println("Fold :  " + fold);
			// NOTE(review): the sampling is regenerated per fold only when a previous
			// sampling was supplied; a freshly created one is never regenerated here.
			// Confirm this is intended — the condition looks like it may be inverted.
			if(prevSampling != null)
				activeSampling.generateSampling(graph, fold);
			foldResult.setSampling(activeSampling);
			algorithm.Run(activeSampling, foldResult);

			double perClassAccuracy[] = evaluator.findClasifierAccuracyForAllClasses(evaluator.CreateConfMat(activeSampling.getTestNodes()));

			for(int c = 0; c < global.classList.size(); c++)
			{
				System.out.print("  ***Result:  "+ perClassAccuracy[c] + "* *** ");
			}

			System.out.println();
			resultsList.addToResultsList(foldResult);
		}
		return resultsList;
	}
	
	/**
	 * Static convenience entry point: builds a Link Only algorithm instance with
	 * the requested Weka classifiers and delegates to TestWithWekaInParallel().
	 *
	 * @return list of per-fold results produced by the parallel run
	 */
	public static ResultClassListClass TestWithWekaInParallelStart(GraphClass graph, GlobalClass global, int numberOfFolds, SamplingAbstractClass prevSampling, String localClassifierName, String secondLevelLocalClassifierName)
	{			
		CollectiveClassificationAlgorithmFactory factory = new CollectiveClassificationAlgorithmFactory(graph, global);
		CollectiveClassificationAlgorithmClass algorithm = factory.createCCA(ConstantVariable.CCA_ConstantVariables.LINK_ONLY, localClassifierName, null);
		algorithm.getLocalClassifierSelected().changeSecondLevelClassifierToBeUsed(secondLevelLocalClassifierName);
		return ((LinkOnlyClass) algorithm).TestWithWekaInParallel(graph, global, numberOfFolds, prevSampling, localClassifierName);
	}
	
	/**
	 * Runs the Link Only folds in parallel with a Weka-backed local classifier,
	 * scheduling batches of worker threads bounded by the configured core limit.
	 *
	 * @param graph               graph to classify
	 * @param global              shared global context
	 * @param numberOfFolds       total number of folds to evaluate
	 * @param prevSampling        sampling passed to each worker (workers create one if null)
	 * @param localClassifierName Weka classifier name forwarded to the workers
	 * @return list of per-fold results (populated by the worker threads)
	 */
	public ResultClassListClass TestWithWekaInParallel(GraphClass graph, GlobalClass global, int numberOfFolds, SamplingAbstractClass prevSampling, String localClassifierName)
	{	 			 
		System.out.println("Link Only Weka Parallel Test Start ");		

		int numberOfCores = GlobalClass.RunningValues.maximumNumberOfCPUCoresToBeUsedForTooMuchRamRequiringAndTimeConsumingTasks;
		LinkOnlyTestWithWekaProcessorThreadClass[] LinkOnlyTestWithWekaProcessorPool = new LinkOnlyTestWithWekaProcessorThreadClass[numberOfCores];

		int numberOfCompletedFolds = 0;
		// BUGFIX: the initial batch size was derived from the unrelated constant
		// NUMBER_OF_FOLDS_FOR_CONTENT_ONLY instead of the numberOfFolds parameter,
		// which could start more threads than there are folds.
		int numberOfNeededThreads = Math.min(numberOfFolds, numberOfCores);

		String datasetName = ConstantVariable.DataSet_Constants.DATASET_NAMES[GlobalClass.RunningValues.executionFor];
		// BUGFIX: the identifier previously recorded CONTENT_ONLY although this is a Link Only test.
		String CCA_Name = ConstantVariable.CCA_ConstantVariables.LINK_ONLY;
		String samplingName = ConstantVariable.Sampling_Constants.SNOWBALL_SAMPLING_METHOD;

		ResultClassListClass resultsList = new ResultClassListClass();

		while(numberOfCompletedFolds < numberOfFolds)
		{
			if(numberOfFolds - numberOfCompletedFolds < numberOfCores)
				numberOfNeededThreads = numberOfFolds - numberOfCompletedFolds;

			for(int i = 0; i < numberOfNeededThreads; i++)
			{
				ResultIdentificationClass identifier = new ResultIdentificationClass();
				identifier.setDataset(datasetName);
				identifier.setCCA(CCA_Name);
				identifier.setSampling(samplingName);
				identifier.setFold(numberOfCompletedFolds + i);

				ResultClass result = new ResultClass(identifier);
				resultsList.addToResultsList(result);

				LinkOnlyTestWithWekaProcessorPool[i] = new LinkOnlyTestWithWekaProcessorThreadClass(numberOfCompletedFolds + i, graph, global, result, localClassifierName, prevSampling);
				LinkOnlyTestWithWekaProcessorPool[i].start();		
			}

			// Join the whole batch before scheduling the next one.
			for(int i = 0; i < numberOfNeededThreads; i++)
			{				
				try
				{
					LinkOnlyTestWithWekaProcessorPool[i].join();
				}
				catch(Exception e)
				{
					e.printStackTrace();
				}	
			}			
			numberOfCompletedFolds += numberOfNeededThreads;
		}
		return resultsList;
	}
	
	/**
	 * Worker thread that evaluates one Link Only fold with a Weka-backed local
	 * classifier: ensures a sampling exists, generates the fold, runs the
	 * algorithm, and prints the per-class accuracies.
	 */
	private class LinkOnlyTestWithWekaProcessorThreadClass extends Thread
    {
        int index;                      // fold number this worker processes
        GraphClass graph;
        GlobalClass global;
        EvaluationClass Evaluation;
        ResultClass result;
        String localClassifierName;     // Weka classifier to use
        SamplingAbstractClass sampling; // may be null; created lazily in run()

        LinkOnlyTestWithWekaProcessorThreadClass(int index, GraphClass graph, GlobalClass global, ResultClass result, String localClassifierName, SamplingAbstractClass sampling)
        {
            this.index = index;
            this.graph = graph;
            this.global = global;             
            this.result = result;
            this.localClassifierName = localClassifierName;
        	this.sampling = sampling;
        }

        LinkOnlyTestWithWekaProcessorThreadClass(int index, GraphClass graph, GlobalClass global, ResultClass result, String localClassifierName)
        {
             this.index = index;
             this.graph = graph;
             this.global = global;             
             this.result = result;
             this.localClassifierName = localClassifierName;
        }        

        public void run() 
        {
			// BUGFIX: the 5-arg constructor leaves sampling null and run() formerly
			// dereferenced it unconditionally (NPE). Mirror the null-guard used by
			// LinkOnlyTestProcessorThreadClass and build a default snowball sampling.
        	if(sampling == null)
        	{
        		SamplingFactory samplingFactory = new SamplingFactory();
        		this.sampling = samplingFactory.createSampling(SamplingTests.createDummyParametresForSampling(ConstantVariable.Sampling_Constants.SNOWBALL_SAMPLING_METHOD), graph, global);
        	}

			// BUGFIX: generateSampling() was called twice for the same fold; once is enough.
			sampling.generateSampling(graph, index);
			 System.out.print(" name : "+ sampling.getName()+ "  size: "+ sampling.getTestNodes().size());

			Evaluation = new EvaluationClass(sampling);  

			CollectiveClassificationAlgorithmFactory CCA_Factory = new CollectiveClassificationAlgorithmFactory(graph, global);
			CollectiveClassificationAlgorithmClass CCA = CCA_Factory.createCCA(ConstantVariable.CCA_ConstantVariables.LINK_ONLY, localClassifierName, sampling);

			System.out.println("Fold :  " + index);

			result.setSampling(sampling);
			CCA.Run(sampling, result);

			double accuracy[] = Evaluation.findClasifierAccuracyForAllClasses(Evaluation.CreateConfMat(sampling.getTestNodes()));

			for(int j = 0; j < global.classList.size(); j++)
			{
				System.out.print("  ***Result:  "+ accuracy[j] + "* *** ");
			}

			System.out.println();
        }
    }

	@Override
	public void initialize() {
		// TODO Auto-generated method stub
		// NOTE(review): intentionally empty — Run() performs its own setup via
		// localClassifierSelected.initialize(); confirm nothing belongs here.
		
	}


	@Override
	public void Prepare() {
		// TODO Auto-generated method stub
		// NOTE(review): unimplemented abstract-method stub; no preparation is
		// currently required before Run() for the Link Only algorithm.
		
	}


	@Override
	public ResultClass TestAlgorithm() {
		// TODO Auto-generated method stub
		// NOTE(review): unimplemented — always returns null. Callers must not rely
		// on this; use Test()/TestWithWeka() variants instead.
		return null;
	}


	@Override
	public void report() {
		// TODO Auto-generated method stub
		// NOTE(review): unimplemented — results are currently reported via
		// System.out printing inside Run() and the test methods.
		
	}
}


			

		
	

