package ClassificationAlgorithm;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Random;

import Analysis.PengMIClass;
import ClassificationAlgorithmUtil.CCAUtil;
import Definitions.GraphClass;
import Definitions.NodeClass;
import Global.ConstantVariable;
import Global.GlobalClass;
import InputPreparer.InputPreparationMethodInterface;
import InputPreparer.InputPreparerTestClass;
import LocalClassifier.LocalClassifierInterface;
import LocalClassifier.LocalClassifierTest;
import Result.EvaluationClass;
import Result.ResultClass;
import Result.ResultClassListClass;
import Result.ResultIdentificationClass;
import Sampling.SamplingAbstractClass;
import Utility.FileUtilityClass;
import Utility.WorkerUtilityClass;


public class CoTrainAlgorithmClass extends CCAClass
{
	private float LAMDA = (float)0.5;
	
	/**
	 * Builds a co-training collective classification algorithm bound to the
	 * given graph and global context. The local-classifier slot is null because
	 * the per-fold worker threads construct their own classifier ensemble from
	 * selected feature subspaces.
	 */
	CoTrainAlgorithmClass(GraphClass graph, GlobalClass global)
	{
		super(graph, global, null, ConstantVariable.CCA_ConstantVariables.CO_TRAIN );
	}

	/***
	 * Runs a single fold of the co-training algorithm and writes the resulting
	 * per-class accuracies to an iteration-results file.
	 *
	 * @param graph    the input graph / dataset
	 * @param global   global execution context
	 * @param sampling sampling strategy; its split for this fold is (re)generated here
	 * @param index    zero-based fold index
	 * @return the populated result object for this fold
	 * @throws IOException if the iteration-results file cannot be written
	 */
	public static ResultClass TestCOTrain(GraphClass graph, GlobalClass global, SamplingAbstractClass sampling, int index) throws IOException
	{
		String classifierName = ConstantVariable.WekaClassifiers.GENERIC_WEKA_CLASSIFIER;
		String algorithmName = ConstantVariable.CCA_ConstantVariables.CO_TRAIN;

		CollectiveClassificationAlgorithmFactory factory = new CollectiveClassificationAlgorithmFactory(graph, global);
		CollectiveClassificationAlgorithmClass algorithm = factory.createCCA(algorithmName, classifierName, sampling);

		EvaluationClass evaluation = new EvaluationClass(sampling);

		ResultIdentificationClass identifier = new ResultIdentificationClass();
		identifier.setFold(index);

		ResultClass foldResult = new ResultClass(identifier);
		sampling.generateSampling(graph, index);
		foldResult.setSampling(sampling);

		algorithm.Run(sampling, foldResult);

		// Per-class accuracy measured over this fold's test nodes.
		double[] accuracy = evaluation.findClasifierAccuracyForAllClasses(evaluation.CreateConfMat(sampling.getTestNodes()));

		String outputFileName = "COTRAIN_FeatureEnrichement_"+ ConstantVariable.FEATURE_ENRICHMENT_PROPERTIES.selected +"_K_"+ConstantVariable.EXECUTION_FOR_COTRAIN.numberOfClassifier+"_"+ ConstantVariable.WekaClassifiers.SELECTED_WEKA_CLASSIFIER +"_Sampling_NumberOfFeature"+ConstantVariable.EXECUTION_FOR_COTRAIN.numberOfFeatureSize+ foldResult.getIdentifier().getFold()+".txt";
		CCAUtil.WriteIterationResultsToFile(global, outputFileName, null, accuracy);

		return foldResult;
	}
	
	/**
	 * Runs {@link #TestCOTrain} once per fold and collects every fold's result.
	 *
	 * @param graph    the input graph / dataset
	 * @param global   global execution context
	 * @param sampling sampling strategy shared across folds
	 * @param fold     number of folds to execute
	 * @return list with one result per fold, in fold order
	 * @throws IOException if a fold's results file cannot be written
	 */
	public static ResultClassListClass TestCoTrain(GraphClass graph, GlobalClass global, SamplingAbstractClass sampling, int fold) throws IOException
	{
		ResultClassListClass allFoldResults = new ResultClassListClass();

		for (int foldIndex = 0; foldIndex < fold; foldIndex++)
		{
			allFoldResults.addToResultsList(TestCOTrain(graph, global, sampling, foldIndex));
		}

		return allFoldResults;
	}

	/**
	 * Static entry point for the parallel co-training test driver: constructs a
	 * co-train instance and delegates to {@link #TestWithWekaInParallel}.
	 *
	 * NOTE(review): args[4] is assumed to hold the number of folds (Integer);
	 * the full args array is forwarded untouched to the worker threads, whose
	 * constructor reads args[0]=output-file prefix, args[1]=#features,
	 * args[2]=#classifiers. Confirm the layout against callers.
	 */
	public static ResultClassListClass TestWithWekaInParallelStart(GraphClass graph, GlobalClass global, SamplingAbstractClass sampling, Object... args)
	 {
		 CoTrainAlgorithmClass  CCA = new CoTrainAlgorithmClass(graph, global) ;
		 return CCA.TestWithWekaInParallel(graph, global, sampling, (Integer)args[4], args);
	 }

	 /**
	  * Runs the folds in parallel batches of worker threads, at most
	  * {@code maximumNumberOfCPUCoresToBeUsedForTooMuchRamRequiringAndTimeConsumingTasks}
	  * threads at a time, joining each batch before starting the next.
	  *
	  * NOTE(review): the ResultClass objects added to the returned list are not
	  * passed to the worker threads; the threads write their own output files,
	  * so the returned results appear to carry only identification data —
	  * confirm whether callers expect populated results.
	  *
	  * @param graph           input graph / dataset shared by all folds
	  * @param global          global execution context
	  * @param currentSampling sampling object shared by all worker threads
	  * @param numberOfFolds   total number of folds to process
	  * @param args            forwarded to each worker thread (see its constructor)
	  * @return one (identification-only) result per fold
	  */
	 public ResultClassListClass TestWithWekaInParallel(GraphClass graph, GlobalClass global, SamplingAbstractClass currentSampling, int numberOfFolds, Object... args )
	 {		 			 

		 CoTrainTestWithWekaProcessorThreadClass[] CoTrainTestWithWekaProcessorPool;
		 
		 int numberOfCores;
		 int numberOfCompletedFolds=0;
		 int numberOfNeededThreads=0;

		 numberOfCores = GlobalClass.RunningValues.maximumNumberOfCPUCoresToBeUsedForTooMuchRamRequiringAndTimeConsumingTasks;
		 CoTrainTestWithWekaProcessorPool = new CoTrainTestWithWekaProcessorThreadClass[numberOfCores];

		 // First batch size: all folds if they fit on the available cores.
		 if(numberOfFolds < numberOfCores)
		 {
			 numberOfNeededThreads = numberOfFolds; 
		 } 
		 else
		 {
			 numberOfNeededThreads = numberOfCores;
		 }
		 String datasetName = ConstantVariable.DataSet_Constants.DATASET_NAMES[GlobalClass.RunningValues.executionFor];
		 String CCA_Name = ConstantVariable.CCA_ConstantVariables.CO_TRAIN;

		 ResultClassListClass resultsList = new ResultClassListClass();

		 // Process folds batch by batch; the last batch may be smaller.
		 while(numberOfCompletedFolds<numberOfFolds)
		 {
			 if(numberOfFolds-numberOfCompletedFolds<numberOfCores)
				 numberOfNeededThreads = numberOfFolds-numberOfCompletedFolds;
			 
			 for(int i=0; i<numberOfNeededThreads; i++)
			 {
				 ResultIdentificationClass identifier = new ResultIdentificationClass();
				 identifier.setDataset(datasetName);
				 identifier.setCCA(CCA_Name);
				 identifier.setFold(numberOfCompletedFolds+i);
              
				 ResultClass result = new ResultClass(identifier);

				 resultsList.addToResultsList(result);
				 
				 
				 CoTrainTestWithWekaProcessorPool[i] = new CoTrainTestWithWekaProcessorThreadClass(graph, global, currentSampling, numberOfCompletedFolds+i, args);
				 CoTrainTestWithWekaProcessorPool[i].start();		
			 }

			 // Wait for the whole batch before launching the next one.
			 for(int i=0; i<numberOfNeededThreads; i++)
			 {				
				 try
				 {
					 CoTrainTestWithWekaProcessorPool[i].join();
					 
				 }
				 catch(Exception e)
				 {
					 // NOTE(review): an InterruptedException is swallowed here
					 // without restoring the interrupt flag
					 // (Thread.currentThread().interrupt()).
					 e.printStackTrace();
				 }	
			 }			
			 numberOfCompletedFolds +=numberOfNeededThreads;
		 }
		 return resultsList;
	 }

     private class CoTrainTestWithWekaProcessorThreadClass extends Thread
	 {
    	 private String outputFileName;
		 private int index;     
		 private GraphClass graph;
		 private GlobalClass global;
		 private String localClassifierName;
		 private Random rand;	
		 private ArrayList <LocalClassifierInterface> classifierList;
		 private float iterationTestAccuracyResult[][];
		 
		 private int iterationNumberForCoTraining;
		 private int numberOfFeaturesToBeSelected;
		 private int numberOfClassifiers;
		 SamplingAbstractClass currentSampling;
  		 double[] relevanceProbobilities;
			
		 
		 /**
		  * Builds one fold worker.
		  *
		  * Expected varargs layout (as read below — confirm against callers):
		  * args[0]=output file-name prefix (String), args[1]=number of features
		  * to select per classifier (Integer), args[2]=number of classifiers K
		  * (Integer).
		  *
		  * Side effect: (re)generates the fold split on the shared sampling
		  * object for the given fold index.
		  */
		 CoTrainTestWithWekaProcessorThreadClass(GraphClass graph, GlobalClass global, SamplingAbstractClass currentSampling, int index, Object...args)
		 {


    		 this.currentSampling = currentSampling;
    		 currentSampling.generateSampling(graph, index);
    		 this.index = index;
			 this.graph = graph;
			 this.global = global;             
			 this.iterationNumberForCoTraining =  ConstantVariable.EXECUTION_FOR_COTRAIN.iterationNumberForCoTraining;
			 this.localClassifierName = ConstantVariable.EXECUTION_FOR_COTRAIN.classifierName;			 

			 this.outputFileName = (String)args[0] + "_ClassifierName_"+this.localClassifierName +"_Fold_"+index;
			 this.numberOfFeaturesToBeSelected =   (Integer)args[1]; 
			 this.numberOfClassifiers =  (Integer)args[2];			 

			 this.classifierList = new ArrayList <LocalClassifierInterface>();
			 
			 // Row width per iteration: classSize^2 confusion-matrix cells
			 // + K per-classifier accuracies + 3 header stats + 1 timing slot
			 // (filled in run()).
			 this.iterationTestAccuracyResult = new float[iterationNumberForCoTraining][(global.classSize*global.classSize+ numberOfClassifiers+3 +1)];
			 
			 // Seeded with the fold index so feature selection is reproducible per fold.
			 this.rand = new Random(index);
			 
			 int numberOfFeature = graph.getDataSet().get(0).getContentList().get(0).getAttributeList().size();

			 try {
				relevanceProbobilities = calculateRelevancesProbobilities(currentSampling,numberOfFeature);
			} catch (Exception e) {
				// NOTE(review): failure is swallowed, leaving relevanceProbobilities
				// null; run() would then fail inside SelectFeatureSubSets.
				// Consider failing fast instead.
				e.printStackTrace();
			}

		   }  

			/**
			 * Executes one co-training fold:
			 *  1. selects K feature subspaces via tournament selection,
			 *  2. builds one local classifier per subspace,
			 *  3. iterates: retrain the ensemble, relabel training nodes by
			 *     majority vote, label test nodes, and move the most confidently
			 *     labeled test nodes into the training set,
			 *  4. writes per-iteration accuracy / confusion statistics to files.
			 *
			 * All exceptions are caught and printed so the thread never
			 * propagates a failure to the pool that joins it.
			 */
			public void run()
			{
				try
				{
					long startTimeOfRun = System.nanoTime();  // NOTE(review): never read afterwards

					EvaluationClass eval = new EvaluationClass(currentSampling);					
					// Snapshots taken before co-training starts moving test nodes
					// into the training set.
					ArrayList<NodeClass> originalTrainNodes = currentSampling.getTrainAndValNodes();
					ArrayList<NodeClass> testNodes =currentSampling.getTestNodes();
					// Per classifier: [0]=mean relevance, [1]=std relevance,
					// [2]=mean redundancy, [3]=std redundancy of its subspace.
					double relevanceRedundancyOfSelected[][] =  new double [this.numberOfClassifiers][4];
					int[][] selectedFeatures = SelectFeatureSubSets(relevanceProbobilities, relevanceRedundancyOfSelected);
					String redundancyFileName = this.outputFileName + "_COUNT_REDUNDANCIES.txt";
					FileUtilityClass.WriteMatrixToFile(relevanceRedundancyOfSelected, redundancyFileName);

					initializeClassifierWithSelectedFeaturesSubSpaces(selectedFeatures, currentSampling);
					
					for (int run = 0; run< this.iterationNumberForCoTraining ;run++ )
					{
						long startiterationTime = System.nanoTime();
						// Stop early once every test node has been absorbed into
						// the training set.
						if (currentSampling.getTestNodes().size()>0)
						{
							// Retrain each ensemble member on the (growing) training set.
							for (int i=0 ; i<classifierList.size();i++)
							{
								classifierList.get(i).setSampling(currentSampling);
								classifierList.get(i).train(graph);			
							}

							GetCombinationProbabilitiesOfCOTrain(currentSampling,currentSampling.getTrainNodes());
							

							float[] classifierAccuracies = new float[numberOfClassifiers];
							double[][] combinationProbabilityOfTest = getClassifierResultsForTestNodes(currentSampling,currentSampling.getTestNodes(), classifierAccuracies);
							// Promote the most confidently labeled test nodes into training.
							choseTestDataSetMoveToTrainSet(currentSampling, combinationProbabilityOfTest);

							double testAccuracy = CCAUtil.findAccuracy(currentSampling ,testNodes);							
							int currentIndex= 0;
							
							// Row layout: remaining test-set size, train accuracy,
							// test accuracy, per-classifier accuracies, flattened
							// confusion matrix, iteration time.
							iterationTestAccuracyResult[run][currentIndex++] = (float)currentSampling.getTestNodes().size();
							iterationTestAccuracyResult[run][currentIndex++] = (float)CCAUtil.findAccuracy(currentSampling, originalTrainNodes);
							iterationTestAccuracyResult[run][currentIndex++] = (float)testAccuracy;

							for(float f : classifierAccuracies)
							{
								 iterationTestAccuracyResult[run][currentIndex] = f;
								 currentIndex++;
							}
							
			
							int[][] confmat= CCAUtil.CreateConfMat(currentSampling, testNodes, global.classSize) ;
							
							for(int row = 0 ; row< confmat.length; row++)
							{
								for(int value:  confmat[row])
								{
									iterationTestAccuracyResult[run][currentIndex] = (float)value;
									currentIndex++;
								}
							}
		
							// Recomputed over the REMAINING test nodes (the earlier
							// value used the original, full test list).
							testAccuracy = CCAUtil.findAccuracy(currentSampling, currentSampling.getTestNodes());
							long endOfIteration = System.nanoTime();

							iterationTestAccuracyResult[run][currentIndex] = endOfIteration - startiterationTime;
							
							double[] acc = eval.findClasifierAccuracyForAllClasses(eval.createConfMat());
							
							String name = this.outputFileName+"TestAccuracyIterations.txt";
							String info = this.outputFileName+ ".txt" ;
							CCAUtil.writeFile(name, info, iterationTestAccuracyResult, testAccuracy, acc);
			
						}
						else
						{
							break;
						}
					}

					// Restore the full test set so the final accuracy is measured
					// over every originally held-out node.
					for(int i=0; i<testNodes.size();i++)
					{
						currentSampling.addNodeToTestSet(testNodes.get(i));
					}
					double testAccuracy = CCAUtil.findAccuracy(currentSampling, currentSampling.getTestNodes());
					double[] acc = eval.findClasifierAccuracyForAllClasses(eval.createConfMat());
					
					String name = this.outputFileName + "_lamda_"+LAMDA+"_.txt";
					String info = this.outputFileName + "_.lamda_"+LAMDA+"_.txt" ;
					
					long endOfTest = System.nanoTime();  // NOTE(review): never used
					CCAUtil.writeFile(name, info, iterationTestAccuracyResult, testAccuracy, acc);
					
				}
				catch (Exception e)
				{
					// All failures are only logged; the joining pool treats the fold as done.
					e.printStackTrace();
				}
			}

			/*
			 * Reference signature this class mirrors (MATLAB/PRTools):
			 *   CoRFS(alldataini, rrn, itern, nmrmr, K, portion, cl, saveName)
			 * Co-training with relevant feature subsets.
			 *   alldataini: initial dataset; the last column holds the labels (labels must be > 0)
			 *   rrn:        random run number
			 *   itern:      iteration number for co-training
			 *   nmrmr:      number of features to be selected
			 *   K:          number of classifiers
			 *   portion:    portion of the labeled data to be used for training (sampling)
			 *   cl:         PRTools classifier name
			 */

			/**
			 * Builds one local classifier per selected feature subspace and
			 * registers each one in {@code classifierList}.
			 *
			 * @param selectedFeaturesForClassifiers one row of feature indices per classifier
			 * @param currentSampling sampling the classifiers are built against
			 */
			public void initializeClassifierWithSelectedFeaturesSubSpaces(int[][]selectedFeaturesForClassifiers, SamplingAbstractClass currentSampling)
			{
				for (int classifierIndex = 0; classifierIndex < numberOfClassifiers; classifierIndex++)
				{
					LocalClassifierInterface member =
							createDummyClassifierForCoTrainWithSelectedSubspace(selectedFeaturesForClassifiers[classifierIndex], currentSampling);
					classifierList.add(member);
				}
			}

			/**
			 * Creates one ensemble member: a local classifier whose content input
			 * is restricted to the given feature subset.
			 *
			 * @param selectedFeaturedForClassifiers feature indices visible to this classifier
			 * @param currentSampling sampling the input preparer is built against
			 * @return the configured local classifier
			 */
			public LocalClassifierInterface createDummyClassifierForCoTrainWithSelectedSubspace(int[] selectedFeaturedForClassifiers, SamplingAbstractClass currentSampling)
			{
				// Build a content-only input preparer limited to the selected features.
				ArrayList<InputPreparationMethodInterface> inputPrepList = InputPreparerTestClass.createDummyContentInputPreparerWithSelectedFeatures(ConstantVariable.InputPreparer_Constants.CONTENT_INPUT_WITH_SELECTED_FEATURES,selectedFeaturedForClassifiers, currentSampling,global);
				// (translated from Turkish) "the classifier should not know the sampling" — presumably
				// meaning the classifier must stay decoupled from the sampling; confirm intent.
				return LocalClassifierTest.createDummyClassifier(localClassifierName,inputPrepList, currentSampling, global);
			}


			/**
			 * Estimates labels for the given (training) nodes by majority vote
			 * over the classifier ensemble and records each estimate in the
			 * sampling.
			 *
			 * @param currentSampling sampling that receives the estimated labels
			 * @param nodeList nodes to classify
			 * @return per-node combined class-probability vectors, or null when a
			 *         classifier evaluation fails (the error is printed)
			 */
			public double[][] GetCombinationProbabilitiesOfCOTrain(SamplingAbstractClass currentSampling, ArrayList<NodeClass> nodeList)
			{
				try
				{
					int nodeCount = nodeList.size();
					int ensembleSize = classifierList.size();

					double[][] combinedProbabilities = new double[nodeCount][global.classSize];
					double[][] crispVotes = new double[ensembleSize][global.classSize];
					double[][][] softOutputs = new double[nodeCount][ensembleSize][global.classSize];

					for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
					{
						NodeClass node = nodeList.get(nodeIndex);

						// Each ensemble member votes; its soft output is kept and
						// converted to a crisp (one-hot) vote.
						for (int classifierIndex = 0; classifierIndex < ensembleSize; classifierIndex++)
						{
							softOutputs[nodeIndex][classifierIndex] = classifierList.get(classifierIndex).evaluate(node,ConstantVariable.Sampling_Constants.NODE_IS_IN_TRAINING_SET_FOR_THE_FOLD, ConstantVariable.LocalClassifier_Constants.CRISP);
							crispVotes[classifierIndex] = WorkerUtilityClass.ConvertSoftClsOutputToCrisp(softOutputs[nodeIndex][classifierIndex]);
						}

						combinedProbabilities[nodeIndex] = WorkerUtilityClass.GetMajorityVote(crispVotes);
						int estimatedClass = WorkerUtilityClass.findMaxIndexForTheGivenVector(combinedProbabilities[nodeIndex]);
						currentSampling.setClassLabelEstimated(node, estimatedClass);
					}

					return combinedProbabilities;
				}
				catch (Exception e)
				{
					// Error is only logged; the caller receives null.
					e.printStackTrace();
					return null;
				}
			}

			/**
			 * Evaluates every ensemble member on the given test nodes (soft
			 * outputs), combines them by majority vote, records each estimated
			 * label in the sampling and measures each classifier's individual
			 * accuracy.
			 *
			 * @param currentSampling sampling that receives the estimated labels
			 * @param nodeList test nodes to classify
			 * @param classifierAccuracies OUT parameter, length == ensemble size;
			 *        filled with each classifier's accuracy on nodeList. NOTE:
			 *        becomes NaN when nodeList is empty (0/0 division below).
			 * @return per-node combined class-probability vectors, or null when a
			 *         classifier evaluation throws (the error is printed)
			 */
			public double[][] getClassifierResultsForTestNodes(SamplingAbstractClass currentSampling, ArrayList<NodeClass> nodeList, float[] classifierAccuracies)
			{
				try
				{
					double combinationProbobilityOfTest[][] = new double[nodeList.size()][global.classSize];
					double classifierResultsForNodes[][][] = new double[nodeList.size()][classifierList.size()][global.classSize];
					int[] numberOfCorrectEstimationClassForClassifiers = new int [classifierList.size()];
					
					for(int i=0; i< classifierList.size(); i++)
					{
						numberOfCorrectEstimationClassForClassifiers[i] = 0 ;
					}
					
					for(int i=0; i< nodeList.size(); i++)
					{
						NodeClass selectedNode = nodeList.get(i);
						int actualClass = nodeList.get(i).getClassOrder();
						
						// Soft ensemble outputs; count per-classifier hits against
						// the node's actual class.
						for (int j=0; j<classifierList.size();j++)
						{
							classifierResultsForNodes[i][j]= classifierList.get(j).evaluate(selectedNode,ConstantVariable.Sampling_Constants.NODE_IS_IN_TEST_SET_FOR_THE_FOLD, ConstantVariable.LocalClassifier_Constants.SOFT);
							int predictedClass = WorkerUtilityClass.findMaxIndexForTheGivenVector(classifierResultsForNodes[i][j]);
							if(predictedClass == actualClass)
								numberOfCorrectEstimationClassForClassifiers[j] ++;
						}

						
						combinationProbobilityOfTest[i] = WorkerUtilityClass.GetMajorityVote(classifierResultsForNodes[i]);
						int estimated = WorkerUtilityClass.findMaxIndexForTheGivenVector(combinationProbobilityOfTest[i]);
						currentSampling.setClassLabelEstimated(selectedNode, estimated);
					}

					for(int i=0; i< classifierList.size(); i++)
					{
						classifierAccuracies[i] = (float)numberOfCorrectEstimationClassForClassifiers[i] /(float)nodeList.size();
					}
					
					return combinationProbobilityOfTest;

				}
				catch (Exception e)
				{                               // error is only logged; caller receives null
					e.printStackTrace();
				}

				return null;
			}

			/**
			 * Converts per-feature relevance scores (mutual information with the
			 * class labels) into a probability distribution used by tournament
			 * feature selection.
			 *
			 * Scores are shifted so the minimum becomes 0, then normalized to sum
			 * to 1. Fix: when every feature has identical relevance the shifted
			 * sum is 0 and the old code divided by zero, filling the whole array
			 * with NaN; a uniform distribution is returned in that case instead.
			 *
			 * @param currentSampling sampling whose training nodes provide the data
			 * @param numberOfFeature number of content features per node
			 * @return per-feature selection probability; sums to 1
			 * @throws Exception propagated from the relevance computation
			 */
			private double[] calculateRelevancesProbobilities(SamplingAbstractClass currentSampling,int numberOfFeature) throws Exception
			{
				double[] relevanceProbobilities = new double[numberOfFeature];
				double[] relevances = getRelevanceOfFeatures(currentSampling, "");
				double minRelevance = WorkerUtilityClass.findMinForTheGivenVector(relevances);

				// Shift scores so the weakest feature maps to probability 0.
				for(int i=0; i<relevanceProbobilities.length ;i++)
				{
					relevanceProbobilities[i] = relevances[i] - minRelevance;
				}
				double sumOfRelevance = WorkerUtilityClass.getSumOfTheElementsOfTheVector(relevanceProbobilities);

				if (sumOfRelevance == 0.0)
				{
					// All relevances equal: fall back to a uniform distribution
					// instead of producing NaN via 0/0.
					Arrays.fill(relevanceProbobilities, 1.0 / numberOfFeature);
					return relevanceProbobilities;
				}

				for(int i=0; i<relevanceProbobilities.length ;i++)
				{
					relevanceProbobilities[i] = relevanceProbobilities[i] /sumOfRelevance;
				}
				return relevanceProbobilities;
			}

			/**
			 * Computes each content feature's relevance as the mutual information
			 * between the feature column and the class labels, over the current
			 * training nodes.
			 *
			 * @param currentSampling sampling providing the training nodes
			 * @param type unused selector, kept for interface compatibility
			 * @return mutual-information score per feature
			 * @throws Exception propagated from the MI computation
			 */
			private double[] getRelevanceOfFeatures(SamplingAbstractClass currentSampling, String type) throws Exception
			{
				ArrayList<NodeClass> trainNodes = currentSampling.getTrainNodes();
				int instanceCount = trainNodes.size();
				int featureCount = trainNodes.get(0).getContentList().get(0).getAttributeList().size();

				double[][] featureColumns = new double[featureCount][instanceCount];
				double[] labels = new double[instanceCount];

				// Transpose node attributes into per-feature columns and collect labels.
				for (int instanceIndex = 0; instanceIndex < instanceCount; instanceIndex++)
				{
					ArrayList<Double> attributes = trainNodes.get(instanceIndex).getContentList().get(0).getAttributeList();

					for (int featureIndex = 0; featureIndex < featureCount; featureIndex++)
					{
						featureColumns[featureIndex][instanceIndex] = attributes.get(featureIndex);
					}

					labels[instanceIndex] = trainNodes.get(instanceIndex).getClassOrder();
				}

				double[] relevances = new double[featureCount];

				// Assumes the inputs are already normalized/discretized upstream
				// (BIN=3 per the accompanying note) — TODO confirm.
				for (int featureIndex = 0; featureIndex < featureCount; featureIndex++)
				{
					relevances[featureIndex] = PengMIClass.getMutualInfo(featureColumns[featureIndex], labels);
				}

				return relevances;
			}

			
			/**
			 * Computes the redundancy of the features at {@code evaluateIndexes}
			 * against those at {@code selectedIndexes}, over the current training
			 * nodes.
			 *
			 * @param currentSampling sampling providing the training nodes
			 * @param selectedIndexes reference feature indices
			 * @param evaluateIndexes feature indices to score
			 * @param type unused selector, kept for interface compatibility
			 * @return redundancy score per evaluated feature
			 */
			private double[] getRedundanciesOfFeatures(SamplingAbstractClass currentSampling, int selectedIndexes[], int evaluateIndexes[], String type)
			{
				ArrayList<NodeClass> trainNodes = currentSampling.getTrainNodes();
				int instanceCount = trainNodes.size();
				int featureCount = trainNodes.get(0).getContentList().get(0).getAttributeList().size();

				// Row-major layout (instances x features), unlike the column-major
				// layout used for the relevance computation.
				double[][] instanceRows = new double[instanceCount][featureCount];

				for (int instanceIndex = 0; instanceIndex < instanceCount; instanceIndex++)
				{
					ArrayList<Double> attributes = trainNodes.get(instanceIndex).getContentList().get(0).getAttributeList();

					for (int featureIndex = 0; featureIndex < attributes.size(); featureIndex++)
					{
						instanceRows[instanceIndex][featureIndex] = attributes.get(featureIndex);
					}
				}

				return PengMIClass.GetRedundancy(instanceRows, evaluateIndexes, selectedIndexes);
			}
			
			
			/**
			 * Draws a feature subset for each of the K classifiers via tournament
			 * selection, retrying (up to 100 attempts) whenever a drawn subset
			 * duplicates one already chosen for an earlier classifier.
			 *
			 * Fix: the duplicate scan previously iterated
			 * {@code previousClassifiers < (classifierIndex-1)}, which never
			 * compared against the most recently selected subset; it now checks
			 * every earlier classifier.
			 *
			 * @param probobilities per-feature selection probability
			 * @param relevanceRedundancyOfSelected OUT matrix, one row per
			 *        classifier: [0]=mean relevance, [1]=std relevance (filled by
			 *        tournamentSelection), [2]=mean redundancy, [3]=std redundancy
			 * @return selected feature indices, one row per classifier
			 */
			private int[][] SelectFeatureSubSets(double[]  probobilities, double relevanceRedundancyOfSelected[][])
			{
				boolean sameSubsetSelected ;
				int [][] selectedFeaturesSubSet = new int[this.numberOfClassifiers][this.numberOfFeaturesToBeSelected];
				int[] selecteds = new int[this.numberOfFeaturesToBeSelected];
				
				
				for (int classifierIndex=0 ; classifierIndex<this.numberOfClassifiers ; classifierIndex++)
				{
					sameSubsetSelected = true;
					int count = 0;
					
					// Retry until a previously-unused subset is drawn; the bound of
					// 100 keeps small feature spaces from looping forever.
					while(sameSubsetSelected && count <100)
					{
						selecteds = tournamentSelection(probobilities,	this.numberOfFeaturesToBeSelected, relevanceRedundancyOfSelected[classifierIndex]);	
						sameSubsetSelected = false;

						// Compare against ALL earlier classifiers (was classifierIndex-1:
						// off-by-one that skipped the immediately preceding subset).
						for(int previousClassifiers=0; previousClassifiers<classifierIndex; previousClassifiers++)
						{
							if(isSameArrays(selectedFeaturesSubSet[previousClassifiers],selecteds))
							{
								sameSubsetSelected = true;
								break;
							}
						}
						
						count++;
					}

					selectedFeaturesSubSet[classifierIndex] = selecteds;
					
					double[]redundancies = getRedundanciesOfFeatures(currentSampling, selecteds, selecteds, "");

					relevanceRedundancyOfSelected[classifierIndex][2] = WorkerUtilityClass.getAverage(redundancies); 
					relevanceRedundancyOfSelected[classifierIndex][3] = WorkerUtilityClass.getStd(redundancies, relevanceRedundancyOfSelected[classifierIndex][2]); 
                                                  
				}
				
				
				return selectedFeaturesSubSet;
			}

			/**
			 * Returns true when the two index arrays hold the same feature
			 * indices, ignoring order.
			 *
			 * Fix: the previous implementation called Arrays.asList(second) on an
			 * int[], which (via varargs) yields a single-element List&lt;int[]&gt;,
			 * so contains(first[i]) compared an Integer against an int[] and the
			 * method effectively always returned false for non-empty input —
			 * defeating the duplicate-subset check in SelectFeatureSubSets.
			 *
			 * @param first  first index array (non-null)
			 * @param second second index array (non-null)
			 * @return true iff both arrays contain the same multiset of indices
			 */
			private boolean isSameArrays(int[] first, int[] second)
			{
				if (first.length != second.length)
				{
					return false;
				}

				// Compare sorted copies so element order does not matter.
				int[] sortedFirst = Arrays.copyOf(first, first.length);
				int[] sortedSecond = Arrays.copyOf(second, second.length);
				Arrays.sort(sortedFirst);
				Arrays.sort(sortedSecond);

				return Arrays.equals(sortedFirst, sortedSecond);
			}


			/**
			 * Selects {@code numberToBeSelectedFeature} distinct feature indices
			 * by pairwise tournament: repeatedly draws two random, not-yet-chosen
			 * indices and keeps the one with higher relevance probability.
			 *
			 * Side effect: writes the mean ([0]) and std ([1]) of the selected
			 * features' probabilities into {@code relevanceRedundancyOfSelected}.
			 *
			 * NOTE(review): the while-loop has no retry bound — if
			 * numberToBeSelectedFeature approaches or exceeds the number of
			 * available features it can spin for a very long time or forever.
			 *
			 * Manual check: with p = {0.1, 0.2, 0.1, 0.1, 0.05, 0.1, 0.1, 0.2,
			 * 0.05} and m = 4, four distinct indices are returned.
			 *
			 * @param relevantProbobilities per-feature selection probability
			 * @param numberToBeSelectedFeature size of the subset to draw
			 * @param relevanceRedundancyOfSelected OUT array; [0]=mean, [1]=std of
			 *        the selected probabilities
			 * @return the selected feature indices
			 */
			private int[] tournamentSelection(double[] relevantProbobilities, int numberToBeSelectedFeature, double relevanceRedundancyOfSelected[])
			{

				int[] selectedFeaturesIndexArr = new int[numberToBeSelectedFeature];
				ArrayList<Integer> selectedFeaturesIndexList = new ArrayList<Integer>();

				int selectedCount=0;

				// Single-feature case degenerates to a roulette-wheel draw.
				if (numberToBeSelectedFeature==1)
				{
					selectedFeaturesIndexList.add(tournamentOne(relevantProbobilities));
				}
				else
				{
					while (selectedCount<numberToBeSelectedFeature)
					{
						int index1=rand.nextInt(relevantProbobilities.length);
						int index2=rand.nextInt(relevantProbobilities.length);

						// Only count a round when both candidates are fresh.
						if ((!selectedFeaturesIndexList.contains(index1))&&(!selectedFeaturesIndexList.contains(index2)))
						{
							if(relevantProbobilities[index1] > relevantProbobilities[index2])
							{
								selectedFeaturesIndexList.add(index1);
							}
							else
							{
								selectedFeaturesIndexList.add(index2);
							}

							selectedCount++;
						}
					}
				}

				selectedFeaturesIndexArr= WorkerUtilityClass.getIntegerArrayFromIntegerArrayList(selectedFeaturesIndexList);

				double selectedProbs[] = new double[selectedFeaturesIndexArr.length];
				
				for(int i=0; i< selectedFeaturesIndexArr.length ; i++)
				{
					selectedProbs[i] = relevantProbobilities[selectedFeaturesIndexArr[i]];
				}
				
				relevanceRedundancyOfSelected[0] = WorkerUtilityClass.getAverage(selectedProbs);
				relevanceRedundancyOfSelected[1] = WorkerUtilityClass.getStd(selectedProbs, relevanceRedundancyOfSelected[0]);
				
				return selectedFeaturesIndexArr;
			}

			/**
			 * Roulette-wheel draw of a single feature index: walks the cumulative
			 * probability until it passes a uniformly drawn random number.
			 * Falls back to index 0 when floating-point rounding keeps the
			 * cumulative sum below the drawn number.
			 *
			 * NOTE(review): uses a fresh, unseeded Random instead of the
			 * fold-seeded field {@code rand}, so single-feature selection is not
			 * reproducible across runs — confirm whether this is intentional.
			 *
			 * Manual check: with p = {0.1, 0.2, 0.1, 0.1, 0.05, 0.1, 0.1, 0.2,
			 * 0.05} the returned index follows the given distribution.
			 */
		private int tournamentOne(double[] probobilities)
		{
				Random random = new Random();
				float randNumber = random.nextFloat();
				float sumNow = 0 ;

				int selectedIndex=0;

				for (int i=0; i<probobilities.length;i++)
				{
					sumNow += probobilities[i];

					if (randNumber <=sumNow)
					{
						selectedIndex = i ;
						break ;
					}
				}

				return selectedIndex;
			}

		/**
		 * Moves the most confidently labeled test nodes into the training set.
		 *
		 * For every class, the test node with the highest diversity-adjusted
		 * combined probability is a candidate; candidates whose score clears the
		 * 2/classSize threshold are recorded. A node claimed by two different
		 * classes with equal confidence is marked conflicting and dropped.
		 * Surviving nodes get the estimated label fixed for this fold and are
		 * added to the training set.
		 *
		 * Fix: when a later class outbids an earlier claim on the same node, the
		 * node's conflict mark is now cleared with
		 * {@code conflictNodes.remove(Integer.valueOf(chosenNodeIndex))}; the
		 * previous code passed the HashMap itself to {@code List.remove(Object)},
		 * which was always a no-op.
		 *
		 * NOTE(review): the outer loop over classifierList repeats identical work
		 * K times (the vote matrix does not depend on k) — confirm intent.
		 *
		 * @param currentSampling sampling whose train/test partition is updated in place
		 * @param combinationProbability per-test-node combined class probabilities
		 */
		public void choseTestDataSetMoveToTrainSet(SamplingAbstractClass currentSampling, double[][] combinationProbability)
		{
			HashMap<Integer, float[]> chosenNodes = new HashMap<Integer, float[]>();  // node index -> {class, confidence}
			ArrayList<Integer> conflictNodes = new ArrayList<Integer>();
			
			ArrayList<NodeClass> nodeList = currentSampling.getTestNodes();
			int []index = new int [global.classSize];
			float [] maxProb = new float [global.classSize];

			// TODO(KADRIYEB) Do performans better

			// Second-best combined probability per node; rewards nodes whose best
			// class clearly dominates the runner-up.
			float secondBestOfCombinationResults[] = new float[nodeList.size()];
			
			for(int instanceIndex=0; instanceIndex<nodeList.size();instanceIndex++) 
			{
				float second = 0;
				float max = (float) WorkerUtilityClass.findMaxForTheGivenVector(combinationProbability[instanceIndex]);
				
				for(int classIndex=0; classIndex<global.classSize; classIndex++)
				{
					float current = (float)combinationProbability[instanceIndex][classIndex];
					if(current != max && second <current) {
						second = current;
					}
				}
				
				secondBestOfCombinationResults[instanceIndex] = second;
			}
			
			
			for(int k= 0; k<classifierList.size(); k++)
			{
				for(int classIndex=0; classIndex<global.classSize; classIndex++)
				{
					index[classIndex] = -1;
					// NOTE: Float.MIN_VALUE is the smallest POSITIVE float, not the
					// most negative value; acceptable here since the scores are >= 0.
					maxProb[classIndex] = Float.MIN_VALUE;	
					for(int instanceIndex=0; instanceIndex<nodeList.size();instanceIndex++)
					{
						float currentP = (float)combinationProbability[instanceIndex][classIndex];
						// Margin over the runner-up, scaled by LAMDA.
						// NOTE(review): divides by currentP — NaN when currentP is 0;
						// confirm inputs are strictly positive.
						float diversityFactor = (float) (((currentP-secondBestOfCombinationResults[instanceIndex])/currentP) * (LAMDA));
						float currentProb =	(float)combinationProbability[instanceIndex][classIndex] + diversityFactor;
	
						
						if(currentProb>maxProb[classIndex])
						{
							maxProb[classIndex] = currentProb;
							index[classIndex] = instanceIndex;
						}
					}
					
					// TODO(KADRIYEB) Mulltiply maxProb with confidence of classifier - class classIndex...	
					// Only promote candidates whose confidence clears the threshold.
					if(maxProb[classIndex]>  2.0/(global.classSize))
					{	
						float y[] = new float[2];
						y[0] = classIndex;
						y[1] = maxProb[classIndex];	
						int chosenNodeIndex = index[classIndex];
						
						if (!chosenNodes.containsKey(chosenNodeIndex))
						{
							chosenNodes.put(chosenNodeIndex, y);
						}
						else
						{
							float[] values = chosenNodes.get(chosenNodeIndex);
							
							if(values[1] < y[1])
							{
								// New class wins outright: keep it and clear any earlier
								// conflict mark for this node (bug fix — see Javadoc).
								chosenNodes.put(chosenNodeIndex, y);	
								conflictNodes.remove(Integer.valueOf(chosenNodeIndex));
							}
							else if(values[1] == y[1] && values[0] != y[0])
							{
								// Tie between two different classes: node is ambiguous.
								System.out.println("Conflict Nodes ** conflictNodes.add("+chosenNodeIndex+") in CoTrainAlgorithm.java");
								conflictNodes.add(chosenNodeIndex);
							}
						}
					}
				}
			}
	
			// Discard ambiguous nodes entirely.
			for(int conflictNode: conflictNodes)
			{
				chosenNodes.remove(conflictNode);
			}
				
			// Fix the estimated label for this fold and move the node to training.
			// NOTE(review): assumes addNodeToTrainingSet does not remove entries
			// from the list returned by getTestNodes while we still index into
			// nodeList — confirm, otherwise indices could shift mid-loop.
			for(int chosenNodeIndex :chosenNodes.keySet())
			{
				
				float[] y = chosenNodes.get(chosenNodeIndex);
				int estimatedClassOrder = (int)y[0];	
				NodeClass chosenNode = nodeList.get(chosenNodeIndex);
				currentSampling.setActualClassLabelForThisFoldForSpecifiedNode(chosenNode,estimatedClassOrder);
				currentSampling.addNodeToTrainingSet(chosenNode);
			}
		}
	 }

	// The abstract CCA lifecycle hooks below are unused by this implementation:
	// the co-training flow runs through the static Test* entry points and the
	// per-fold worker threads instead.

	@Override
	public void initialize() {
	}

	@Override
	public void Prepare() {
	}

	@Override
	public ResultClass TestAlgorithm() {
		// NOTE(review): returns null — callers invoking this through the CCA
		// interface must handle a null result.
		return null;
	}

	@Override
	public void report() {
	}
}
