package WekaRelated;


import java.io.BufferedReader;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.Random;

import weka.attributeSelection.AttributeSelection;
import weka.attributeSelection.CfsSubsetEval;
import weka.attributeSelection.GreedyStepwise;
import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.bayes.BayesNet;
import weka.classifiers.bayes.BayesianLogisticRegression;
import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.functions.LibSVM;
import weka.classifiers.functions.Logistic;
import weka.classifiers.functions.MultilayerPerceptron;
import weka.classifiers.functions.RBFNetwork;
import weka.classifiers.functions.SMO;
import weka.classifiers.lazy.IBk;
import weka.classifiers.meta.AttributeSelectedClassifier;
import weka.classifiers.trees.J48;
import weka.classifiers.trees.RandomForest;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Utils;
import weka.core.converters.ConverterUtils.DataSource;
import weka.filters.Filter;
import Definitions.GraphClass;
import Global.ConstantVariable;
import Global.GlobalClass;


public class WekaAdaptorClass {

	/**
	 * Instantiates and configures a Weka classifier identified by a symbolic name.
	 *
	 * @param classifierStr symbolic name, one of: "WekaLogistic", "WekaJ48", "WekaMLP",
	 *                      "WekaSVM", "WekaKNN", "WekaNaiveBayes", "WekaRBFNetwork",
	 *                      "WekaRandomForest", "WekaLibSVM", "WekaBayesianLR", "WekaBayesNet"
	 * @param options       Weka option string; when null, the default option string from
	 *                      {@code ConstantVariable.WekaClassifiers} for that classifier is used
	 * @return the configured classifier, or null when the name is unknown or setup failed
	 */
	static weka.classifiers.Classifier getNewLocalClassifier(String classifierStr, String options)
	{
		weka.classifiers.Classifier classifier = null;
		try
		{
			if (classifierStr.equals("WekaLogistic"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.LOGISTIC_REGRESSION_OPTIONS;
				classifier = new Logistic();
			}
			else if (classifierStr.equals("WekaJ48"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.J48_OPTIONS;
				classifier = new J48();
			}
			else if (classifierStr.equals("WekaMLP"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.MLP_OPTIONS;
				classifier = new MultilayerPerceptron();
			}
			else if (classifierStr.equals("WekaSVM"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.SVM_OPTIONS;
				classifier = new SMO();
			}
			else if (classifierStr.equals("WekaKNN"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.kNN_OPTIONS;
				classifier = new IBk();
			}
			else if (classifierStr.equals("WekaNaiveBayes"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.NAIVE_BAYES_OPTIONS;
				classifier = new NaiveBayes();
			}
			else if (classifierStr.equals("WekaRBFNetwork"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.RBF_NETWORK_OPTIONS;
				classifier = new RBFNetwork();
			}
			else if (classifierStr.equals("WekaRandomForest"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.RANDOM_FOREST_OPTIONS;
				classifier = new RandomForest();
			}
			else if (classifierStr.equals("WekaLibSVM"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.LIB_SVM_OPTIONS;
				classifier = new LibSVM();
			}
			else if (classifierStr.equals("WekaBayesianLR"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.BAYESIAN_LR_OPTIONS;
				classifier = new BayesianLogisticRegression();
			}
			else if (classifierStr.equals("WekaBayesNet"))
			{
				if (options == null)
					options = ConstantVariable.WekaClassifiers.BAYES_NET_OPTIONS;
				classifier = new BayesNet();
			}

			// Every concrete Weka classifier implements weka.core.OptionHandler, so the
			// option string can be applied once here instead of repeating the
			// concrete-type cast + setOptions call in every branch above.
			if (classifier != null)
				((weka.core.OptionHandler) classifier).setOptions(Utils.splitOptions(options));
		}
		catch (Exception e)
		{
			e.printStackTrace();
			classifier = null;
		}

		return classifier;
		// NOTE: ideally this should propagate the exception to the caller instead of
		// collapsing every failure into a null return.
	}


	/**
	 * Loads a Weka dataset (ARFF format) from the given file path.
	 *
	 * @param sourcePath path of the file to read
	 * @return the loaded {@link Instances}, or null when reading/parsing fails
	 */
	public static Instances getInstancesFromFile(String sourcePath)
	{
		BufferedReader reader = null;

		try {
			reader = new BufferedReader(new FileReader(sourcePath));
			return new Instances(reader);
		} catch (Exception e) {
			e.printStackTrace();
			return null;
		} finally {
			// Close the reader even when Instances parsing throws
			// (the original closed it only on the success path, leaking on failure).
			if (reader != null) {
				try {
					reader.close();
				} catch (Exception ignored) {
					// best-effort close; nothing more to do
				}
			}
		}
	}

	/**
	 * Runs a 2-fold cross-validation of the globally selected classifier over the
	 * given dataset and prints the main evaluation metrics.
	 *
	 * <p>The last attribute is taken as the class attribute. The per-class metric
	 * printouts index classes 0 and 1, so they assume a binary class attribute —
	 * TODO confirm for multi-class datasets.
	 *
	 * @param data dataset to evaluate on (class index is set here)
	 * @return the {@link Evaluation}, or null when the classifier could not be
	 *         created or evaluation failed
	 */
	public static Evaluation testWithTheGivenInstances(Instances data)
	{
		weka.classifiers.Classifier localClassifier = null;

		try {
			// setting class attribute: convention is that the class is the last attribute
			data.setClassIndex(data.numAttributes() - 1);

			localClassifier = getNewLocalClassifier(ConstantVariable.WekaClassifiers.SELECTED_WEKA_CLASSIFIER, null);
			if (localClassifier == null)
			{
				System.out.println("Classifier NULL cikti napalim.");
				// BUGFIX: the original fell through and called buildClassifier on null,
				// which always threw a NullPointerException here. Bail out instead.
				return null;
			}

			localClassifier.buildClassifier(data);   // build classifier

			Evaluation eval = new Evaluation(data);
			eval.crossValidateModel(localClassifier, data, 2, new Random(1));
			System.out.print("Acc:" + eval.pctCorrect());
			System.out.print(" Inc.:" + eval.pctIncorrect());
			System.out.println(" Err Rate:" + eval.errorRate());
			System.out.println(" FMeasure(0):" + eval.fMeasure(0));
			System.out.println(" FMeasure(1):" + eval.fMeasure(1));
			System.out.println(" Precision(0):" + eval.precision(0));
			System.out.println(" Precision(1):" + eval.precision(1));
			System.out.println(" Recall(0):" + eval.recall(0));
			System.out.println(" Recall(1):" + eval.recall(1));

			System.out.println(eval.toSummaryString("\nResults\n======\n", false));
			return eval;
		} catch (Exception e) {
			e.printStackTrace();
		}
		return null;
	}

	/**
	 * Builds a Weka dataset in memory from the graph's node data: one numeric
	 * attribute per feature of the default content type, plus a nominal class
	 * attribute populated from the global class list.
	 *
	 * @param graph  graph whose dataset (node list) supplies the feature vectors
	 * @param global global state providing the list of class labels
	 * @return the assembled {@link Instances}, or null on failure
	 */
	public static Instances generateDatasetOnTheFly(GraphClass graph, GlobalClass global)
	{
		int numberOfFeatures;

		// Feature count is taken from the first node's default-content attribute list;
		// assumes every node has the same number of features — TODO confirm.
		numberOfFeatures = graph.getDataSet().get(0).getContentList().get(ConstantVariable.Common_ConstantVariables.DEFAULT_CONTENT_TYPE).getAttributeList().size();
		try {

			ArrayList<Attribute> attributesList = new ArrayList<Attribute>();

			for (int i = 0; i < numberOfFeatures; i++)
			{
				attributesList.add(new Attribute("Attribute " + i));
			}

			// Nominal class attribute, one value per known class name.
			ArrayList<String> classValues = new ArrayList<String>();

			for (int i = 0; i < global.classList.size(); i++)
			{
				classValues.add(global.classList.get(i).getName());
			}

			attributesList.add(new Attribute("Class", classValues));

			Instances data = new Instances("AS_ON_THE_FLY_DATASET", attributesList, 0);

			int sizeOfTheDatasetOfTheGraph = graph.getDataSet().size();

			for (int i = 0; i < sizeOfTheDatasetOfTheGraph; i++)
			{
				double[] values = new double[numberOfFeatures + 1];

				// Hoist the per-node attribute list out of the inner loop instead of
				// re-walking the getter chain for every feature.
				ArrayList<Double> nodeAttributes = graph.getDataSet().get(i).getContentList().get(ConstantVariable.Common_ConstantVariables.DEFAULT_CONTENT_TYPE).getAttributeList();

				for (int j = 0; j < numberOfFeatures; j++)
					values[j] = nodeAttributes.get(j);

				// The class value is stored as the nominal index (the class order),
				// matching the order the class names were added above.
				values[numberOfFeatures] = graph.getDataSet().get(i).getClassOrder();

				data.add(new DenseInstance(1.0, values));
			}

			return data;

		} catch (Exception e) {
			e.printStackTrace();
		}
		return null;
	}

	/**
	 * Classifies a single instance and compares the prediction with the actual class.
	 *
	 * @param localClassifier a trained classifier
	 * @param testInstance    instance to classify (must have its class value set)
	 * @return false when the prediction differs from the actual class; true when
	 *         they match — and also true when classification throws (the exception
	 *         is only printed), so callers cannot distinguish "correct" from "failed".
	 */
	public static boolean classifyInstanceAndReturnTheEstimatedClassLabel(Classifier localClassifier, Instance testInstance)
	{
		double predicted;
		double actual;

		try {
			// predicted value
			predicted = localClassifier.classifyInstance(testInstance);
			actual = testInstance.classValue();

			if (predicted != actual)
			{
				System.out.print(" predicted:" + predicted);
				System.out.println(" actual:" + actual);
				return false;
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		return true;
	}

	/**
	 * Example of loading a dataset through Weka's {@link DataSource} converter,
	 * which supports formats beyond ARFF. Demonstration only; the result is discarded.
	 */
	public static void importFromOtherFormatsTest()
	{
		DataSource source;
		try {
			source = new DataSource("/some/where/data.arff");
			Instances data = source.getDataSet();
			// setting class attribute if the data format does not provide this information
			// E.g., the XRFF format saves the class attribute information as well
			if (data.classIndex() == -1)
				data.setClassIndex(data.numAttributes() - 1);

		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Example of building a Weka option array by hand. Demonstration only.
	 */
	public static void manuallyCreateOptionsStringArrayTest()
	{
		String[] options = new String[2];
		options[0] = "-R";
		options[1] = "1";
	}

	/**
	 * Example of parsing an option string with {@code Utils.splitOptions}.
	 * Demonstration only; the result is discarded.
	 */
	public static void usingSplitOptionsTest()
	{
		try {
			String[] options = Utils.splitOptions("-R 1");
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Example of evaluating a classifier with separate train/test sets.
	 * Demonstration only: both sets are null placeholders, so running this
	 * as-is throws and merely prints the stack trace.
	 */
	public static void usingTrainTestSeperatedDataTest()
	{
		try {
			Instances train = null; // = ...   // from somewhere
			Instances test = null;  // = ...    // from somewhere
			// train classifier
			Classifier cls = new J48();

			cls.buildClassifier(train);

			// evaluate classifier and print some statistics
			Evaluation eval = new Evaluation(train);
			eval.evaluateModel(cls, test);
			System.out.println(eval.toSummaryString("\nResults\n======\n", false));
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Attribute-selection demo: uses the meta-classifier
	 * ({@link AttributeSelectedClassifier} wrapping J48 with CFS subset evaluation
	 * and backward greedy-stepwise search), evaluated by 10-fold cross-validation.
	 *
	 * @param data dataset with the class index already set
	 * @throws Exception if evaluation fails
	 */
	protected static void useClassifier(Instances data) throws Exception {
		System.out.println("\n1. Meta-classfier");
		AttributeSelectedClassifier classifier = new AttributeSelectedClassifier();
		CfsSubsetEval eval = new CfsSubsetEval();
		GreedyStepwise search = new GreedyStepwise();
		search.setSearchBackwards(true);
		J48 base = new J48();
		classifier.setClassifier(base);
		classifier.setEvaluator(eval);
		classifier.setSearch(search);
		Evaluation evaluation = new Evaluation(data);
		evaluation.crossValidateModel(classifier, data, 10, new Random(1));
		System.out.println(evaluation.toSummaryString());
	}

	/**
	 * Attribute-selection demo: applies the supervised AttributeSelection filter
	 * (CFS subset evaluation, backward greedy-stepwise search) and returns the
	 * reduced dataset.
	 *
	 * @param data input dataset
	 * @return a new dataset containing only the selected attributes
	 * @throws Exception if filtering fails
	 */
	public static Instances useWekaFilter(Instances data) throws Exception {
		System.out.println("\n2. Filter");
		weka.filters.supervised.attribute.AttributeSelection filter = new weka.filters.supervised.attribute.AttributeSelection();
		CfsSubsetEval eval = new CfsSubsetEval();
		GreedyStepwise search = new GreedyStepwise();
		search.setSearchBackwards(true);
		filter.setEvaluator(eval);
		filter.setSearch(search);
		filter.setInputFormat(data);
		Instances newData = Filter.useFilter(data, filter);
		return newData;
	}

	/**
	 * Attribute-selection demo: drives {@link AttributeSelection} directly
	 * (low-level API) and prints the selected attribute indices.
	 *
	 * @param data input dataset
	 * @throws Exception if selection fails
	 */
	protected static void useLowLevel(Instances data) throws Exception {
		System.out.println("\n3. Low-level");
		AttributeSelection attsel = new AttributeSelection();
		CfsSubsetEval eval = new CfsSubsetEval();
		GreedyStepwise search = new GreedyStepwise();
		search.setSearchBackwards(true);
		attsel.setEvaluator(eval);
		attsel.setSearch(search);
		attsel.SelectAttributes(data);
		int[] indices = attsel.selectedAttributes();
		System.out.println("selected attribute indices (starting with 0):\n" + Utils.arrayToString(indices));
	}

	/**
	 * Demo driver: loads the dataset named by the first argument and runs the
	 * three attribute-selection examples above.
	 *
	 * @param args        the commandline arguments (args[0] = dataset path)
	 * @throws Exception  if something goes wrong
	 */
	public static void mainkkk(String[] args) throws Exception {
		// load data
		System.out.println("\n0. Loading data");
		DataSource source = new DataSource(args[0]);
		Instances data = source.getDataSet();
		if (data.classIndex() == -1)
			data.setClassIndex(data.numAttributes() - 1);

		// 1. meta-classifier
		useClassifier(data);

		// 2. filter
		useWekaFilter(data);

		// 3. low-level
		useLowLevel(data);
	}
}
