import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.List;

import weka.classifiers.bayes.*;
import weka.classifiers.trees.*;
import weka.core.Attribute;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ArffLoader;
import weka.filters.unsupervised.attribute.Remove;
import weka.filters.Filter;

/**
 * Trains a Weka classifier on RTE (recognizing textual entailment) feature vectors
 * read from an ARFF file and writes per-pair YES/NO entailment predictions to
 * {@code bayesClassifier.txt}.
 *
 * <p>The ARFF data is expected to have 6 attributes in this order:
 * id, wordMatch, lemmaMatch, lemmaAndPosMatch, bigramMatch, entailement(YES/NO).
 * The id column is stripped before learning and used only to label predictions.
 *
 * <p>If no classifier is injected via {@link #setClassifier}, a NaiveBayes
 * classifier is used (observed ~67% accuracy baseline).
 */
public class Classifier {

    /** Class attribute index in the raw data (id + 4 features precede it). */
    private static final int RAW_CLASS_INDEX = 5;
    /** Class attribute index after the id column has been removed. */
    private static final int FILTERED_CLASS_INDEX = 4;
    /** Instance count per cross-validation fold. */
    private static final int FOLD_SIZE = 80;
    /** Number of cross-validation folds (covers instances 0..799). */
    private static final int NUM_FOLDS = 10;
    /** Train/test split point used by {@link #validation(String)}. */
    private static final int TRAIN_SPLIT = 533;
    /** File all predictions are written to (consumed by eval_rte). */
    private static final String OUTPUT_FILE = "bayesClassifier.txt";

    private weka.classifiers.Classifier classifier;

    /** Injects the classifier to train and evaluate; NaiveBayes is used if never called. */
    public void setClassifier(weka.classifiers.Classifier classifier) {
        this.classifier = classifier;
    }

    /**
     * Builds the full attribute vector: id, the four match features, and the
     * nominal entailment class with values YES/NO.
     */
    private static FastVector buildRawAttributes() {
        FastVector entailmentValues = new FastVector(2);
        entailmentValues.addElement("YES");
        entailmentValues.addElement("NO");
        // NOTE: "entailement" spelling kept as-is; it may be referenced by the ARFF header.
        Attribute entailment = new Attribute("entailement", entailmentValues);

        FastVector attrs = new FastVector(6);
        attrs.addElement(new Attribute("id"));
        attrs.addElement(new Attribute("wordMatch"));
        attrs.addElement(new Attribute("lemmaMatch"));
        attrs.addElement(new Attribute("lemmaAndPosMatch"));
        attrs.addElement(new Attribute("bigramMatch"));
        attrs.addElement(entailment);
        return attrs;
    }

    /**
     * Returns the attribute vector without the leading id column. The same
     * Attribute objects are shared with the raw vector, matching the original
     * behavior of declaring both vectors from one set of Attribute instances.
     */
    private static FastVector withoutId(FastVector rawAttrs) {
        FastVector attrs = new FastVector(5);
        for (int i = 1; i < rawAttrs.size(); i++) {
            attrs.addElement(rawAttrs.elementAt(i));
        }
        return attrs;
    }

    /**
     * Incrementally streams instances from the ARFF file into a dataset declared
     * over {@code rawAttrs}, with the class index set to the entailment attribute.
     *
     * @param arffFileName path to the ARFF feature file
     * @param rawAttrs attribute vector from {@link #buildRawAttributes()}
     * @return the populated dataset (id column still present)
     * @throws Exception on I/O or ARFF parsing failure
     */
    private static Instances loadDataset(String arffFileName, FastVector rawAttrs) throws Exception {
        Instances fixed = new Instances("Relation", rawAttrs, 10);
        fixed.setClassIndex(RAW_CLASS_INDEX);

        ArffLoader loader = new ArffLoader();
        loader.setFile(new File(arffFileName));
        Instances structure = loader.getStructure();

        for (Instance inst = loader.getNextInstance(structure);
                inst != null;
                inst = loader.getNextInstance(structure)) {
            // Re-assert the class value against our attribute declaration. This looks
            // like a numeric no-op, but it is kept for compatibility with the original
            // pipeline's handling of Weka attribute indices — TODO confirm it can go.
            inst.setValue((Attribute) rawAttrs.elementAt(RAW_CLASS_INDEX), inst.value(RAW_CLASS_INDEX));
            fixed.add(inst);
        }
        return fixed;
    }

    /**
     * Removes the first (id) attribute so it is not treated as a learning feature.
     *
     * @throws Exception if the Remove filter rejects the dataset
     */
    private static Instances stripIdColumn(Instances dataset) throws Exception {
        Remove remove = new Remove();
        remove.setOptions(new String[] {"-R", "1"});
        remove.setInputFormat(dataset);
        return Filter.useFilter(dataset, remove);
    }

    /** Creates an empty dataset over the filtered attributes with its class index set. */
    private static Instances newFilteredSet(FastVector featureAttrs) {
        Instances set = new Instances("Relation", featureAttrs, 10);
        set.setClassIndex(FILTERED_CLASS_INDEX);
        return set;
    }

    /** Falls back to NaiveBayes when no classifier was injected via setClassifier. */
    private void ensureClassifier() {
        if (classifier == null) {
            classifier = new NaiveBayes(); // ~67% accuracy baseline
        }
    }

    /**
     * Writes one "id YES/NO" prediction line per test instance to {@link #OUTPUT_FILE},
     * preceded by the "ranked: no" header expected by the evaluator.
     *
     * @param testSet instances to classify (id column already removed)
     * @param pairIDs pair ids aligned with {@code testSet}, used to label each line
     * @throws Exception if classification or writing fails
     */
    private void writePredictions(Instances testSet, List<Double> pairIDs) throws Exception {
        // try-with-resources: the original leaked the writer if classification threw.
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(OUTPUT_FILE)))) {
            writer.write("ranked: no\n");
            for (int i = 0; i < testSet.numInstances(); i++) {
                double[] dist = classifier.distributionForInstance(testSet.instance(i));
                // Decision rule kept exactly as the original: YES when dist[0] < dist[1].
                String label = dist[0] < dist[1] ? " YES\n" : " NO\n";
                writer.write(pairIDs.get(i).intValue() + label);
            }
        }
    }

    /**
     * Holdout validation: trains on the first {@value #TRAIN_SPLIT} instances and
     * predicts on the rest, writing predictions to {@link #OUTPUT_FILE}.
     *
     * @param arffFileName path to the ARFF feature file
     * @throws Exception on I/O, filtering, or classification failure
     */
    public void validation(String arffFileName) throws Exception {
        FastVector rawAttrs = buildRawAttributes();
        FastVector featureAttrs = withoutId(rawAttrs);

        Instances fixedDataset = loadDataset(arffFileName, rawAttrs);
        Instances filteredSet = stripIdColumn(fixedDataset);

        Instances trainingSet = newFilteredSet(featureAttrs);
        Instances testSet = newFilteredSet(featureAttrs);
        List<Double> pairIDs = new ArrayList<Double>();
        for (int i = 0; i < filteredSet.numInstances(); i++) {
            if (i < TRAIN_SPLIT) {
                trainingSet.add(filteredSet.instance(i));
            } else {
                testSet.add(filteredSet.instance(i));
                // The id only survives in the unfiltered dataset.
                pairIDs.add(fixedDataset.instance(i).value(0));
            }
        }

        ensureClassifier();
        classifier.buildClassifier(trainingSet);
        writePredictions(testSet, pairIDs);
    }

    /**
     * 10-fold cross validation over contiguous blocks of {@value #FOLD_SIZE}
     * instances. Each fold's predictions are written to {@link #OUTPUT_FILE} and
     * scored via eval_rte; the mean accuracy is printed to stdout.
     *
     * @param arffFileName path to the ARFF feature file
     * @throws Exception on I/O, filtering, or classification failure
     */
    public void crossValidation(String arffFileName) throws Exception {
        FastVector rawAttrs = buildRawAttributes();
        FastVector featureAttrs = withoutId(rawAttrs);

        Instances fixedDataset = loadDataset(arffFileName, rawAttrs);
        Instances filteredSet = stripIdColumn(fixedDataset);

        List<Double> accuracyScores = new ArrayList<Double>();
        for (int fold = 0; fold < NUM_FOLDS; fold++) {
            int foldStart = fold * FOLD_SIZE;
            int foldEnd = foldStart + FOLD_SIZE;

            Instances trainingSet = newFilteredSet(featureAttrs);
            Instances testSet = newFilteredSet(featureAttrs);
            List<Double> pairIDs = new ArrayList<Double>();
            for (int i = 0; i < filteredSet.numInstances(); i++) {
                if (i >= foldStart && i < foldEnd) {
                    testSet.add(filteredSet.instance(i));
                    pairIDs.add(fixedDataset.instance(i).value(0));
                } else {
                    trainingSet.add(filteredSet.instance(i));
                }
            }

            ensureClassifier();
            // Rebuilt from scratch each fold so no state leaks across folds.
            classifier.buildClassifier(trainingSet);
            writePredictions(testSet, pairIDs);

            RTEPhase3 rtePhase3 = new RTEPhase3();
            accuracyScores.add(eval_rte.evaluateLemmaMatching(rtePhase3.pairs, OUTPUT_FILE));
        }

        // Fix: average over the number of folds actually scored instead of the
        // hard-coded 10 the original divided by.
        double sum = 0;
        for (Double score : accuracyScores) {
            sum += score;
        }
        double mean = accuracyScores.isEmpty() ? 0 : sum / accuracyScores.size();
        System.out.println("Cross Validation: " + mean + "%");
    }
}
