import weka.classifiers.Evaluation;
import weka.classifiers.bayes.*;
import weka.classifiers.functions.*;
import weka.classifiers.lazy.KStar;
import weka.classifiers.meta.*;
import weka.classifiers.mi.CitationKNN;
import weka.classifiers.mi.MIOptimalBall;
import weka.classifiers.misc.VFI;
import weka.classifiers.pmml.consumer.NeuralNetwork;
import weka.classifiers.rules.*;
import weka.classifiers.trees.*;
import weka.classifiers.trees.ft.FTInnerNode;
import weka.classifiers.trees.ft.FTNode;
import weka.core.Attribute;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ArffLoader;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Remove;
import xml.EntailementCorpus;
import xmlpreprocessed.EntailmentCorpus;
import xmlpreprocessed.Pair;
import xmlpreprocessed.XMLParser;

import java.io.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Phase 4 of the RTE pipeline: writes ARFF feature files (pair id, the four
 * RTEPhase3 match features, task, entailment label) for the development and
 * blind test data, trains a Weka classifier on them, and writes YES/NO
 * entailment predictions in the RTE submission format.
 *
 * Created by J. K. Øye, November 2011.
 */
public class RTEPhase4 {

    private weka.classifiers.Classifier classifier;

    xml.XMLParser parser = new xml.XMLParser();
    EntailementCorpus entCorpus = parser.getEntailementCorpus("blind-test-data/blind-test-data.xml");
    List<xml.Pair> pairs = entCorpus.getProperties();

    XMLParser preproParser = new XMLParser();
    EntailmentCorpus preproCorpus = preproParser.getEntailementCorpus("blind-test-data/preprocessed-blind-test-data.xml");
    List<Pair> preproPairs = preproCorpus.getProperties();

    public static void main(String[] args) {
        RTEPhase4 rtePhase4 = new RTEPhase4();
        RTEPhase3 rtePhase3 = new RTEPhase3();
        Classifier classifier = new Classifier();
        try {
            rtePhase4.buildFeatureARFF("features2.arff", rtePhase3.pairs);

            rtePhase4.setClassifier(new DecisionTable());
            rtePhase4.entailmentPred_DevData("features2.arff");
            System.out.println("Phase4 Validation of dev. data: " + eval_rte.evaluateLemmaMatching(rtePhase3.pairs, "phase4pred2.txt") + "%");

            rtePhase4.buildFeatureARFF("blindFeatures.arff", rtePhase4.preproPairs);
            rtePhase4.entailmentPrediction("features2.arff", "blindFeatures.arff");
            System.out.println("Predictions of test data done...: phase4prediction.txt");
        } catch (Exception e) {
            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
        }
    }

    public void buildFeatureARFF(String fileName, List<Pair> pairs) throws IOException {
        BufferedWriter writer = new BufferedWriter(new FileWriter(new File(fileName)));
        RTEPhase3 rtePhase3 = new RTEPhase3();

        writer.write("% 1. Title: RTEPhase4\n");
        writer.write("%\n");
        writer.write("% 2. Sources:\n");
        writer.write("%      (a) Creator: J. K. Øye\n");
        writer.write("%      (b) Date: November, 2011\n");
        writer.write("%\n");
        writer.write("@RELATION RTEPhase4\n");
        writer.write("\n");
        writer.write("@ATTRIBUTE id  NUMERIC\n");
        writer.write("@ATTRIBUTE wordMatch  NUMERIC\n");
        writer.write("@ATTRIBUTE lemmaMatch  NUMERIC\n");
        writer.write("@ATTRIBUTE lemmaPOSMatch  NUMERIC\n");
        writer.write("@ATTRIBUTE bigramMatch  NUMERIC\n");
        writer.write("@ATTRIBUTE task  string\n");
        writer.write("@ATTRIBUTE entailment string\n");
        writer.write("\n");
        writer.write("@DATA\n");

        for(Pair pair : pairs){
            //System.out.println("PairID: "+pair.getId()+" Entailment: "+pair.getEntailment());
            writer.write(pair.getId()+",");
            double[] features = rtePhase3.featureExtraction(pair);
            for (double feature : features) {
                writer.write(feature+",");
            }
            writer.write(pair.getTask()+",");
            writer.write(pair.getEntailment()+"\n");
        }
        writer.close();
    }

    public void setClassifier(weka.classifiers.Classifier classifier) {
        this.classifier = classifier;
    }

    public void entailmentPrediction(String trainARFF, String testARFF) throws Exception {
        // Declare attributes
        Attribute Attribute0 = new Attribute("id");
        Attribute Attribute1 = new Attribute("wordMatch");
        Attribute Attribute2 = new Attribute("lemmaMatch");
        Attribute Attribute3 = new Attribute("lemmaAndPosMatch");
        Attribute Attribute4 = new Attribute("bigramMatch");

        // Declare the class attribute along with its values
        FastVector entailmentValues = new FastVector(2);
        entailmentValues.addElement("YES");
        entailmentValues.addElement("NO");
        Attribute EntailementAttribute  = new Attribute("entailement", entailmentValues);

        // Declare the class attribute along with its values
        FastVector taskValues = new FastVector(4);
        taskValues.addElement("IE");
        taskValues.addElement("IR");
        taskValues.addElement("QA");
        taskValues.addElement("SUM");
        Attribute TaskAttribute  = new Attribute("task", taskValues);

        FastVector fvWekaAttributes = new FastVector(6);
        fvWekaAttributes.addElement(Attribute0);
        fvWekaAttributes.addElement(Attribute1);
        fvWekaAttributes.addElement(Attribute2);
        fvWekaAttributes.addElement(Attribute3);
        fvWekaAttributes.addElement(Attribute4);
        fvWekaAttributes.addElement(TaskAttribute);
        fvWekaAttributes.addElement(EntailementAttribute);

        FastVector fvCrossValidation = new FastVector(5);
        fvCrossValidation.addElement(Attribute1);
        fvCrossValidation.addElement(Attribute2);
        fvCrossValidation.addElement(Attribute3);
        fvCrossValidation.addElement(Attribute4);
        fvCrossValidation.addElement(TaskAttribute);
        fvCrossValidation.addElement(EntailementAttribute);

        // Create an empty training set
        Instances fixedDataset = new Instances("Relation", fvWekaAttributes, 10);
        fixedDataset.setClassIndex(6); //Yes/no?

        // Create an empty training set
        Instances fixedTestDataset = new Instances("Relation", fvWekaAttributes, 10);
        fixedTestDataset.setClassIndex(6); //Yes/no?

        // Read from file and create instances with featurs from file.
        ArffLoader arffLoader = new ArffLoader();
        File features = new File(trainARFF);
        arffLoader.setFile(features);

        // Read from file and create instances with featurs from file.
        ArffLoader arffTestLoader = new ArffLoader();
        File testFeatures = new File(testARFF);
        arffTestLoader.setFile(testFeatures);

        // Instances dataSet = arffLoader.getDataSet(); //get the whole dataset
        Instances dataSet = arffLoader.getStructure();

        Instance instance = arffLoader.getNextInstance(dataSet);
        while(instance != null) {
            instance.setValue((Attribute)fvWekaAttributes.elementAt(5), instance.value(5));
            instance.setValue((Attribute)fvWekaAttributes.elementAt(6), instance.value(6));

            fixedDataset.add(instance);
            instance = arffLoader.getNextInstance(dataSet);
        }

        // Instances dataSet = arffLoader.getDataSet(); //get the whole dataset
        Instances dataTestSet = arffTestLoader.getStructure();

        Instance testInstance = arffTestLoader.getNextInstance(dataTestSet);
        while(testInstance != null) {
            testInstance.setValue((Attribute)fvWekaAttributes.elementAt(5), testInstance.value(5));
            testInstance.setValue((Attribute)fvWekaAttributes.elementAt(6), testInstance.value(6));

            fixedTestDataset.add(testInstance);
            testInstance = arffTestLoader.getNextInstance(dataTestSet);
        }

        String[] options = new String[2];
        options[0] = "-R";
        options[1] = "1";

        Remove remove = new Remove();
        remove.setOptions(options);
        remove.setInputFormat(fixedDataset);

        Instances filteredSet = Filter.useFilter(fixedDataset, remove);

        remove.setInputFormat(fixedTestDataset);

        Instances filteredTestSet = Filter.useFilter(fixedTestDataset, remove);

        NaiveBayes naiveBayes = new NaiveBayes();

        Instances testSet = new Instances("Relation", fvCrossValidation, 10);
        testSet.setClassIndex(5);

        Instances trainingSet = new Instances("Relation", fvCrossValidation, 10);
        trainingSet.setClassIndex(5);

        for (int i=0; i<filteredSet.numInstances(); i++) {
            trainingSet.add(filteredSet.instance(i));
        }

        List<Double> pairIDs = new ArrayList<Double>();
        for (int i=0; i<filteredTestSet.numInstances(); i++) {
            testSet.add(filteredTestSet.instance(i));
            pairIDs.add(fixedTestDataset.instance(i).value(0));
        }

        if(classifier==null){
            classifier = naiveBayes;
            classifier.buildClassifier(trainingSet);
        }else{
            classifier.buildClassifier(trainingSet);
        }

        BufferedWriter writer = new BufferedWriter(new FileWriter(new File("phase4predictions.txt")));
        writer.write("ranked: no\n");
        for(int i=0; i<testSet.numInstances(); i++){
            double[] test = classifier.distributionForInstance(testSet.instance(i));
            if(test[0]<test[1]){
                writer.write(pairIDs.get(i).intValue() +" YES\n");
            }else{
                writer.write(pairIDs.get(i).intValue() +" NO\n");
            }
        }
        writer.close();
    }

    public void entailmentPred_DevData(String arffFileName) throws Exception {
        // Declare attributes
        Attribute Attribute0 = new Attribute("id");
        Attribute Attribute1 = new Attribute("wordMatch");
        Attribute Attribute2 = new Attribute("lemmaMatch");
        Attribute Attribute3 = new Attribute("lemmaAndPosMatch");
        Attribute Attribute4 = new Attribute("bigramMatch");

        // Declare the class attribute along with its values
        FastVector entailmentValues = new FastVector(2);
        entailmentValues.addElement("YES");
        entailmentValues.addElement("NO");
        Attribute EntailementAttribute  = new Attribute("entailement", entailmentValues);

                // Declare the class attribute along with its values
        FastVector taskValues = new FastVector(4);
        taskValues.addElement("IE");
        taskValues.addElement("IR");
        taskValues.addElement("QA");
        taskValues.addElement("SUM");
        Attribute TaskAttribute  = new Attribute("task", taskValues);

        FastVector fvWekaAttributes = new FastVector(6);
        fvWekaAttributes.addElement(Attribute0);
        fvWekaAttributes.addElement(Attribute1);
        fvWekaAttributes.addElement(Attribute2);
        fvWekaAttributes.addElement(Attribute3);
        fvWekaAttributes.addElement(Attribute4);
        fvWekaAttributes.addElement(TaskAttribute);
        fvWekaAttributes.addElement(EntailementAttribute);

        FastVector fvCrossValidation = new FastVector(5);
        fvCrossValidation.addElement(Attribute1);
        fvCrossValidation.addElement(Attribute2);
        fvCrossValidation.addElement(Attribute3);
        fvCrossValidation.addElement(Attribute4);
        fvCrossValidation.addElement(TaskAttribute);
        fvCrossValidation.addElement(EntailementAttribute);

        // Create an empty training set
        Instances fixedDataset = new Instances("Relation", fvWekaAttributes, 10);

        // Set class index
        fixedDataset.setClassIndex(6); //Yes/no?

        // Read from file and create instances with featurs from file.
        ArffLoader arffLoader = new ArffLoader();
        File features = new File(arffFileName);
        arffLoader.setFile(features);

        // Instances dataSet = arffLoader.getDataSet(); //get the whole dataset
        Instances dataSet = arffLoader.getStructure();

        Instance instance = arffLoader.getNextInstance(dataSet);
        while(instance != null) {
            instance.setValue((Attribute)fvWekaAttributes.elementAt(5), instance.value(5));
            instance.setValue((Attribute)fvWekaAttributes.elementAt(6), instance.value(6));

            fixedDataset.add(instance);
            instance = arffLoader.getNextInstance(dataSet);
        }

        String[] options = new String[2];
        options[0] = "-R";
        options[1] = "1";

        Remove remove = new Remove();
        remove.setOptions(options);
        remove.setInputFormat(fixedDataset);

        Instances filteredSet = Filter.useFilter(fixedDataset, remove);

        NaiveBayes naiveBayes = new NaiveBayes();                    //67%

        Instances testSet = new Instances("Relation", fvCrossValidation, 10);
        testSet.setClassIndex(5);

        Instances trainingSet = new Instances("Relation", fvCrossValidation, 10);
        trainingSet.setClassIndex(5);

        List<Double> pairIDs = new ArrayList<Double>();
        for (int i=0; i<filteredSet.numInstances(); i++) {

            if(i<533){
                trainingSet.add(filteredSet.instance(i));
            }else {
                testSet.add(filteredSet.instance(i));
                pairIDs.add(fixedDataset.instance(i).value(0));
            }
        }

        if(classifier==null){
            classifier = naiveBayes;
            classifier.buildClassifier(trainingSet);
        }else{
            classifier.buildClassifier(trainingSet);
        }

        BufferedWriter writer = new BufferedWriter(new FileWriter(new File("phase4pred2.txt")));
        writer.write("ranked: no\n");
        for(int i=0; i<testSet.numInstances(); i++){
            double[] test = classifier.distributionForInstance(testSet.instance(i));
            if(test[0]<test[1]){
                writer.write(pairIDs.get(i).intValue() +" YES\n");
            }else{
                writer.write(pairIDs.get(i).intValue() +" NO\n");
            }
        }
        writer.close();
    }
}
