package weka.classifiers.collective.meta;

import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;

import weka.classifiers.AbstractClassifier;
import weka.classifiers.Classifier;
import weka.classifiers.collective.CollectiveRandomizableSingleClassifierEnhancer;
import weka.core.AdditionalMeasureProducer;
import weka.core.Attribute;
import weka.core.Capabilities;
import weka.core.CapabilitiesHandler;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.SelectedTag;
import weka.core.Tag;
import weka.core.Utils;
import weka.core.Capabilities.Capability;

public class OracleClassifier extends CollectiveRandomizableSingleClassifierEnhancer implements
        OptionHandler, Serializable, AdditionalMeasureProducer,
        CapabilitiesHandler {

    /** For serialization. */
    private static final long serialVersionUID = 1L;
    /** The opaque (oracle) classifier whose predictions are to be mimicked. */
    private Classifier oracleClassifier = defaultOracleClassifier();
    /** The transparent classifier trained to reproduce the oracle's predictions. */
    private Classifier extractedClassifier = defaultExtractedClassifier();
    /** Whether to append per-class probability columns; whether to dump the model to a file. */
    private boolean addProbabilities = false, model2file = false;
    /** The dataset the extracted classifier was trained on; reused in getDistribution. */
    private Instances savedData;
    /** Tags describing which datasets (train/test, true/oracle labels) feed the extraction step. */
    public static Tag[] TAGS_DatasetUse = {
        new Tag(1, "I", "Induction (I): maximizes training accuracy"),
        new Tag(2, "E", "Extraction (E): maximizes training fidelity"),
        new Tag(3, "X", "Explanation (X): maximizes test fidelity"),
        new Tag(4, "IE",
        "Exduction (IE): maximizes training accuracy and training fidelity"),
        new Tag(5, "IX",
        "Indanation (IX): maximizes training accuracy and test fidelity"),
        new Tag(6, "EX",
        "Extanation (EX): maximizes training fidelity and test fidelity"),
        new Tag(
        7,
        "IEX",
        "Indextanation (IEX): maximizes training accuracy, training fidelity and test fidelity"),};
    /** Selected dataset-use strategy; id into TAGS_DatasetUse (default: 1 = I). */
    public int m_DatasetUse = 1;
    /** Number of copies generated per instance when sampling/SMOTEing. */
    private int m_numCopies = 1;
    /** Selected prediction combiner; id into TAGS_Combiner (default: 1 = AVG). */
    private int m_Combiner = 1;
    /** Seed for random number generation (used by doSMOTE). */
    private int m_RandomSeed;
    /** Number of nearest neighbors used when SMOTEing. */
    private int m_NearestNeighbors = 3;
    /** Selected sampling scheme; id into TAGS_Sampling (default: 1 = N). */
    private int m_Sampling = 1;
    /** Weight factor scaling the influence of test-set (X) instances. */
    private double m_AlphaWeight;
    /** Tags describing the available sampling schemes. */
    private static Tag[] TAGS_Sampling = {
        new Tag(1, "N", "Normal, no sampling"),
        new Tag(2, "S", "Sampling of X and/or E, adding NumCopies of copies of each instance with classes distributed based on the base classifiers predictions"),
        new Tag(3, "Sb", "SMOTEing of X and/or E, adding NumCopies of new instances similar to each instance and its NumNeighbors of instances with classes distributed based on the base classifiers predictions"),
        new Tag(4, "Se", "SMOTEing of X and/or E, adding NumCopies of new instances similar to each instance and its NumNeighbors of instances with classes distributed based on the ensemble prediction"),
        new Tag(5, "W", "Sampling of X and/or E, making NumClasses of copies of each instance with weights per class according to the ensemble distribution"),};
    /** Tags describing the available prediction combiners. */
    private static Tag[] TAGS_Combiner = {
        new Tag(1, "AVG", "Average (AVG)"),
        new Tag(2, "MV", "Majority Vote (MV)"),};

    /**
     * Default constructor. Enables {@code m_UseInsight}, i.e. the unlabeled
     * test set is taken into account by default (presumably needed for the
     * test-fidelity (X) modes — confirm against the superclass semantics).
     */
    public OracleClassifier() {
        m_UseInsight = true;
    }

    /**
     * Returns the class name of the default extraction (transparent)
     * classifier.
     *
     * @return the fully qualified class name of the default
     */
    protected String defaultExtractedClassifierString() {
        final String className = "weka.classifiers.trees.J48";
        return className;
    }

    /**
     * Returns the class name of the default oracle (opaque) classifier.
     *
     * @return the fully qualified class name of the default
     */
    public String defaultOracleClassifierString() {
        final String className = "weka.classifiers.trees.RandomForest";
        return className;
    }

    /**
     * Creates a fresh instance of the default extraction classifier.
     *
     * @return a new J48 decision tree
     */
    protected Classifier defaultExtractedClassifier() {
        final weka.classifiers.trees.J48 tree = new weka.classifiers.trees.J48();
        return tree;
    }

    /**
     * Creates a fresh instance of the default oracle classifier.
     *
     * @return a new RandomForest
     */
    public AbstractClassifier defaultOracleClassifier() {
        final weka.classifiers.trees.RandomForest forest = new weka.classifiers.trees.RandomForest();
        return forest;
    }

    /**
     * Returns a string describing classifier.
     *
     * @return a description suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String globalInfo() {
        // FIX: the two literals previously concatenated to
        // "...with another(transitive)..." — a separating space was missing.
        return "Class for extracting rules from one (opaque) classifier with another "
                + "(transitive) classifier.";
    }

    /**
     * Returns the revision string.
     *
     * @return the revision
     */
    public String getRevision() {
        final String cvsTag = "$Revision: 1.1 $";
        return RevisionUtils.extract(cvsTag);
    }

    /**
     * Generates a classifier.
     *
     * @param data
     *            set of instances serving as training data
     * @throws Exception
     *             if the classifier has not been generated successfully
     * @todo Implement this weka.classifiers.Classifier method
     *
     *       public void buildClassifier(Instances data) throws Exception {
     *       m_Trainset = new Instances(data);
     *       baseClassifier.buildClassifier(m_Trainset);
     *
     *       for (int i = 0; i < m_Trainset.numInstances(); i++) { double
     *       classification =
     *       baseClassifier.classifyInstance(m_Trainset.instance(i)); int numC =
     *       m_Trainset.numClasses(); if (classification !=
     *       m_Trainset.instance(i).classValue()) {
     *       m_Trainset.instance(i).setClassValue(classification); } assert
     *       (numC == m_Trainset.numClasses()); } if
     *       (getAddProbabilityColumns()) { int firstModel =
     *       m_Trainset.numAttributes() - 1, column = firstModel; for (int i =
     *       0; i < m_Trainset.numClasses(); i++) {
     *       m_Trainset.insertAttributeAt(new Attribute("probability_" + i),
     *       column++); } for (int i = 0; i < data.numInstances(); i++) { column
     *       = firstModel; double[] pred =
     *       baseClassifier.distributionForInstance(data.instance(i)); for (int
     *       n = 0; n < m_Trainset.numClasses(); n++) {
     *       m_Trainset.instance(i).setValue(column++, pred[n]); } } }
     *
     *       m_Trainset.deleteWithMissingClass();
     *
     *       getExtractedClassifier().buildClassifier(m_Trainset); savedData =
     *       m_Trainset; }
     */
    /**
     * Computes the class membership distribution for a given instance using
     * the extracted (transparent) classifier. When probability columns are
     * enabled, the instance is first widened to the training format and the
     * oracle's class probabilities are filled in.
     *
     * @param instance
     *            the instance to be classified
     * @return the distribution produced by the extracted classifier
     * @exception Exception
     *                if instance could not be classified successfully
     */
    public double[] getDistribution(Instance instance) throws Exception {
        Instance inst = new DenseInstance(instance);
        inst.setDataset(instance.dataset());
        if (getAddProbabilityColumns()) {
            // The extracted model was trained with appended "probability_i"
            // attributes (see buildClassifier); rebuild the incoming instance
            // in savedData's wider format before classifying.
            int firstModel = instance.numAttributes() - 1, column = firstModel;

            // use an existing training instance as a template for the format
            inst = new DenseInstance(savedData.lastInstance().weight(),
                    savedData.lastInstance().toDoubleArray());
            inst.setDataset(savedData);
            // copy over the original attribute values
            for (int i = 0; i < instance.numAttributes(); i++) {
                inst.setValue(i, instance.value(i));
            }
            column = firstModel;
            // fill the probability columns from the oracle's distribution
            double[] pred = getOracleClassifier().distributionForInstance(inst);
            for (int n = 0; n < inst.numClasses(); n++) {
                inst.setValue(column++, pred[n]);
            }
        }
        return getExtractedClassifier().distributionForInstance(inst);
    }

    /**
     * Performs the model construction; simply delegates to
     * {@link #buildClassifier()}.
     *
     * @throws Exception if building fails
     */
    @Override
    protected void build() throws Exception {
        buildClassifier();
    }

    /**
     * Builds the oracle classifier (when required) and then trains the
     * extraction classifier on a dataset assembled according to the selected
     * dataset-use strategy (combinations of I/E/X) and sampling scheme.
     *
     * @throws Exception if either classifier cannot be built
     */
    @Override
    protected void buildClassifier() throws Exception {
        // Decode m_DatasetUse: I = train set with true labels (accuracy),
        // E = train set relabeled by the oracle (training fidelity),
        // X = test set labeled by the oracle (test fidelity).
        boolean useI = false, useE = false, useX = false, trainBase = true;
        switch (m_DatasetUse) {
            case 1: //new Tag(1, "I", "Induction (I): maximizes training accuracy")
                useI = true;
                // NOTE(review): for pure induction the oracle is only built
                // when insight is enabled — confirm this is intentional.
                trainBase = m_UseInsight;
                break;
            case 2: //new Tag(2, "E", "Extraction (E): maximizes training fidelity"),
                useE = true;
                break;
            case 3: //new Tag(3, "X", "Explanation (X): maximizes test fidelity"),
                useX = true;
                break;
            case 4: //new Tag(4, "IE", "Exduction (IE): maximizes training accuracy and training fidelity"),
                useI = true;
                useE = true;
                break;
            case 5: //new Tag(5, "IX", "Indanation (IX): maximizes training accuracy and test fidelity"),
                useI = true;
                useX = true;
                break;
            case 6: //new Tag(6, "EX", "Extanation (EX): maximizes training fidelity and test fidelity"),
                useE = true;
                useX = true;
                break;
            case 7: //new Tag(7, "IEX", "Indextanation (IEX): maximizes training accuracy, training fidelity and test fidelity"),
                useI = true;
                useE = true;
                useX = true;
                break;
        }

        // build the base classifier using training data.
        if (trainBase) {
            getOracleClassifier().buildClassifier(deepInstancesCopy(m_Trainset));
        }

        // Derive per-instance weights balancing test-set (X) against
        // train-set (I/E) instances; m_AlphaWeight scales the X share.
        double xWeight = (m_Trainset.numInstances() + 0.0) / (m_Testset.numInstances() + m_Trainset.numInstances() + 0.0);
        double ieWeight = xWeight;
        xWeight *= m_AlphaWeight;
        xWeight = (xWeight / (xWeight + ieWeight));
        ieWeight = 1 - xWeight;
        if (xWeight <= 0) {
            // m_AlphaWeight == 0: fall back to uniform weights
            xWeight = 1;
            ieWeight = 1;
        } else {
            // normalize so that train-set instances carry weight 1
            xWeight = xWeight / ieWeight;
            ieWeight = ieWeight / ieWeight;
        }

        // Start from a weighted copy of the training data (I) or from an
        // empty dataset with the same structure.
        Instances newData;
        if (useI) {
            newData = deepInstancesCopy(m_Trainset, ieWeight);
        } else {
            /*ArrayList<Attribute> attributes = new ArrayList<Attribute>(
            m_Trainset.numAttributes());
            for (int i = 0; i < m_Trainset.numAttributes(); i++)
            attributes.add(m_Trainset.attribute(i));
            newData = new Instances(m_Trainset.relationName(), attributes, 0);
            newData.setClassIndex(m_Trainset.classIndex());*/
            newData = m_Trainset.stringFreeStructure();
        }

        switch (m_Sampling) {
            case 1: //new Tag(1, "N", "Normal, no sampling")
                if (useE) {
                    newData = addEnsemblePredictions(newData, deepInstancesCopy(m_Trainset, ieWeight));
                }
                if (useX) {
                    newData = addEnsemblePredictions(newData, deepInstancesCopy(m_Testset, xWeight));
                }
                break;
            case 2: //new Tag(2, "S", "Sampling of X and/or E, adding NumCopies of copies of each instance with classes distributed based on the base classifiers predictions"),
                if (useE) {
                    newData = addEnsemblePredictions(newData, deepInstancesCopy(m_Trainset, ieWeight));
                }
                // NOTE(review): the two addBaseClassifierPredictions calls in
                // this case (and the sampling calls in cases 3 and 4) run
                // regardless of useE/useX — possibly they should sit inside
                // the respective if-blocks; confirm against the design.
                newData = addBaseClassifierPredictions(newData, SampleInstances(deepInstancesCopy(m_Trainset, ieWeight)));
                if (useX) {
                    newData = addEnsemblePredictions(newData, deepInstancesCopy(m_Testset, xWeight));
                }
                newData = addBaseClassifierPredictions(newData, SampleInstances(deepInstancesCopy(m_Testset, xWeight)));
                break;
            case 3: //new Tag(3, "Sb", "SMOTEing of X and/or E, adding NumCopies of new instances similar to each instance and its NumNeighbors of instances with classes distributed based on the base classifiers predictions"),
                if (useE) {
                    newData = addEnsemblePredictions(newData, deepInstancesCopy(m_Trainset, ieWeight));
                }
                newData = addBaseClassifierPredictions(newData, doSMOTE(deepInstancesCopy(m_Trainset, ieWeight)));
                if (useX) {
                    newData = addEnsemblePredictions(newData, deepInstancesCopy(m_Testset, xWeight));
                }
                newData = addBaseClassifierPredictions(newData, doSMOTE(deepInstancesCopy(m_Testset, xWeight)));
                break;
            case 4: //new Tag(4, "Se", "SMOTEing of X and/or E, making NumCopies of new instances similar to each instance and its NumNeighbors of instances with classes distributed based on the ensemble prediction"),
                if (useE) {
                    newData = addEnsemblePredictions(newData, deepInstancesCopy(m_Trainset, ieWeight));
                }
                newData = addEnsemblePredictions(newData, doSMOTE(deepInstancesCopy(m_Trainset, ieWeight)));
                if (useX) {
                    newData = addEnsemblePredictions(newData, deepInstancesCopy(m_Testset, xWeight));
                }
                newData = addEnsemblePredictions(newData, doSMOTE(deepInstancesCopy(m_Testset, xWeight)));
                break;
            case 5: //new Tag(5, "W", "Sampling of X and/or E, making NumClasses of copies of each instance with weights per class according to the ensemble distribution"), };
                if (useE) {
                    newData = addWithWeights(newData, deepInstancesCopy(m_Trainset, ieWeight));
                }
                if (useX) {
                    newData = addWithWeights(newData, deepInstancesCopy(m_Testset, xWeight));
                }
                break;
        }

        // Optionally append one "probability_i" attribute per class, filled
        // with the oracle's predicted distribution for each instance.
        if (getAddProbabilityColumns()) {
            int firstModel = newData.numAttributes() - 1, column = firstModel;
            for (int i = 0; i < newData.numClasses(); i++) {
                newData.insertAttributeAt(new Attribute("probability_" + i),
                        column++);
            }
            for (int i = 0; i < newData.numInstances(); i++) {
                column = firstModel;
                double[] pred = getOracleClassifier().distributionForInstance(newData.instance(i));
                for (int n = 0; n < newData.numClasses(); n++) {
                    newData.instance(i).setValue(column++, pred[n]);
                }
            }
        }

        newData.deleteWithMissingClass();

        // Train the transparent model on the assembled data and keep the
        // data around (getDistribution needs its format).
        getExtractedClassifier().buildClassifier(newData);
        savedData = newData;

        if (model2file) {
            printModelToFile();
        }
        initializeDiversityMeasureImplementation(this, getTrainingSet());
    }

    /**
     * Deep-copies a dataset, giving every copied instance weight 1.
     *
     * @param dataset the dataset to copy
     * @return the copy
     */
    private Instances deepInstancesCopy(Instances dataset) {
        return deepInstancesCopy(dataset, 1);
    }

    /**
     * Deep-copies a dataset, assigning the given weight to every copied
     * instance.
     *
     * @param oldDataset the dataset to copy
     * @param weight the weight to assign to each copied instance
     * @return the weighted copy
     */
    private Instances deepInstancesCopy(Instances oldDataset, double weight) {
        Instances newDataset = oldDataset.stringFreeStructure();
        int total = oldDataset.numInstances();
        for (int index = 0; index < total; index++) {
            Instance copy = new DenseInstance(oldDataset.instance(index));
            copy.setDataset(newDataset);
            copy.setWeight(weight);
            newDataset.add(copy);
        }
        newDataset.setClassIndex(oldDataset.classIndex());
        return newDataset;
    }

    /**
     * Builds a dataset containing NumCopies duplicates of every instance in
     * the source.
     *
     * @param source the instances to duplicate
     * @return a new dataset with the duplicated instances
     * @throws Exception if copying fails
     */
    private Instances SampleInstances(Instances source) throws Exception {
        Instances duplicated = source.stringFreeStructure();
        int count = source.numInstances();
        for (int idx = 0; idx < count; idx++) {
            Instance original = source.instance(idx);
            for (int copyNo = 0; copyNo < getNumCopies(); copyNo++) {
                duplicated.add((Instance) original.copy());
            }
        }
        return duplicated;
    }

    /**
     * Relabels every instance in {@code toAdd} with the oracle's predicted
     * class and appends it to {@code newData}.
     *
     * @param newData the dataset to extend (modified in place)
     * @param toAdd the instances to relabel and add
     * @return {@code newData}
     * @throws Exception if the oracle fails to classify an instance
     */
    protected Instances addEnsemblePredictions(Instances newData, Instances toAdd)
            throws Exception {
        int count = toAdd.numInstances();
        for (int idx = 0; idx < count; idx++) {
            Instance current = toAdd.instance(idx);
            int numC = newData.numClasses();
            double predicted = getOracleClassifier().classifyInstance(current);
            if (predicted != current.classValue()) {
                current.setClassValue(predicted);
            }
            newData.add(current);
            // adding must never change the class attribute's value set
            assert (numC == newData.numClasses());
        }
        return newData;
    }

    /**
     * For each source instance, adds one copy per class with non-zero oracle
     * probability, weighted by that probability.
     *
     * @param newData the dataset to extend (modified in place)
     * @param source the instances to expand and add
     * @return {@code newData}
     * @throws Exception if the oracle fails to produce a distribution
     */
    protected Instances addWithWeights(Instances newData, Instances source)
            throws Exception {
        int numClasses = source.numClasses();
        for (int idx = 0; idx < source.numInstances(); idx++) {
            Instance original = source.instance(idx);
            double[] dist = getOracleClassifier().distributionForInstance(original);
            for (int cls = 0; cls < numClasses; cls++) {
                if (dist[cls] <= 0) {
                    continue; // skip classes the oracle rules out
                }
                Instance weighted = (Instance) original.copy();
                weighted.setClassValue(cls);
                weighted.setWeight(dist[cls]);
                newData.add(weighted);
            }
        }
        return newData;
    }

    /**
     * Appends each instance in {@code source} to {@code newData}, assigning a
     * class label drawn stochastically from the combined per-member
     * predictions of the oracle ensemble (averaged distributions for AVG,
     * vote shares for MV).
     *
     * @param newData the dataset to extend (modified in place)
     * @param source the instances to label and add
     * @return {@code newData}
     * @throws Exception if an ensemble member fails to predict
     */
    protected Instances addBaseClassifierPredictions(Instances newData, Instances source)
            throws Exception {
        List<Classifier> baseClassifiers = getOracleClassifier().getEnsembleMembers();
        // FIX: seed from getRandomSeed() for reproducible results, matching
        // doSMOTE below; previously an unseeded Random was used here.
        Random rand = new Random(getRandomSeed());

        for (int i = 0; i < source.numInstances(); i++) {
            Instance inst = source.instance(i);
            double[] classification = new double[baseClassifiers.size()];
            double[][] distribution = new double[baseClassifiers.size()][];
            double[] sumClass = new double[source.numClasses()];
            double[] sumDist = new double[source.numClasses()];
            // accumulate the members' vote shares and averaged distributions
            for (int j = 0; j < baseClassifiers.size(); j++) {
                classification[j] = baseClassifiers.get(j).classifyInstance(inst);
                distribution[j] = baseClassifiers.get(j).distributionForInstance(inst);
                sumClass[(int) classification[j]] += 1.0 / baseClassifiers.size();
                for (int d = 0; d < source.numClasses(); d++) {
                    sumDist[d] += distribution[j][d] / baseClassifiers.size();
                }
            }

            int numC = newData.numClasses();
            double[] classProb = new double[source.numClasses()], distr = null;
            // FIX: Double.MIN_VALUE is the smallest *positive* double, not a
            // minimum sentinel; use negative infinity so the first candidate
            // always wins the initial comparison.
            double newClass = 0, maxProb = Double.NEGATIVE_INFINITY;
            switch (m_Combiner) {
                case 1: // AVG: averaged distributions
                    distr = sumDist;
                    break;
                case 2: // MV: majority-vote shares
                    distr = sumClass;
                    break;
            }
            // roulette-style draw: scale each class's weight by a uniform
            // random factor and pick the maximum
            for (int d = 0; d < source.numClasses(); d++) {
                classProb[d] = rand.nextDouble() * distr[d];
                if (classProb[d] > maxProb) {
                    maxProb = classProb[d];
                    newClass = d;
                }
            }

            inst.setClassValue(newClass);
            newData.add(inst);
            // adding must never change the class attribute's value set
            assert (numC == newData.numClasses());
        }
        return newData;
    }

    /**
     * Generates synthetic instances via SMOTE: for every instance in
     * {@code sample}, {@code m_numCopies} synthetic instances are created by
     * interpolating towards one of its NumNeighbors nearest neighbors
     * (numeric/date attributes) or by majority vote over the neighborhood
     * (nominal attributes). Nominal distances use the Value Distance Metric.
     *
     * @param sample the instances to SMOTE
     * @return a dataset containing only the synthetic instances (class values
     *         are left at their defaults; callers re-label them afterwards)
     * @throws Exception if synthetic generation fails
     */
    @SuppressWarnings("unchecked")
    protected Instances doSMOTE(Instances sample) throws Exception {
        Instances synthetics = sample.stringFreeStructure();

        Enumeration instanceEnum;

        // compute Value Distance Metric matrices for nominal features
        Map<Attribute, double[][]> vdmMap = new HashMap<Attribute, double[][]>();
        Enumeration<Attribute> attrEnum = sample.enumerateAttributes();
        while (attrEnum.hasMoreElements()) {
            Attribute attr = attrEnum.nextElement();
            if (!attr.equals(sample.classAttribute())) {
                if (attr.isNominal() || attr.isString()) {
                    double[][] vdm = new double[attr.numValues()][attr.numValues()];
                    vdmMap.put(attr, vdm);
                    int[] featureValueCounts = new int[attr.numValues()];
                    int[][] featureValueCountsByClass = new int[sample.classAttribute().numValues()][attr.numValues()];
                    instanceEnum = sample.enumerateInstances();
                    while (instanceEnum.hasMoreElements()) {
                        Instance instance = (Instance) instanceEnum.nextElement();
                        int value = (int) instance.value(attr);
                        int classValue = (int) instance.classValue();
                        featureValueCounts[value]++;
                        featureValueCountsByClass[classValue][value]++;
                    }
                    for (int valueIndex1 = 0; valueIndex1 < attr.numValues(); valueIndex1++) {
                        for (int valueIndex2 = 0; valueIndex2 < attr.numValues(); valueIndex2++) {
                            double sum = 0;
                            for (int classValueIndex = 0; classValueIndex < sample.numClasses(); classValueIndex++) {
                                // NOTE(review): c1 or c2 can be 0 for unused
                                // feature values, yielding NaN terms — the
                                // original behaves the same way; confirm
                                // whether unused values can occur here.
                                double c1i = (double) featureValueCountsByClass[classValueIndex][valueIndex1];
                                double c2i = (double) featureValueCountsByClass[classValueIndex][valueIndex2];
                                double c1 = (double) featureValueCounts[valueIndex1];
                                double c2 = (double) featureValueCounts[valueIndex2];
                                double term1 = c1i / c1;
                                double term2 = c2i / c2;
                                sum += Math.abs(term1 - term2);
                            }
                            vdm[valueIndex1][valueIndex2] = sum;
                        }
                    }
                }
            }
        }

        // use this random source for all required randomness
        Random rand = new Random(getRandomSeed());

        // the main loop to handle computing nearest neighbors and generating
        // SMOTE examples from each instance in the original data
        // FIX: the neighbor array must hold getNearestNeighbors() entries; it
        // was previously sized m_numCopies, causing an
        // ArrayIndexOutOfBoundsException whenever NumNeighbors > NumCopies.
        Instance[] nnArray = new Instance[getNearestNeighbors()];
        for (int i = 0; i < sample.numInstances(); i++) {
            Instance instanceI = sample.instance(i);
            // find k nearest neighbors for each instance
            List<Object[]> distanceToInstance = new LinkedList<Object[]>();
            for (int j = 0; j < sample.numInstances(); j++) {
                Instance instanceJ = sample.instance(j);
                if (i != j) {
                    double distance = 0;
                    attrEnum = sample.enumerateAttributes();
                    while (attrEnum.hasMoreElements()) {
                        Attribute attr = (Attribute) attrEnum.nextElement();
                        if (!attr.equals(sample.classAttribute())) {
                            double iVal = instanceI.value(attr);
                            double jVal = instanceJ.value(attr);
                            if (attr.isNumeric()) {
                                distance += Math.pow(iVal - jVal, 2);
                            } else {
                                distance += ((double[][]) vdmMap.get(attr))[(int) iVal][(int) jVal];
                            }
                        }
                    }
                    distance = Math.pow(distance, .5);
                    distanceToInstance.add(new Object[]{distance, instanceJ});
                }
            }

            // sort the neighbors according to distance
            // FIX: use Double.compare — the previous "(int) ceil(d1 - d2)"
            // treated any pair with d1 < d2 < d1 + 1 as equal, violating the
            // Comparator contract and producing unstable neighbor orderings.
            Collections.sort(distanceToInstance, new Comparator<Object[]>() {

                public int compare(Object[] o1, Object[] o2) {
                    return Double.compare((Double) o1[0], (Double) o2[0]);
                }
            });

            // populate the actual nearest neighbor instance array
            Iterator entryIterator = distanceToInstance.iterator();
            int j = 0;
            while (entryIterator.hasNext() && j < getNearestNeighbors()) {
                nnArray[j] = (Instance) ((Object[]) entryIterator.next())[1];
                j++;
            }
            // FIX: only draw from the neighbors actually found — for samples
            // smaller than NumNeighbors + 1 the tail of nnArray is null and
            // the original code could dereference it.
            int neighborsFound = j;
            if (neighborsFound == 0) {
                continue; // a single-instance sample has no neighbors
            }

            // create synthetic examples
            int n = m_numCopies;
            while (n > 0) {
                double[] values = new double[sample.numAttributes()];
                int nn = rand.nextInt(neighborsFound);
                attrEnum = sample.enumerateAttributes();
                while (attrEnum.hasMoreElements()) {
                    Attribute attr = (Attribute) attrEnum.nextElement();
                    if (!attr.equals(sample.classAttribute())) {
                        if (attr.isNumeric()) {
                            // interpolate a random fraction of the way
                            // towards the chosen neighbor
                            double dif = nnArray[nn].value(attr)
                                    - instanceI.value(attr);
                            double gap = rand.nextDouble();
                            values[attr.index()] = (double) (instanceI.value(attr) + gap * dif);
                        } else if (attr.isDate()) {
                            double dif = nnArray[nn].value(attr)
                                    - instanceI.value(attr);
                            double gap = rand.nextDouble();
                            values[attr.index()] = (long) (instanceI.value(attr) + gap * dif);
                        } else {
                            // nominal: majority vote over the instance and
                            // its found neighbors
                            int[] valueCounts = new int[attr.numValues()];
                            int iVal = (int) instanceI.value(attr);
                            valueCounts[iVal]++;
                            for (int nnEx = 0; nnEx < neighborsFound; nnEx++) {
                                int val = (int) nnArray[nnEx].value(attr);
                                valueCounts[val]++;
                            }
                            int maxIndex = 0;
                            int max = Integer.MIN_VALUE;
                            for (int index = 0; index < attr.numValues(); index++) {
                                if (valueCounts[index] > max) {
                                    max = valueCounts[index];
                                    maxIndex = index;
                                }
                            }
                            values[attr.index()] = maxIndex;
                        }
                    }
                }
                Instance synthetic = new DenseInstance(1.0, values);
                synthetics.add(synthetic);
                n--;
            }
        }
        return synthetics;
    }

    /**
     * Returns the capabilities of this classifier: the intersection of the
     * oracle's and the extraction classifier's capabilities, with every
     * capability enabled as a dependency.
     *
     * @return the capabilities of this classifier
     */
    public Capabilities getCapabilities() {
        Capabilities caps;

        if (getOracleClassifier() == null) {
            caps = new Capabilities(this);
        } else {
            caps = getOracleClassifier().getCapabilities();
        }

        if (getExtractedClassifier() != null) {
            caps.and(getExtractedClassifier().getCapabilities());
        }

        // every capability is a potential dependency of the wrapped schemes
        for (Capability capability : Capability.values()) {
            caps.enableDependency(capability);
        }

        caps.setOwner(this);

        return caps;
    }

    /**
     * Returns an enumeration describing the available options, including the
     * options of the configured oracle and extraction classifiers and of the
     * superclass.
     *
     * @return an enumeration of all the available options
     */
    public Enumeration<Option> listOptions() {

        Vector<Option> newVector = new Vector<Option>();

        newVector.addElement(new Option(
                "\tFull name of oracle classifier.\n"
                + "\t(default: " + defaultOracleClassifierString() + ")",
                "O", 1, "-O"));

        newVector.addElement(new Option(
                "",
                "", 0, "\nOptions specific to oracle classifier "
                + getOracleClassifier().getClass().getName() + ":"));
        Enumeration<Option> enu = ((OptionHandler) getOracleClassifier()).listOptions();
        while (enu.hasMoreElements()) {
            newVector.addElement(enu.nextElement());
        }

        newVector.addElement(new Option(
                "\tFull name of extraction classifier.\n" + "\t(default: "
                + defaultExtractedClassifierString() + ")", "E", 1,
                "-E"));

        newVector.addElement(new Option("", "", 0,
                "\nOptions specific to classifier "
                + getExtractedClassifier().getClass().getName() + ":"));
        enu = ((OptionHandler) getExtractedClassifier()).listOptions();
        while (enu.hasMoreElements()) {
            newVector.addElement(enu.nextElement());
        }

        newVector.addElement(new Option(addProbabilityColumnsTipText(), "P", 0,
                "-P"));

        newVector.addElement(new Option(ModelToFileTipText(), "F", 1, "-F"));

        newVector.addElement(new Option(SamplingTipText(), "S", 0, "-S"));

        newVector.addElement(new Option(CombinerTipText(), "C", 0, "-C"));

        newVector.addElement(new Option(NumCopiesTipText(), "N", 1, "-N"));

        newVector.addElement(new Option(AlphaWeightTipText(), "W", 1, "-W"));

        // NOTE(review): verify "(default 1)" against the seed's actual
        // default in the superclass.
        newVector.addElement(new Option("\tSpecifies the random number seed\n"
                + "\t(default 1)", "R", 1, "-R <num>"));

        // FIX: help text claimed "(default 5)" but m_NearestNeighbors
        // defaults to 3.
        newVector.addElement(new Option(
                "\tSpecifies the number of nearest neighbors to use.\n"
                + "\t(default 3)\n", "K", 1, "-K <nearest-neighbors>"));

        newVector.addElement(new Option(datasetUseTipText(), "D", 1, "-D "
                + Tag.toOptionList(TAGS_DatasetUse)));

        Enumeration<Option> en = super.listOptions();
        while (en.hasMoreElements()) {
            newVector.addElement(en.nextElement());
        }

        return newVector.elements();
    }

    /**
     * Parses a given list of options. Valid options are:
     * <p>
     *
     * -O classifierstring <br>
     * Full class name of the oracle (base) classifier, followed by its options.
     * <p>
     *
     * -E classifierstring <br>
     * Full class name of the rule extraction classifier, followed by its
     * options.
     * <p>
     *
     * Further options: -P (probability columns), -F (write model to file),
     * -S (sampling method), -N (number of copies), -W (alpha weight),
     * -R (random seed), -K (nearest neighbors), -C (combiner), -D (dataset use).
     *
     * @param options
     *            the list of options as an array of strings
     * @exception Exception
     *                if an option is not supported
     */
    public void setOptions(String[] options) throws Exception {

        String classifierName = Utils.getOption('O', options);

        if (classifierName.length() > 0) {

            // Set the classifier twice: first without options so that a valid
            // classifier is in place even if parsing the partitioned options
            // below fails.
            setOracleClassifier(AbstractClassifier.forName(classifierName, null));
            setOracleClassifier(AbstractClassifier.forName(classifierName,
                    Utils.partitionOptions(options)));
        } else {

            // Same two-step pattern for the default oracle classifier.
            setOracleClassifier(AbstractClassifier.forName(defaultOracleClassifierString(), null));
            setOracleClassifier(AbstractClassifier.forName(defaultOracleClassifierString(),
                    Utils.partitionOptions(options)));
        }

        classifierName = Utils.getOption('E', options);

        if (classifierName.length() > 0) {

            // Two-step pattern again for the rule extraction classifier.
            // NOTE(review): partitionOptions consumes everything after "--",
            // so the second call likely sees an empty partition — confirm
            // whether both classifiers are really meant to share it.
            setExtractedClassifier(AbstractClassifier.forName(classifierName, null));
            setExtractedClassifier(AbstractClassifier.forName(classifierName,
                    Utils.partitionOptions(options)));
        } else {

            setExtractedClassifier(AbstractClassifier.forName(defaultExtractedClassifierString(), null));
            setExtractedClassifier(AbstractClassifier.forName(defaultExtractedClassifierString(),
                    Utils.partitionOptions(options)));
        }

        setAddProbabilityColumns(Utils.getFlag('P', options));

        // BUGFIX: '-F' toggles writing the model to file. Previously this line
        // called setAddProbabilityColumns again, so the '-P' setting was
        // overwritten and model2file was never set from the command line.
        setModelToFile(Utils.getFlag('F', options));

        String tmp = Utils.getOption("S", options);
        if (tmp.length() != 0) {
            setSampling(new SelectedTag(tmp, TAGS_Sampling));
        } else {
            setSampling(new SelectedTag(m_Sampling, TAGS_Sampling));
        }

        String numCopies = Utils.getOption('N', options);
        if (numCopies.length() != 0) {
            setNumCopies(Integer.parseInt(numCopies));
        } else {
            setNumCopies(1);
        }

        String alphaWeight = Utils.getOption('W', options);
        if (alphaWeight.length() != 0) {
            setAlphaWeight(Double.parseDouble(alphaWeight));
        } else {
            setAlphaWeight(0);
        }

        String seedStr = Utils.getOption('R', options);
        if (seedStr.length() != 0) {
            setRandomSeed(Integer.parseInt(seedStr));
        } else {
            setRandomSeed(1);
        }

        String nnStr = Utils.getOption('K', options);
        if (nnStr.length() != 0) {
            setNearestNeighbors(Integer.parseInt(nnStr));
        } else {
            setNearestNeighbors(5);
        }

        tmp = Utils.getOption("C", options);
        if (tmp.length() != 0) {
            setCombiner(new SelectedTag(tmp, TAGS_Combiner));
        } else {
            setCombiner(new SelectedTag(m_Combiner, TAGS_Combiner));
        }

        tmp = Utils.getOption("D", options);
        if (tmp.length() != 0) {
            setDatasetUse(new SelectedTag(tmp, TAGS_DatasetUse));
        } else {
            setDatasetUse(new SelectedTag(m_DatasetUse, TAGS_DatasetUse));
        }

        super.setOptions(options);
    }

    /**
     * Gets the current settings of the Classifier.
     *
     * @return an array of strings suitable for passing to setOptions
     */
    public String[] getOptions() {

        Vector<String> result = new Vector<String>();

        // Oracle classifier class name, followed by its own options after a
        // "--" separator (only when it has any).
        String[] classifierOptions = ((OptionHandler) getOracleClassifier()).getOptions();
        result.add("-O");
        result.add(getOracleClassifier().getClass().getName());
        if (classifierOptions.length > 0) {
            result.add("--");
            for (String option : classifierOptions) {
                if (option != null && !option.isEmpty()) {
                    result.add(option);
                }
            }
        }

        // Extracted classifier class name plus its options, same pattern.
        classifierOptions = ((OptionHandler) getExtractedClassifier()).getOptions();
        result.add("-E");
        result.add(getExtractedClassifier().getClass().getName());
        if (classifierOptions.length > 0) {
            result.add("--");
            for (String option : classifierOptions) {
                if (option != null && !option.isEmpty()) {
                    result.add(option);
                }
            }
        }

        if (addProbabilities) {
            result.add("-P");
        }
        if (model2file) {
            result.add("-F");
        }
        result.add("-S");
        result.add("" + getSampling());
        result.add("-N");
        result.add("" + getNumCopies());
        result.add("-W");
        result.add("" + getAlphaWeight());
        result.add("-C");
        result.add("" + getCombiner());
        result.add("-D");
        result.add("" + getDatasetUse());
        result.add("-R");
        result.add("" + getRandomSeed());

        result.add("-K");
        result.add("" + getNearestNeighbors());

        // Options handled by the superclass.
        Collections.addAll(result, super.getOptions());

        // The original also computed an 'extraOptionsLength' counter that was
        // never read; it has been removed.
        return result.toArray(new String[result.size()]);
    }

    /**
     * Returns the tip text for this property.
     *
     * @return tip text suitable for displaying in the explorer/experimenter gui
     */
    public String randomSeedTipText() {
        return "The seed used for random sampling.";
    }

    /**
     * Returns the seed that drives random sampling in this classifier.
     *
     * @return the current random number seed
     */
    public int getRandomSeed() {
        return m_RandomSeed;
    }

    /**
     * Replaces the seed that drives random sampling in this classifier.
     *
     * @param value
     *            the seed to use from now on
     */
    public void setRandomSeed(int value) {
        m_RandomSeed = value;
    }

    /**
     * Returns the tip text for this property.
     *
     * @return tip text suitable for displaying in the explorer/experimenter gui
     */
    public String nearestNeighborsTipText() {
        return "The number of nearest neighbors to use.";
    }

    /**
     * Sets the number of nearest neighbors to use. Values below 1 are
     * rejected: a warning is printed to stderr and the previous value kept
     * (legacy behavior — no exception is thrown).
     *
     * @param value
     *            the number of nearest neighbors to use
     */
    public void setNearestNeighbors(int value) {
        if (value < 1) {
            System.err.println("At least 1 neighbor necessary!");
            return;
        }
        m_NearestNeighbors = value;
    }

    /**
     * Returns the number of nearest neighbors to use.
     *
     * @return the number of nearest neighbors
     */
    public int getNearestNeighbors() {
        return m_NearestNeighbors;
    }

    /**
     * Returns the tip text for this property.
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String datasetUseTipText() {
        return "Determines which combinations of training data, predictions on training data and predictions on test data that is used to train the oracle classifier.";
    }

    /**
     * Gets the current dataset-use setting.
     *
     * @return the current value of m_DatasetUse wrapped in a SelectedTag
     *         drawn from TAGS_DatasetUse
     */
    public SelectedTag getDatasetUse() {
        return new SelectedTag(m_DatasetUse, TAGS_DatasetUse);
    }

    /**
     * Sets the dataset-use setting. The call is silently ignored when the
     * supplied tag does not belong to TAGS_DatasetUse.
     *
     * @param value
     *            the new dataset-use setting
     */
    public void setDatasetUse(SelectedTag value) {
        if (value.getTags() == TAGS_DatasetUse) {
            m_DatasetUse = value.getSelectedTag().getID();
        }
    }

    /**
     * Returns the tip text for this property.
     *
     * NOTE(review): this string is identical to CombinerTipText(); it looks
     * copy-pasted and probably should describe the sampling method instead —
     * confirm before changing the user-visible text.
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String SamplingTipText() {
        return "Determines which combiner method to use to determine class probability when sampling.";
    }

    /**
     * Gets the current sampling setting.
     *
     * @return the current value of m_Sampling wrapped in a SelectedTag drawn
     *         from TAGS_Sampling
     */
    public SelectedTag getSampling() {
        return new SelectedTag(m_Sampling, TAGS_Sampling);
    }

    /**
     * Sets the sampling setting. The call is silently ignored when the
     * supplied tag does not belong to TAGS_Sampling.
     *
     * @param value
     *            the new sampling setting
     */
    public void setSampling(SelectedTag value) {
        if (value.getTags() == TAGS_Sampling) {
            m_Sampling = value.getSelectedTag().getID();
        }
    }

    /**
     * Returns the tip text for this property.
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String CombinerTipText() {
        return "Determines which combiner method to use to determine class probability when sampling.";
    }

    /**
     * Gets the current combiner setting.
     *
     * @return the current value of m_Combiner wrapped in a SelectedTag drawn
     *         from TAGS_Combiner
     */
    public SelectedTag getCombiner() {
        return new SelectedTag(m_Combiner, TAGS_Combiner);
    }

    /**
     * Sets the combiner setting. The call is silently ignored when the
     * supplied tag does not belong to TAGS_Combiner.
     *
     * @param value
     *            the new combiner setting
     */
    public void setCombiner(SelectedTag value) {
        if (value.getTags() == TAGS_Combiner) {
            m_Combiner = value.getSelectedTag().getID();
        }
    }

    /**
     * Returns the tip text for this property.
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String addProbabilityColumnsTipText() {
        return "Adds columns with probability distributions from base classifier for each class.";
    }

    /**
     * Gets whether probability-distribution columns are added.
     *
     * @return true if probability columns are added
     */
    public boolean getAddProbabilityColumns() {

        return addProbabilities;
    }

    /**
     * Sets whether probability-distribution columns are added. When enabled
     * and the extracted classifier is a Grex instance, the Grex classifier is
     * also switched into probability mode (side effect).
     *
     * @param value
     *            true to add probability columns
     */
    public void setAddProbabilityColumns(boolean value) {

        addProbabilities = value;
        if (addProbabilities
                && getExtractedClassifier() instanceof weka.classifiers.trees.Grex) {
            ((weka.classifiers.trees.Grex) getExtractedClassifier()).useProbabilities();
        }
    }

    /**
     * Returns the tip text for this property.
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String ModelToFileTipText() {
        return "Prints the Oracle Classifier to file using toString.";
    }

    /**
     * Gets whether the built models are appended to a file.
     *
     * @return true if the models are written to file
     */
    public boolean getModelToFile() {

        return model2file;
    }

    /**
     * Sets whether the built models are appended to a file.
     *
     * @param value
     *            true to write the models to file
     */
    public void setModelToFile(boolean value) {

        model2file = value;
    }

    /**
     * Returns the tip text for this property.
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String NumCopiesTipText() {
        return "The number of new sampled instances.";
    }

    /**
     * Gets the number of new sampled instances.
     *
     * @return the number of copies
     */
    public int getNumCopies() {

        return m_numCopies;
    }

    /**
     * Sets the number of new sampled instances. No validation is performed
     * here; setOptions defaults this to 1.
     *
     * @param value
     *            the number of copies
     */
    public void setNumCopies(int value) {

        m_numCopies = value;
    }

    /**
     * Returns the tip text for this property.
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String AlphaWeightTipText() {
        return "Weight used to emphasize the X instances. When set to 0, no weighting is used.";
    }

    /**
     * Gets the alpha weight used to emphasize the X instances.
     *
     * @return the alpha weight (0 means no weighting)
     */
    public double getAlphaWeight() {

        return m_AlphaWeight;
    }

    /**
     * Sets the alpha weight used to emphasize the X instances.
     *
     * @param value
     *            the alpha weight; 0 disables weighting
     */
    public void setAlphaWeight(double value) {

        m_AlphaWeight = value;
    }

    /**
     * Returns the tip text for this property.
     *
     * @return tip text for this property suitable for displaying in the
     *         explorer/experimenter gui
     */
    public String ExtractedClassifierTipText() {
        return "The rule extraction classifier to be used.";
    }

    /**
     * Sets the rule extraction classifier.
     *
     * @param classifier
     *            a rule extraction classifier with all options set.
     */
    public void setExtractedClassifier(Classifier classifier) {

        extractedClassifier = classifier;
    }

    /**
     * Gets the rule extraction classifier.
     *
     * @return the rule extraction classifier
     */
    public Classifier getExtractedClassifier() {

        return extractedClassifier;
    }

    /**
     * Sets the oracle (base) classifier.
     *
     * @param newClassifier the classifier to use as the oracle.
     */
    public void setOracleClassifier(Classifier newClassifier) {
        oracleClassifier = newClassifier;
    }

    /**
     * Gets the oracle (base) classifier.
     *
     * @return the classifier currently used as the oracle
     */
    public Classifier getOracleClassifier() {
        return oracleClassifier;
    }

    /**
     * Builds the classifier specification string: the class name of the base
     * classifier followed by its command-line options.
     *
     * @return the classifier specification string
     */
    protected String getClassifierSpec() {
        Classifier base = getClassifier();
        String opts = Utils.joinOptions(((OptionHandler) base).getOptions());
        return base.getClass().getName() + " " + opts;
    }

    /**
     * Returns a textual description of both the oracle (base) model and the
     * rule extraction model, or a placeholder when a model has not been built.
     *
     * @return a human-readable representation of this classifier
     */
    @Override
    public String toString() {

        if (getOracleClassifier() == null) {
            return "OracleClassifier: No base model built yet.";
        }

        if (getExtractedClassifier() == null) {
            return "OracleClassifier: No rule extraction model built yet.";
        }

        // StringBuilder instead of repeated String concatenation.
        StringBuilder result = new StringBuilder("Base classifier is:\n");
        result.append(getOracleClassifier().toString());
        result.append("\nRule Extraction classifier is:\n");
        result.append(getExtractedClassifier().toString());

        return result.toString();
    }

    /**
     * Command-line entry point: runs this classifier with the standard WEKA
     * evaluation harness.
     *
     * @param args the standard WEKA command-line options
     * @throws Exception if building or evaluating the classifier fails
     */
    public static void main(String[] args) throws Exception {
        runClassifier(new OracleClassifier(), args);
    }

    /**
     * Returns an enumeration of the additional measure names supported by this
     * classifier (fidelity, accuracy and confusion-cell counts on train/test),
     * plus any measures exposed by the extracted classifier.
     *
     * @return the names of the additional measures
     */
    public Enumeration enumerateMeasures() {
        // BUGFIX: previously every call appended the same names to
        // m_AdditionalMeasures again, so repeated calls accumulated duplicates.
        // The helper only adds a name when it is not yet present.
        addMeasureIfAbsent("measureTestFidelity");
        addMeasureIfAbsent("measureOracleTestAccuracy");
        addMeasureIfAbsent("measureExtractedTestAccuracy");
        addMeasureIfAbsent("measureTestCorrectCorrect");
        addMeasureIfAbsent("measureTestCorrectWrong");
        addMeasureIfAbsent("measureTestWrongCorrect");
        addMeasureIfAbsent("measureTestWrongWrong");
        addMeasureIfAbsent("measureTrainFidelity");
        addMeasureIfAbsent("measureOracleTrainAccuracy");
        addMeasureIfAbsent("measureExtractedTrainAccuracy");
        addMeasureIfAbsent("measureTrainCorrectCorrect");
        addMeasureIfAbsent("measureTrainCorrectWrong");
        addMeasureIfAbsent("measureTrainWrongCorrect");
        addMeasureIfAbsent("measureTrainWrongWrong");
        if (getExtractedClassifier() instanceof AdditionalMeasureProducer) {
            for (Enumeration<String> enu = ((AdditionalMeasureProducer) getExtractedClassifier()).enumerateMeasures(); enu.hasMoreElements();) {
                addMeasureIfAbsent(enu.nextElement());
            }
        }
        return m_AdditionalMeasures.elements();
    }

    /** Adds a measure name to m_AdditionalMeasures unless already present. */
    private void addMeasureIfAbsent(String name) {
        if (!m_AdditionalMeasures.contains(name)) {
            m_AdditionalMeasures.addElement(name);
        }
    }

    /**
     * Returns the value of the named additional measure. Test-set measures are
     * only computed when insight mode (m_UseInsight) is enabled. Unknown
     * measure names are delegated to the extracted classifier when it produces
     * additional measures.
     *
     * @param measureName
     *            the name of the measure to query (case-insensitive)
     * @return the measure value, or -1 when it cannot be computed (unknown
     *         name, insight disabled, or an evaluation error)
     */
    public double getMeasure(String measureName) {
        try {
            if (measureName.equalsIgnoreCase("measureTestFidelity")) {
                if (m_UseInsight) {
                    return fidelity(deepInstancesCopy(m_TestsetOriginal));
                }
            } else if (measureName.equalsIgnoreCase("measureOracleTestAccuracy")) {
                if (m_UseInsight) {
                    return accuracy(deepInstancesCopy(m_TestsetOriginal),
                            getOracleClassifier());
                }
            } else if (measureName.equalsIgnoreCase("measureExtractedTestAccuracy")) {
                if (m_UseInsight) {
                    return accuracy(deepInstancesCopy(m_TestsetOriginal),
                            getExtractedClassifier());
                }
            } else if (measureName.equalsIgnoreCase("measureTestCorrectCorrect")) {
                if (m_UseInsight) {
                    return correctCorrect(deepInstancesCopy(m_TestsetOriginal));
                }
            } else if (measureName.equalsIgnoreCase("measureTestCorrectWrong")) {
                if (m_UseInsight) {
                    return correctWrong(deepInstancesCopy(m_TestsetOriginal));
                }
            } else if (measureName.equalsIgnoreCase("measureTestWrongWrong")) {
                if (m_UseInsight) {
                    return wrongWrong(deepInstancesCopy(m_TestsetOriginal));
                }
            } else if (measureName.equalsIgnoreCase("measureTestWrongCorrect")) {
                if (m_UseInsight) {
                    return wrongCorrect(deepInstancesCopy(m_TestsetOriginal));
                }
            } else if (measureName.equalsIgnoreCase("measureTrainFidelity")) {
                return fidelity(deepInstancesCopy(m_Trainset));
            } else if (measureName.equalsIgnoreCase("measureOracleTrainAccuracy")) {
                return accuracy(deepInstancesCopy(m_Trainset), getOracleClassifier());
            } else if (measureName.equalsIgnoreCase("measureExtractedTrainAccuracy")) {
                return accuracy(deepInstancesCopy(m_Trainset),
                        getExtractedClassifier());
            } else if (measureName.equalsIgnoreCase("measureTrainCorrectCorrect")) {
                return correctCorrect(deepInstancesCopy(m_Trainset));
            } else if (measureName.equalsIgnoreCase("measureTrainCorrectWrong")) {
                return correctWrong(deepInstancesCopy(m_Trainset));
            } else if (measureName.equalsIgnoreCase("measureTrainWrongWrong")) {
                return wrongWrong(deepInstancesCopy(m_Trainset));
            } else if (measureName.equalsIgnoreCase("measureTrainWrongCorrect")) {
                return wrongCorrect(deepInstancesCopy(m_Trainset));
            } else if (getExtractedClassifier() instanceof AdditionalMeasureProducer) {
                return ((AdditionalMeasureProducer) getExtractedClassifier()).getMeasure(measureName);
            }
        } catch (Exception e) {
            // Log instead of printStackTrace, consistent with printModelToFile.
            Logger.getLogger(OracleClassifier.class.getName()).log(Level.SEVERE,
                    "Failed to compute measure " + measureName, e);
        }
        return -1;
    }

    /**
     * Counts instances the oracle misclassifies but the extracted model gets
     * right. The extracted model is only consulted when the oracle is wrong
     * (short-circuit preserved).
     */
    private double wrongCorrect(Instances data) throws Exception {
        double count = 0;
        for (int i = 0; i < data.numInstances(); i++) {
            Instance inst = data.instance(i);
            double actual = inst.classValue();
            if (getOracleClassifier().classifyInstance(inst) != actual
                    && getExtractedClassifier().classifyInstance(inst) == actual) {
                count++;
            }
        }
        return count;
    }

    /**
     * Counts instances both the oracle and the extracted model misclassify.
     * The extracted model is only consulted when the oracle is wrong
     * (short-circuit preserved).
     */
    private double wrongWrong(Instances data) throws Exception {
        double count = 0;
        for (int i = 0; i < data.numInstances(); i++) {
            Instance inst = data.instance(i);
            double actual = inst.classValue();
            if (getOracleClassifier().classifyInstance(inst) != actual
                    && getExtractedClassifier().classifyInstance(inst) != actual) {
                count++;
            }
        }
        return count;
    }

    /**
     * Counts instances the oracle classifies correctly but the extracted model
     * gets wrong. The extracted model is only consulted when the oracle is
     * right (short-circuit preserved).
     */
    private double correctWrong(Instances data) throws Exception {
        double count = 0;
        for (int i = 0; i < data.numInstances(); i++) {
            Instance inst = data.instance(i);
            double actual = inst.classValue();
            if (getOracleClassifier().classifyInstance(inst) == actual
                    && getExtractedClassifier().classifyInstance(inst) != actual) {
                count++;
            }
        }
        return count;
    }

    /**
     * Counts instances both the oracle and the extracted model classify
     * correctly. The extracted model is only consulted when the oracle is
     * right (short-circuit preserved).
     */
    private double correctCorrect(Instances data) throws Exception {
        double count = 0;
        for (int i = 0; i < data.numInstances(); i++) {
            Instance inst = data.instance(i);
            double actual = inst.classValue();
            if (getOracleClassifier().classifyInstance(inst) == actual
                    && getExtractedClassifier().classifyInstance(inst) == actual) {
                count++;
            }
        }
        return count;
    }

    /**
     * Computes the percentage of instances the given classifier labels
     * correctly on the supplied data.
     *
     * @param data       the instances to evaluate on
     * @param classifier the classifier whose predictions are checked
     * @return accuracy in percent, or 0 for an empty dataset
     * @throws Exception if classification fails
     */
    private double accuracy(Instances data, Classifier classifier)
            throws Exception {
        int n = data.numInstances();
        if (n == 0) {
            return 0; // avoid 0/0 -> NaN on an empty dataset
        }
        double correct = 0;
        for (int i = 0; i < n; i++) {
            if (classifier.classifyInstance(data.instance(i)) == data.instance(
                    i).classValue()) {
                correct++;
            }
        }
        return (correct / n) * 100;
    }

    /**
     * Computes the percentage of instances on which the extracted model agrees
     * with the oracle (fidelity).
     *
     * @param data the instances to evaluate on
     * @return fidelity in percent, or 0 for an empty dataset
     * @throws Exception if classification fails
     */
    private double fidelity(Instances data) throws Exception {
        int n = data.numInstances();
        if (n == 0) {
            return 0; // avoid 0/0 -> NaN on an empty dataset
        }
        double correct = 0;
        for (int i = 0; i < n; i++) {
            if (getOracleClassifier().classifyInstance(data.instance(i)) == getExtractedClassifier().classifyInstance(data.instance(i))) {
                correct++;
            }
        }
        return (correct / n) * 100;
    }

    /**
     * Appends a timestamp and the string representations of the oracle and
     * the extracted model to "model2file.txt" in the working directory.
     * I/O errors are logged, not propagated.
     */
    private void printModelToFile() {
        // try-with-resources guarantees the writer is closed even when an
        // exception is thrown mid-write (the original leaked on failure).
        try (PrintWriter pw = new PrintWriter(new FileWriter("model2file.txt", true))) {
            pw.append(new Date(System.currentTimeMillis()).toString() + "\n");
            pw.append("Base Model\n" + getOracleClassifier().toString() + "\n");
            pw.append("Extracted Model\n" + getExtractedClassifier().toString()
                    + "\n\n**************************\n\n");
        } catch (IOException ex) {
            Logger.getLogger(OracleClassifier.class.getName()).log(
                    Level.SEVERE, null, ex);
        }
    }
}
