package grex.DT.tree;

import grex.BNFException;
import grex.Environment;
import grex.GP;
import grex.Options;
import grex.genes.Equals;
import grex.genes.Gene;
import grex.genes.GeneException;
import grex.genes.If;
import grex.genes.Less;
import grex.genes.ProbVal;
import grex.genes.ProbVar;
import grex.genes.Val;
import grex.genes.Var;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.StringTokenizer;

/* jaDTi package - v0.6.1 */

/*
 *  Copyright (c) 2004, Jean-Marc Francois.
 *
 *  This file is part of jaDTi.
 *  jaDTi is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  jaDTi is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with Jahmm; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

 */
/**
 * A builder of decision trees from a database.<p>
 * The decision tree aims to guess a (so called) 'goal' attribute thanks to
 * 'test attributes' values.  All the values must be known; the class
 * {@link DecisionTreeBuilder DecisionTreeBuilder} handles unknown values.
 **/
public class DecisionTreeBuilder implements Serializable {

    // Working thresholds used while a tree is being grown; the build* methods
    // re-derive them from the configured ("original") values below.
    private double entropyThreshold = 0.0001;
    private double scoreThreshold = 5;
    // Values captured at construction time.  (Field names keep their original
    // "orignal"/"orignial" misspellings; renaming would touch every method.)
    private double orignalScoreThreshold = 0.1;
    private double originalEntropyThreshold = 0.0005;
    private double originalUsePercentOfAttributes = 1;
    private boolean orignialUseOneOfSplits = false;
    // Fraction of the test attributes considered when choosing a split (1 = all).
    private double usePercentOfAttributes = 1;
    private boolean useOneOfSplits = false;
    private LearningDecisionTree learningTree;  // tree currently being grown
    private DecisionTree tree;                  // last tree produced by buildDecisionTree()
    private ItemSet trainingSet;
    private ItemSet testSet;
    SymbolicAttribute goalAttribute;  /* The attribute to guess */

    AttributeSet testAttributes;      /* The attributes on which the tests are
    based. */

    private Environment environment;
    private Dataset dataset;
    private GP gp;                       // cached GP translation of `tree` (null until requested)
    private GP[] randomGPForrest;        // cached GP translation of `forrest`
    private DecisionTree[] forrest,bootstrapTrees;
    private Options options;
    private boolean probColumns;         // true when the fitness function name contains "Prob"
    private ArrayList<GP> arlForrest;    // cached list form of the forest's GPs
    private double avgBootStrapTreeAcc, avgOutOfBagAcc;
    /**
     * Builds a decision tree builder with explicit settings.  The given
     * thresholds are remembered as the "original" values and restored each
     * time one of the build methods runs.
     *
     * @param environment The evolution environment providing the data and options.
     * @param useOneOfSplits Whether "one of" (multi-value) splits may be used.
     * @param scoreThreshold Forward-pruning score threshold, as a fraction of
     *        the training-set size.
     * @param entropyThreshold Maximal entropy of a leaf node.
     * @param percentageOfAttributes Fraction of the test attributes examined
     *        when choosing a split (1 = all).
     **/
    public DecisionTreeBuilder(Environment environment, boolean useOneOfSplits, double scoreThreshold, double entropyThreshold, double percentageOfAttributes) {
        this.environment = environment;
        this.options = environment.getOptions();

        this.orignalScoreThreshold = scoreThreshold;
        this.entropyThreshold = this.originalEntropyThreshold = entropyThreshold;
        this.usePercentOfAttributes = this.originalUsePercentOfAttributes = percentageOfAttributes;
        this.useOneOfSplits = this.orignialUseOneOfSplits = useOneOfSplits;
        // Probabilistic fitness functions switch the translation to Prob* genes.
        this.probColumns = environment.getOptions().getFITNESS_FUNCTION().contains("Prob");
    }

    /**
     * Builds a decision tree builder with default settings: one-of splits
     * enabled, score threshold 0.01, entropy threshold 0.0001, and all
     * attributes considered at each split.
     *
     * @param environment The evolution environment providing the data and options.
     **/
    public DecisionTreeBuilder(Environment environment) {
        this(environment, true, 0.01, 0.0001, 1);
    }

    /**
     * Builds a decision tree builder with a caller-chosen one-of-splits
     * setting, score threshold 0.01 and entropy threshold 0.5.
     * NOTE(review): the 0.5 entropy threshold differs by orders of magnitude
     * from the 0.0001 used by the other constructors — confirm intentional.
     *
     * @param environment The evolution environment providing the data and options.
     * @param useOneOfSplits Whether "one of" (multi-value) splits may be used.
     **/
    public DecisionTreeBuilder(Environment environment, boolean useOneOfSplits) {
        this(environment, useOneOfSplits, 0.01, 0.5, 1);
    }

    /**
     * Builds a single decision tree from the environment's data.  The tunable
     * parameters are reset to the values given at construction time, then the
     * tree is grown from the training fold.  Any previously cached GP
     * translation is discarded.
     **/
    public void buildDecisionTree() {
        dataset = new Dataset(environment.getData(), probColumns, options.getTestFOLD());

        // Restore configured parameters (other build methods overwrite them).
        scoreThreshold = orignalScoreThreshold * dataset.getTrainset().size();
        entropyThreshold = originalEntropyThreshold;
        useOneOfSplits = orignialUseOneOfSplits;
        usePercentOfAttributes = originalUsePercentOfAttributes;

        trainingSet = dataset.getTrainset();
        testAttributes = dataset.getTestAttributes();
        goalAttribute = dataset.getGoalAttribute();

        learningTree = new LearningDecisionTree(trainingSet.attributeSet(), goalAttribute, trainingSet);
        tree = build().decisionTree();
        gp = null;  // invalidate the cached GP form of any previous tree
    }

    /**
     * Builds {@code size} decision trees, each grown on a fresh bootstrap
     * sample of the training set, and records the average accuracy over all
     * trees both on the bootstrap samples and on the out-of-bag items.
     *
     * @param size The number of bootstrap trees to build.
     **/
    public void buildBootStrapTrees(int size) {
        dataset = new Dataset(environment.getData(), probColumns, options.getTestFOLD());
        this.scoreThreshold = orignalScoreThreshold * dataset.getTrainset().size();
        this.entropyThreshold = originalEntropyThreshold;
        this.useOneOfSplits = orignialUseOneOfSplits;
        this.usePercentOfAttributes = originalUsePercentOfAttributes;
        this.trainingSet = dataset.getTrainset();
        this.testAttributes = dataset.getTestAttributes();
        this.goalAttribute = dataset.getGoalAttribute();

        bootstrapTrees = new DecisionTree[size];
        avgBootStrapTreeAcc = 0;
        // Bug fix: avgOutOfBagAcc was never reset, so repeated calls to this
        // method accumulated stale out-of-bag accuracy across runs.
        avgOutOfBagAcc = 0;
        for (int i = 0; i < size; i++) {
            dataset.createNewBootStrap();
            learningTree = new LearningDecisionTree(trainingSet.attributeSet(),
                    goalAttribute, dataset.getBootstrap());
            bootstrapTrees[i] = build().decisionTree();
            // Accuracy on held-out (out-of-bag) items and on the bootstrap sample.
            double accO = calcAcc(learningTree, dataset.getOutofBag());
            double accB = calcAcc(learningTree, dataset.getBootstrap());
            avgBootStrapTreeAcc += accB / size;
            avgOutOfBagAcc += accO / size;
        }
    }

        public void buildAscendingBootStrapTrees(int size, double starThreshold, double stopThreshold) {
        // Builds `size` bootstrap trees while a linear schedule moves the
        // forward-pruning score threshold from stopThreshold towards starThreshold.
        // NOTE(review): the threshold for a tree is recomputed AFTER that tree is
        // built, so tree 0 always uses stopThreshold and the final computed value
        // (starThreshold) is never applied — confirm this ordering is intentional.
        dataset = new Dataset(environment.getData(), probColumns, options.getTestFOLD());
        this.scoreThreshold = stopThreshold * dataset.getTrainset().size();
        this.entropyThreshold = originalEntropyThreshold;
        this.useOneOfSplits = orignialUseOneOfSplits;
        this.usePercentOfAttributes = originalUsePercentOfAttributes;
        this.trainingSet = dataset.getTrainset();
        this.testAttributes = dataset.getTestAttributes();
        this.goalAttribute = dataset.getGoalAttribute();

        bootstrapTrees = new DecisionTree[size];
        avgBootStrapTreeAcc = 0;
        // Bug fix: avgOutOfBagAcc was never reset, so repeated calls
        // accumulated stale out-of-bag accuracy across runs.
        avgOutOfBagAcc = 0;
        for (int i = 0; i < size; i++) {
            dataset.createNewBootStrap();
            learningTree = new LearningDecisionTree(trainingSet.attributeSet(),
                    goalAttribute, dataset.getBootstrap());
            bootstrapTrees[i] = build().decisionTree();
            double nextThreshold = starThreshold - ((starThreshold - stopThreshold) / (size)) * (size - 1 - i);
            System.out.println("Threshold: " + nextThreshold);
            this.scoreThreshold = nextThreshold * dataset.getTrainset().size();
            double accO = calcAcc(learningTree, dataset.getOutofBag());
            double accB = calcAcc(learningTree, dataset.getBootstrap());
            avgBootStrapTreeAcc += accB / size;
            avgOutOfBagAcc += accO / size;
        }
    }
    
    /** @return Average accuracy of the bootstrap trees on their out-of-bag items. */
    public double getOutofBagAvgACC() {
        return this.avgOutOfBagAcc;
    }

    /** @return Average accuracy of the bootstrap trees on their own bootstrap samples. */
    public double getBootstrapACC() {
        return this.avgBootStrapTreeAcc;
    }

    /**
     * Builds a random forest of {@code size} trees.  Each tree is grown on a
     * fresh bootstrap sample, considering roughly log2(#attributes)+1 randomly
     * chosen attributes per split, with the pruning thresholds set near zero.
     * Any previously cached GP translations of the forest are discarded.
     *
     * @param size The number of trees in the forest.
     **/
    public void buildRandomForrest(int size) {
        dataset = new Dataset(environment.getData(), probColumns, options.getTestFOLD());
        trainingSet = dataset.getTrainset();
        testAttributes = dataset.getTestAttributes();
        goalAttribute = dataset.getGoalAttribute();

        // Random-forest heuristic: examine about log2(n)+1 of the n attributes.
        int nrOfAttributes = trainingSet.attributeSet().size();
        usePercentOfAttributes = ((Math.log(nrOfAttributes) / Math.log(2)) + 1) / nrOfAttributes;
        entropyThreshold = 0.00001;
        scoreThreshold = 0.00001;
        useOneOfSplits = false;

        forrest = new DecisionTree[size];
        for (int i = 0; i < size; i++) {
            dataset.createNewBootStrap();
            learningTree = new LearningDecisionTree(trainingSet.attributeSet(), goalAttribute, dataset.getBootstrap());
            forrest[i] = build().decisionTree();
        }

        // Drop GP translations of any older forest.
        randomGPForrest = null;
        arlForrest = null;
    }
    /*  public void buildRandomTrees(int size){
    dataset = new Dataset(environment.getData(), options.getTestFOLD());
    orignalScoreThreshold=0.01;
    this.scoreThreshold = orignalScoreThreshold * dataset.getTrainset().size();
    this.trainingSet = dataset.getTrainset();
    this.testAttributes = dataset.getTestAttributes();
    this.goalAttribute = dataset.getGoalAttribute();
    int nrOfAttributes = trainingSet.attributeSet().size();
    this.usePercentOfAttributes = 0.6;//((Math.log(nrOfAttributes)/Math.log(2))+1)/nrOfAttributes;
    this.useOneOfSplits = false;

    forrest = new DecisionTree[size];

    for(int i = 0;i<size;i++){
    learningTree = new LearningDecisionTree(trainingSet.attributeSet(),
    goalAttribute, trainingSet);
    forrest[i]= build().decisionTree();
    }
    randomGPForrest=null;
    }*/

    /** @return The most recently built decision tree, or {@code null} if none. */
    public DecisionTree getTree() {
        return this.tree;
    }

    /**
     * Translates the built decision tree into a trained GP individual.  The
     * result is cached in the {@code gp} field; subsequent calls return the
     * same GP until {@link #buildDecisionTree()} is run again.
     *
     * @return The tree as a trained GP, or {@code null} if no tree was built.
     * @throws BNFException If no predictor class is declared in the BNF.
     * @throws GeneException If gene construction fails.
     **/
    public GP getTreeAsGP() throws BNFException, GeneException {
        if (environment.getPredictorClass().isEmpty()) {
            // Fixed "Decisoin" typo in the user-facing error message.
            throw new BNFException("When starting from a decision tree the predictor class must be defined in the BNF using: \"terminalPredictor\"");
        }
        if (tree == null) {
            return null;
        }
        if (gp == null) {
            Gene head = translateToGene(tree.root(), environment, dataset, environment.getPredictorClass());
            gp = new GP(environment, head);
            gp.train();
        }
        return gp;
    }

    /**
     * Returns the random forest as an array of trained GP individuals.  The
     * translation is performed once and cached.
     *
     * @return The forest as GPs, or {@code null} if no forest has been built.
     * @throws GeneException If gene construction fails.
     * @throws BNFException If no predictor class is declared in the BNF.
     **/
    public GP[] getForrestAsGPs() throws GeneException, BNFException {
        if (forrest == null) {
            return null;
        }
        if (randomGPForrest != null) {
            return randomGPForrest;
        }
        if (arlForrest == null) {
            arlForrest = getForrestAsArrayList();
        }
        randomGPForrest = arlForrest.toArray(new GP[arlForrest.size()]);
        return randomGPForrest;
    }


    /**
     * Returns the random forest as a (cached) list of trained GP individuals.
     * Each tree is translated to a gene tree, wrapped in a GP and trained.
     * The {@code gp} field is left pointing at the last translated tree, as
     * in the original implementation.
     *
     * @return The forest as GPs, or {@code null} if no forest has been built.
     * @throws GeneException If gene construction fails.
     * @throws BNFException If no predictor class is declared in the BNF.
     **/
    public ArrayList<GP> getForrestAsArrayList() throws GeneException, BNFException {
        if (environment.getPredictorClass().isEmpty()) {
            throw new BNFException("When Starting from a random forest the predictor class must be defined in the BNF using: \"terminalPredictor\"");
        }
        if (forrest == null) {
            return null;
        }
        if (arlForrest == null) {
            ArrayList<GP> result = new ArrayList<GP>(forrest.length);
            for (DecisionTree forrestTree : forrest) {
                Gene head = translateToGene(forrestTree.root(), environment, dataset, environment.getPredictorClass());
                gp = new GP(environment, head);
                gp.train();
                result.add(gp);
            }
            arlForrest = result;
        }
        return arlForrest;
    }

    /**
     * Translates the bootstrap trees into a fresh list of trained GP
     * individuals.  Unlike the forest translation, the result is not cached.
     * (The "Ss" in the method name is kept for caller compatibility.)
     *
     * @return The bootstrap trees as GPs, or {@code null} if none were built.
     * @throws GeneException If gene construction fails.
     * @throws BNFException If no predictor class is declared in the BNF.
     **/
    public ArrayList<GP> getBootStrapTreesSsArrayList() throws GeneException, BNFException {
        if (environment.getPredictorClass().isEmpty()) {
            throw new BNFException("When Starting from a random forest the predictor class must be defined in the BNF using: \"terminalPredictor\"");
        }
        if (bootstrapTrees == null) {
            return null;
        }

        ArrayList<GP> result = new ArrayList<GP>(bootstrapTrees.length);
        for (DecisionTree bootstrapTree : bootstrapTrees) {
            Gene head = translateToGene(bootstrapTree.root(), environment, dataset, environment.getPredictorClass());
            gp = new GP(environment, head);
            gp.train();
            result.add(gp);
        }
        return result;
    }


    /** @return The test fold identifier taken from the options. */
    public String getFold() {
        return this.options.getTestFOLD();
    }

    /**
     * Grows the learning tree to completion.
     *
     * @return The learning tree, with every open node expanded.
     **/
    private LearningDecisionTree build() {
        // Keep expanding until each open node has become a leaf or a test node.
        while (learningTree.hasOpenNode()) {
            expand();
        }
        return learningTree;
    }

    /**
     * Expands one open node of the learning tree: the node becomes a leaf when
     * its learning set is (nearly) pure or no attribute is left to test on;
     * otherwise the best split test is evaluated and, if informative enough,
     * the node becomes a test node.
     *
     * @throws CannotCallMethodException If no open node is left.
     **/
    public void expand() {
        LearningOpenNode node = (LearningOpenNode) learningTree.openNode();
        if (node == null) {
            throw new CannotCallMethodException("No open node left");
        }

        ItemSet set = node.learningSet();

        // Pure enough, or nothing left to test on: close the node.
        if (set.entropy(goalAttribute) <= entropyThreshold || testAttributes.size() == 0) {
            makeLeafNode(node);
            return;
        }

        TestScore testScore = set.bestSplitTest(testAttributes,
                goalAttribute, usePercentOfAttributes, useOneOfSplits);
        double weightedScore = testScore.score * set.size();

        if (weightedScore <= scoreThreshold) {
            // Forward pruning: the best test does not provide enough information.
            makeLeafNode(node);
        } else {
            makeTestNode(node, testScore.test, weightedScore);
        }
    }

    /**
     * Sets the maximal entropy of a leaf node.  A leaf whose entropy exceeds
     * this threshold is replaced by a test that divides it into subsets of
     * lower entropy, when such a test exists.
     *
     * @param entropy The entropy threshold; must not be negative.
     * @throws IllegalArgumentException If {@code entropy} is negative.
     **/
    public void setEntropyThreshold(double entropy) {
        if (entropy < 0.) {
            throw new IllegalArgumentException("Argument must be positive");
        }
        this.entropyThreshold = entropy;
    }

    /**
     * Sets the fraction of the test attributes examined at each split.
     *
     * @param percent The fraction of attributes to consider (1 = all).
     **/
    public void setUsePercentOfAttributes(double percent) {
        this.usePercentOfAttributes = percent;
    }

    /**
     * Enables or disables "one of" (multi-value) splits.
     *
     * @param oneOfSplits Whether one-of splits may be used.
     **/
    public void setUseOneOfSplits(boolean oneOfSplits) {
        this.useOneOfSplits = oneOfSplits;
    }

    /** @return The current entropy threshold. */
    public double getEntropyThreshold() {
        return this.entropyThreshold;
    }

    /**
     * Sets the minimal score of a test.  A new test node is created only if
     * its score is higher than this threshold.  The score of a test is
     * computed by multiplying the learning set (S) cardinality (N) by the
     * following information value:<br>
     * H(S) - Sum<sub>i = 1...T</sub> N<sub>i</sub> H(S<sub>i</sub>) / N<br>
     * where S<sub>i</sub> is the subset of S matching the i-th of the T
     * test's issues.  Each entropy is computed against the 'goal' attribute.
     *
     * @param score The score threshold; must not be negative.
     * @throws IllegalArgumentException If {@code score} is negative.
     **/
    public void setTestScoreThreshold(double score) {
        if (score < 0.) {
            throw new IllegalArgumentException("Argument must be positive");
        }
        this.scoreThreshold = score;
    }

    /** @return The current minimal test score threshold. */
    public double getTestScoreThreshold() {
        return this.scoreThreshold;
    }

    /**
     * Turns an open node into a leaf carrying the node's learning set, using
     * the set size as the node weight.
     *
     * @param openNode The open node to transform into a leaf.
     **/
    protected void makeLeafNode(LearningOpenNode openNode) {
        ItemSet learningSet = openNode.learningSet();
        openNode.replace(new LearningLeafNode(learningSet.size(), learningSet));
    }

    /**
     * Turns an open node into a test node and attaches one fresh open node per
     * test issue, each holding the matching subset of the learning set.
     *
     * @param openNode The open node to transform.
     * @param test The test the new node is based on.
     * @param score The (size-weighted) score of the test.
     **/
    private void makeTestNode(LearningOpenNode openNode, Test test,
            double score) {
        ItemSet learningSet = openNode.learningSet();

        LearningTestNode testNode =
                new LearningTestNode(learningSet.size(), test, score, learningSet);
        openNode.replace(testNode);

        // One son per possible test outcome, each fed its subset of items.
        ItemSet[] subSets = learningSet.split(test);
        for (int i = 0; i < test.nbIssues(); i++) {
            testNode.son(i).replace(
                    new LearningOpenNode(subSets[i].size(), subSets[i]));
        }
    }

    /**
     * Recursively translates a decision-tree node into a grex gene tree.  A
     * test node becomes an {@link If} gene whose condition compares the tested
     * attribute against the split value; a leaf node becomes an instance of
     * the configured predictor gene class.
     *
     * @param node The (sub)tree root to translate.
     * @param env The environment used to initialise every created gene.
     * @param ds The dataset the tree was built from.
     * @param predictorClass Simple name of the gene class used for leaves.
     * @return The translated gene tree.
     * @throws BNFException If the predictor class cannot be instantiated.
     **/
    private Gene translateToGene(Node node, Environment env, Dataset ds, String predictorClass) throws BNFException {
        Gene gene = null;
        if (node instanceof TestNode) {
            Gene condition = makeGrexIfCondition((TestNode) node, env, ds);
            gene = new If();
            Gene[] children = new Gene[3];
            children[0] = condition;
            // Child order preserved from the original: son(1) first, then son(0).
            children[1] = translateToGene(node.son(1), env, ds, predictorClass);
            children[2] = translateToGene(node.son(0), env, ds, predictorClass);
            gene.setChildren(children);
        }
        if (node instanceof LeafNode) {
            try {
                // getDeclaredConstructor().newInstance() replaces the deprecated
                // Class.newInstance(), which propagated checked constructor
                // exceptions unchecked; both paths land in the catch below.
                gene = (Gene) Class.forName(Options.GENE_PACKAGE + "." + predictorClass)
                        .getDeclaredConstructor().newInstance();
            } catch (Exception e) {
                throw new BNFException("Problems creating predictor objects. " + e.getMessage() + "\n");
            }
        }
        // NOTE(review): a node that is neither TestNode nor LeafNode would leave
        // gene null and make initGene throw a NullPointerException — confirm such
        // node types cannot reach this method.
        initGene(gene, env);
        return gene;
    }

    /**
     * Wires a freshly created gene to the environment, its options and the
     * current test fold.
     **/
    private void initGene(Gene gene, Environment env) {
        Options opts = env.getOptions();
        gene.setEnvironment(env);
        gene.setOptions(opts);
        gene.setFold(opts.getTestFOLD());
    }

    /**
     * Translates a decision-tree test (e.g. "age < [42]" or "color = [2]")
     * into a two-child comparison gene: a variable gene (Var or ProbVar)
     * compared by Less or Equals against a value gene (Val or ProbVal).
     * The Prob* variants are used when the fitness function name contains
     * "Prob".
     *
     * @param node The test node whose test is translated.
     * @param env The environment used to initialise the created genes.
     * @param ds The dataset, used to resolve symbolic attributes by name.
     * @return The operator gene with the variable and value as children.
     * @throws BNFException Declared for callers; not thrown directly here.
     **/
    private Gene makeGrexIfCondition(TestNode node, Environment env, Dataset ds) throws BNFException {
        Gene operator = null, value = null;
        Gene variable;
        int column;
        double val = 0;
        String op;
        String test = node.test().toString();
        // Tokenise the test's string form on spaces and brackets; the calls
        // below consume, in order: attribute name, operator, split value.
        StringTokenizer st = new StringTokenizer(test.toString(), " []");

        String treeCol = st.nextToken();
        column = env.getData().getColumnByName(treeCol);
        if (!env.getOptions().getFITNESS_FUNCTION().contains("Prob")) {
            Var var = new Var();
            var.setVarNr(column);
            var.setConVarNr(column);
            var.setCatVarNr(column);
            variable = var;
        } else {
            ProbVar var = new ProbVar();
            var.setVarNr(column);
            variable = var;
        }
        initGene(variable, env);

        op = st.nextToken();
        Val v = new Val();
        ProbVal pv = new ProbVal();
        if (op.equals("<")) {
            // Numerical split: convert the raw threshold to the gene-internal scale.
            operator = new Less();
            val = Double.parseDouble(st.nextToken());
            double geneInternalValue = env.getData().calcInternalNumericalValue(column, val, env.getOptions().getTestFOLD());
            if (!env.getOptions().getFITNESS_FUNCTION().contains("Prob")) {
                v.setValValue(geneInternalValue);
                value = v;
            } else {
                pv.setValValue(geneInternalValue);
                value = pv;
            }
        } else {
            // Symbolic (equality) split on a categorical attribute.
            SymbolicTest sTest = (SymbolicTest) node.test();
            operator = new Equals();
            String aName = node.test().attribute.name();
            SymbolicAttribute sAttr = (SymbolicAttribute) ds.getTestAttributes().findByName(aName);


            SymbolicValue[] values = sTest.getTestValues();

            // Only single-value equality tests are supported; the message below
            // is Swedish for "NB: not implemented for one-of splits".
            if (values.length != 1)//not impmented for one of splits
            {
                throw new ArrayIndexOutOfBoundsException("OBS ej implementerat för oneOfsplits");
            }
            // The category's numeric label divided by the attribute's value count
            // gives the gene-internal representation.
            double geneInternalValue = Double.parseDouble(sAttr.valueToString(values[0])) / sAttr.nbValues;
            if (!env.getOptions().getFITNESS_FUNCTION().contains("Prob")) {
                v.setValValue(geneInternalValue);
                value = v;
            } else {
                pv.setValValue(geneInternalValue);
                value = pv;
            }
        }
        initGene(operator, env);
        initGene(value, env);

        Gene[] children = {variable, value};
        operator.setChildren(children);

        return operator;
    }

    /**
     * Computes the single tree's classification accuracy: the fraction of
     * items whose goal attribute (the last attribute of the tree's attribute
     * set) the tree predicts correctly.
     *
     * @param useTestSet If true, evaluate on the test fold; otherwise on the
     *        training set.
     * @return The fraction of correctly classified items.
     **/
    public double calcAcc(boolean useTestSet) {
        ItemSet itemSet;
        if (useTestSet) {
            testSet = dataset.getTestset();  // cache the test fold, as before
            itemSet = testSet;
        } else {
            itemSet = trainingSet;
        }

        int goalIndex = tree.getAttributeSet().size() - 1;
        double hits = 0.;
        int total = 0;
        for (int i = 0; i < itemSet.size(); i++) {
            Item item = itemSet.item(i);
            if (tree.guessGoalAttribute(item).equals(item.valueOf(goalIndex))) {
                hits++;
            }
            total++;
        }
        return hits / (double) total;
    }

        /**
         * Computes {@code tree}'s classification accuracy over {@code itemSet},
         * comparing each prediction with the item's last attribute value.
         *
         * @param tree The learning tree to evaluate.
         * @param itemSet The items to classify.
         * @return The fraction of correctly classified items.
         **/
        public double calcAcc(LearningDecisionTree tree, ItemSet itemSet) {
            int goalIndex = tree.getAttributeSet().size() - 1;
            double hits = 0.;
            int total = 0;
            for (int i = 0; i < itemSet.size(); i++) {
                Item item = itemSet.item(i);
                if (tree.guessGoalAttribute(item).equals(item.valueOf(goalIndex))) {
                    hits++;
                }
                total++;
            }
            return hits / (double) total;
        }

    /**
     * Computes the random forest's accuracy by majority vote: every tree votes
     * for a class and the class with the most votes becomes the forest's
     * prediction (ties broken by the vote table's iteration order, as before).
     *
     * @param useTestSet If true, evaluate on the test fold; otherwise on the
     *        training set.
     * @return The fraction of correctly classified items.
     **/
    public double calcForrestAcc(boolean useTestSet) {
        ItemSet itemSet;
        if (useTestSet) {
            testSet = dataset.getTestset();
            itemSet = testSet;
        } else {
            itemSet = trainingSet;
        }

        double hits = 0.;
        int total = 0;
        for (int i = 0; i < itemSet.size(); i++) {
            Item item = itemSet.item(i);

            // Collect one vote per tree.  (Hashtable kept deliberately: the
            // tie-breaking below depends on its keySet iteration order.)
            Hashtable<KnownSymbolicValue, Integer> votes = new Hashtable<KnownSymbolicValue, Integer>();
            KnownSymbolicValue guess = null;
            for (DecisionTree forrestTree : forrest) {
                guess = forrestTree.guessGoalAttribute(item);
                Integer count = votes.get(guess);
                votes.put(guess, count == null ? 1 : count + 1);
            }

            // Majority vote; the first key reaching the maximum count wins.
            int maxVote = 0;
            for (KnownSymbolicValue prediction : votes.keySet()) {
                int voteCount = votes.get(prediction);
                if (voteCount > maxVote) {
                    maxVote = voteCount;
                    guess = prediction;
                }
            }

            KnownSymbolicValue actual =
                    (KnownSymbolicValue) item.valueOf(dataset.getAttributeSet(), goalAttribute);
            if (guess.intValue == actual.intValue) {
                hits++;
            }
            total++;
        }
        return hits / (double) total;
    }
}
