/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package grex.Conformal;

import grex.Data.ArffTableModel;
import grex.Data.ExtractionArffTableModel;
import grex.Data.Prediction;
import grex.Data.PredictionContainer;
import grex.Environment;
import grex.IPredictiveModel;
import grex.Nodes.GeneException;
import grex.Nodes.ITerminal;
import grex.Nodes.TargetPred;
import grex.Options;
import grex.WekaModels.GrexLinearRegression;
import grex.WekaModels.GrexM5PBag;
import grex.WekaModels.WekaPredictiveModel;
import grex.fitnessfunctions.ErrorFunctions.ErrorCalculator;
import grex.fitnessfunctions.ErrorFunctions.IErrorFunction;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 *
 * @author RIK
 */
/**
 * Evaluates inductive conformal prediction (ICP) for collections of predictive
 * models and writes per-technique averages (validity, interval widths,
 * prediction-set statistics, error measures) to a tab-separated result file.
 *
 * <p>The nonconformity metric is chosen from the problem type: a probability
 * margin for classification, an (optionally exponentially scaled) absolute
 * error for regression.
 *
 * <p>Not thread-safe: per-model calibration state ({@code pAlpha},
 * {@code tieSmoother}, {@code calibrationSet}) is mutated by
 * {@link #initPValue}.
 *
 * @author RIK
 */
public class ConformalCalculator {

    /** Nonconformity metric selected in the constructor from the problem type. */
    private IConformalMetric conformalMetric;
    // significance: requested error rate (alpha);
    // pAlpha:       nonconformity threshold taken from the sorted calibration set;
    // tieSmoother:  probability used to break ties for scores exactly at pAlpha.
    double significance, pAlpha, tieSmoother;
    // Sorted (score, value) pairs built from the model's validation predictions.
    ArrayList<ConformalScoreValuePair> calibrationSet;
    // Result-file writer; remains null if the file could not be created.
    PrintWriter pw;
    ErrorCalculator ec;
    ArffTableModel dataset;
    Boolean CLASSIFICATION = false;
    private Environment env;
    // NOTE(review): never referenced inside this class — apparently dead,
    // kept because it is package-visible and may be read elsewhere.
    int totalTies;
    /** Indices into a regression prediction interval: [LOWER, UPPER]. */
    final int LOWER = 0, UPPER = 1;

    /**
     * Selects the nonconformity metric from the problem type and opens the
     * timestamped result file (a failure to open is logged, not rethrown).
     *
     * @param env experiment environment providing options and data
     */
    public ConformalCalculator(Environment env) {
        this.env = env;
        // FIX: was an assignment hidden inside the if-condition
        // (if (CLASSIFICATION = ...)); same behavior, now explicit.
        CLASSIFICATION = env.getOptions().getPROBLEM_TYPE() == Options.CLASSIFICATION;
        if (CLASSIFICATION) {
            conformalMetric = new ConformalMargin();
        } else if (env.getOptions().getPARAMETER_4() > 1) {
            conformalMetric = new ConformalExpSAE();
        } else {
            conformalMetric = new ConformalAE();
        }
        try {
            pw = new PrintWriter("Conformal_result" + System.currentTimeMillis() + ".txt");
            pw.println("Dataset\tModel\tCType\tAlpha\tTrueTarget\tMetric\tValue");
            ec = new ErrorCalculator(env);
        } catch (FileNotFoundException ex) {
            Logger.getLogger(ConformalCalculator.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Closes the result file; safe to call even if opening it failed. */
    public void closeFile() {
        // FIX: guard against NPE when the constructor could not open the file.
        if (pw != null) {
            pw.close();
        }
    }

    /**
     * Overwrites the target of every prediction in {@code modelPredictions}
     * with the opaque model's PREDICTION for the same instance (fidelity
     * evaluation treats the opaque model's output as the truth).
     */
    private void setOpaquePredictionsAsTarget(PredictionContainer modelPredictions, PredictionContainer opaquePredictions) {
        for (Prediction p : modelPredictions.values()) {
            Prediction op = opaquePredictions.get(p.getInstance());
            p.setTargetValue(op.getPrediction());
        }
    }

    /**
     * Restores the true targets (kept in the opaque model's containers),
     * undoing {@link #setOpaquePredictionsAsTarget}.
     */
    private void resetTrueTargets(PredictionContainer modelPredictions, PredictionContainer opaquePredictions) {
        for (Prediction p : modelPredictions.values()) {
            Prediction op = opaquePredictions.get(p.getInstance());
            p.setTargetValue(op.getTargetValue());
        }
    }

    /**
     * For "Fidelity(C)" and "RegelX(B)" runs: retrains the opaque model on
     * this model's fold and substitutes its predictions as targets.
     *
     * @return the prepared opaque model, or {@code null} when {@code cType}
     *         needs no opaque model or the dataset carries none
     */
    private IPredictiveModel prepareOpaqueModel(IPredictiveModel model, String cType, boolean trueTarget) throws Exception {
        boolean fidelity = cType.equals("Fidelity(C)");
        boolean regelX = cType.equals("RegelX(B)");
        if ((!fidelity && !regelX) || !(dataset instanceof ExtractionArffTableModel)) {
            return null;
        }
        IPredictiveModel opaqueModel = ((ExtractionArffTableModel) dataset).getModel();
        opaqueModel.initFold(model.getFold());
        opaqueModel.train();
        if (fidelity) {
            // Fidelity also re-targets the calibration (validation) set.
            opaqueModel.execute(opaqueModel.getPcVal());
        }
        opaqueModel.execute(opaqueModel.getPcTest());
        if (fidelity) {
            setOpaquePredictionsAsTarget(model.getPcVal(), opaqueModel.getPcVal());
        }
        if (!trueTarget) {
            setOpaquePredictionsAsTarget(model.getPcTest(), opaqueModel.getPcTest());
        }
        return opaqueModel;
    }

    /**
     * Runs conformal evaluation for every technique in
     * {@code predictiveModels} and appends the fold-averaged metrics to the
     * result file. Any exception is reported via the project ErrorManager.
     *
     * @param predictiveModels technique name -> models (typically one per fold)
     * @param ops        options; only the dataset file name is used here
     * @param alpha      significance level (requested error rate)
     * @param env        experiment environment
     * @param cType      evaluation mode: "Fidelity(C)", "RegelX(B)", or any
     *                   other string for a plain run on the true targets
     * @param trueTarget if true, test-set targets are left untouched even in
     *                   fidelity mode
     */
    public void calcResult(HashMap<String, ArrayList<IPredictiveModel>> predictiveModels, Options ops, double alpha, Environment env, String cType, boolean trueTarget) {
        try {
            IPredictiveModel opaqueModel = null;
            ec = new ErrorCalculator(env);
            this.significance = alpha;
            this.dataset = env.getData();
            for (String name : predictiveModels.keySet()) {
                ArrayList<IPredictiveModel> models = predictiveModels.get(name);
                double validity = 0, zeroC = 0, oneC = 0, multC = 0, oneAcc = 0, size = 0, acc = 0, auc = 0, bri = 0, boCof = 0, meanIntervall = 0, medianIntervall = 0;
                for (IPredictiveModel model : models) {
                    opaqueModel = prepareOpaqueModel(model, cType, trueTarget);

                    System.out.println("Technique:" + name);
                    conformalMetric.initICPModel(env, model.getPcTrain());
                    initPValue(model);

                    HashMap<Prediction, ArrayList<Double>> predictionSets = calcPredictionIntervals(model.getPcTest());

                    if (!CLASSIFICATION) { // regression task
                        validity += calcValidity(predictionSets) / models.size();
                        medianIntervall += calcMedianInterval(predictionSets) / models.size();
                        meanIntervall += calcMeanInterval(predictionSets) / models.size();
                    } else { // classification task
                        validity += calcValidity(predictionSets) / models.size();
                        HashMap<Integer, Double> count = getPredictionDistribution(predictionSets);
                        oneAcc += calcAccOne(predictionSets) / models.size();
                        // FIX: cast guards against silent integer division when
                        // getNrOfNodes() returns an int.
                        size += (double) model.getNrOfNodes() / models.size();
                        acc += ec.calcError(model.getPcTest(), IErrorFunction.ACCURACY) / models.size() / 100;
                        auc += ec.calcError(model.getPcTest(), IErrorFunction.AUC) / models.size() / 100;
                        bri += ec.calcError(model.getPcTest(), IErrorFunction.BRI) / models.size() / 100;
                        boCof += calcBoCof(predictionSets) / models.size();
                        for (Integer c : count.keySet()) {
                            if (c == 0) {
                                zeroC += count.get(c) / models.size();
                            } else if (c == 1) {
                                oneC += count.get(c) / models.size();
                            } else {
                                multC += count.get(c) / models.size();
                            }
                        }
                    }
                    // FIX: restore the true targets for BOTH problem types.
                    // Originally this ran only in the classification branch,
                    // leaving regression fidelity runs with the opaque model's
                    // predictions as targets for any later call.
                    if (opaqueModel != null) {
                        resetTrueTargets(model.getPcVal(), opaqueModel.getPcVal());
                        resetTrueTargets(model.getPcTest(), opaqueModel.getPcTest());
                    }
                }
                pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\ttval:\t" + validity);
                pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\tmeanI:\t" + meanIntervall);
                pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\tmedianI:\t" + medianIntervall);
                if (CLASSIFICATION) {
                    pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\toneAcc:\t" + oneAcc);
                    pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\tzeroC:\t" + zeroC);
                    pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\toneC:\t" + oneC);
                    pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\tboCof:\t" + boCof);
                    pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\tmultiC:\t" + multC);
                    pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\tacc:\t" + (1.0 - acc));
                    pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\tauc:\t" + (1.0 - auc));
                    pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\tbri:\t" + bri);
                    pw.println(ops.getDATA_FILE() + "\t" + name + "\t" + cType + "\t" + alpha + "\t" + trueTarget + "\tsize:\t" + size);
                }
            }

        } catch (Exception e) {
            grex.ErrorManagement.ErrorManager.getInstance().reportError(e);
        }
    }

    /**
     * Builds the calibration set from the model's validation predictions,
     * derives the nonconformity threshold {@code pAlpha} for the current
     * significance level, and computes the tie-smoothing probability for
     * calibration scores exactly equal to the threshold.
     *
     * @param model model whose validation predictions form the calibration set
     */
    public void initPValue(IPredictiveModel model) {
        calibrationSet = new ArrayList<ConformalScoreValuePair>(model.getPcVal().size());

        for (Prediction pr : model.getPcVal().values()) {
            double tVal = pr.getTargetValue();
            if (!CLASSIFICATION) {
                // For regression, store the absolute error next to the score.
                tVal = Math.abs(pr.getTargetValue() - pr.getPrediction());
            }
            double cScore = conformalMetric.calcAlpha(pr, pr.getTargetValue());
            calibrationSet.add(new ConformalScoreValuePair(cScore, tVal));
        }

        // Sort order depends on the problem type (see ConformalScoreValuePair):
        // least conforming instances come first in both cases.
        Collections.sort(calibrationSet);

        System.out.println("Calibration\nAlpha\tReal");
        for (ConformalScoreValuePair pb : calibrationSet) {
            System.out.println(pb.getCScore() + "\t" + pb.getValue());
        }

        // Index of the calibration instance that defines the threshold,
        // clamped to the last element when alpha * n reaches the set size.
        // NOTE(review): an empty calibration set would still throw here.
        int instanceAlpha = (int) Math.ceil(significance * calibrationSet.size());
        if (instanceAlpha == calibrationSet.size()) {
            instanceAlpha--;
        }
        pAlpha = calibrationSet.get(instanceAlpha).getCScore();
        System.out.println("pAlpha " + pAlpha);
        // Count calibration scores tied with pAlpha on either side of the
        // threshold index.
        double tieIn = 0, tieEx = 0;
        int i = instanceAlpha;
        while (i > 0 && calibrationSet.get(i).getCScore() == pAlpha) {
            tieIn++;
            i--;
        }
        i = instanceAlpha + 1;
        while (i < calibrationSet.size() && calibrationSet.get(i).getCScore() == pAlpha) {
            tieEx++;
            i++;
        }

        /*
         * (Translated from Swedish.) The +1 on tieIn: with the ordering
         * "x x o o o o o" (x = excluded), the boundary instance itself must
         * be counted among the tied instances that may fall on the excluded
         * side, so the share attributed to a score tied at pAlpha is
         * (tieIn + 1) / (1 + tieIn + tieEx).
         */
        tieSmoother = (tieIn + 1) / (1 + tieIn + tieEx);

    }

    /**
     * Computes, for every test prediction, the conformal prediction set
     * (classification) or [lower, upper] interval (regression) at pAlpha.
     */
    private HashMap<Prediction, ArrayList<Double>> calcPredictionIntervals(PredictionContainer pc) {
        HashMap<Prediction, ArrayList<Double>> acceptedClasses = new HashMap<>(pc.size());
        for (Prediction prediction : pc.values()) {
            acceptedClasses.put(prediction, conformalMetric.calcPredictionSet(prediction, pAlpha));
        }
        return acceptedClasses;
    }

    /** Mean width of the regression prediction intervals. */
    private double calcMeanInterval(HashMap<Prediction, ArrayList<Double>> intervals) {
        double total = 0;
        for (ArrayList<Double> interval : intervals.values()) {
            total += interval.get(UPPER) - interval.get(LOWER);
        }
        return total / intervals.size();
    }

    /**
     * Median width of the regression prediction intervals (upper-middle
     * element for even-sized collections, matching the original behavior).
     */
    private double calcMedianInterval(HashMap<Prediction, ArrayList<Double>> intervals) {
        ArrayList<Double> widths = new ArrayList<>(intervals.size());
        for (ArrayList<Double> interval : intervals.values()) {
            widths.add(interval.get(UPPER) - interval.get(LOWER));
        }
        Collections.sort(widths);

        return widths.get(widths.size() / 2);
    }

    /**
     * Average prediction-set size over all test instances (1.0 means every
     * set is a singleton; larger values mean less informative sets).
     */
    public double calcBoCof(HashMap<Prediction, ArrayList<Double>> acceptedClasses) {
        double boCof = 0;
        for (ArrayList<Double> classes : acceptedClasses.values()) {
            boCof += classes.size();
        }
        return boCof / acceptedClasses.size();
    }

    /**
     * Accuracy measured only over singleton prediction sets; returns 0 when
     * no singleton set exists.
     */
    public double calcAccOne(HashMap<Prediction, ArrayList<Double>> acceptedClasses) {
        double oneAcc = 0;
        int count = 0;
        for (Map.Entry<Prediction, ArrayList<Double>> entry : acceptedClasses.entrySet()) {
            ArrayList<Double> classes = entry.getValue();
            if (classes.size() == 1) {
                count++;
                double c = classes.get(0);
                if (c == entry.getKey().getTargetValue()) {
                    oneAcc++;
                }
            }
        }
        if (count == 0) {
            return 0;
        }
        return oneAcc / count;
    }

    /**
     * Fraction of test instances whose true target is covered by the
     * prediction set (classification) or interval (regression); should
     * approach 1 - alpha for a valid conformal predictor.
     */
    private double calcValidity(HashMap<Prediction, ArrayList<Double>> cSet) {
        double val = 0;
        for (Map.Entry<Prediction, ArrayList<Double>> entry : cSet.entrySet()) {
            Prediction prediction = entry.getKey();
            ArrayList<Double> cItems = entry.getValue();
            if (CLASSIFICATION) {
                for (double c : cItems) {
                    if (c == prediction.getTargetValue()) {
                        val++;
                        break;
                    }
                }
            } else {
                if (prediction.getTargetValue() >= cItems.get(LOWER) && prediction.getTargetValue() <= cItems.get(UPPER)) {
                    val++;
                }
            }
        }
        return val / cSet.size();
    }

    /**
     * Distribution of prediction-set sizes: size -> fraction of test
     * instances whose prediction set has that size.
     */
    private HashMap<Integer, Double> getPredictionDistribution(HashMap<Prediction, ArrayList<Double>> acceptedClasses) {
        HashMap<Integer, Double> dist = new HashMap<Integer, Double>();
        for (ArrayList<Double> classes : acceptedClasses.values()) {
            int preds = classes.size();
            // FIX: replaces the deprecated "new Double(0)" box with plain
            // null-checked arithmetic.
            Double count = dist.get(preds);
            dist.put(preds, (count == null ? 0 : count) + 1);
        }
        // Normalize counts to fractions in place; replacing values via the
        // entry set is not a structural modification, so iteration is safe.
        for (Map.Entry<Integer, Double> entry : dist.entrySet()) {
            entry.setValue(entry.getValue() / acceptedClasses.size());
        }
        return dist;
    }

    /**
     * A (nonconformity score, associated value) pair from the calibration
     * set. Sorts by score: DESCENDING for classification (larger margin =
     * more conforming) and ASCENDING for regression (larger error = less
     * conforming), so the least conforming instances come first either way.
     */
    class ConformalScoreValuePair implements Comparable<ConformalScoreValuePair>, Serializable {

        private static final long serialVersionUID = 1L;

        double cScore, value;

        public ConformalScoreValuePair(double cScore, double value) {
            this.cScore = cScore;
            this.value = value;
        }

        /** Orders by score; direction depends on the problem type (see class doc). */
        @Override
        public int compareTo(ConformalScoreValuePair pv) {
            if (cScore > pv.getCScore()) {
                return CLASSIFICATION ? -1 : 1;
            }
            if (cScore < pv.getCScore()) {
                return CLASSIFICATION ? 1 : -1;
            }
            return 0;
        }

        public double getCScore() {
            return cScore;
        }

        public double getValue() {
            return value;
        }

        @Override
        public String toString() {
            return "cScore: " + cScore + " value: " + value;
        }
    }

//--------------------------Classification metrics--------------------------------

    /**
     * Probability nonconformity metric: the score of a class is simply the
     * model's probability estimate for that class (higher = more conforming).
     */
    private class ConformalProb implements IConformalMetric {

        @Override
        public double calcAlpha(Prediction p, double clas) {
            return p.getProbForClass(clas);
        }

        /**
         * Accepts every class whose score exceeds pAlpha; classes tied at
         * exactly pAlpha are accepted with probability {@code tieSmoother}.
         */
        @Override
        public ArrayList<Double> calcPredictionSet(Prediction prediction, double pAlpha) {
            ArrayList<Double> classes = new ArrayList<Double>();
            double[] probs = prediction.getProbs();

            for (int c = 0; c < probs.length; c++) {
                double alpha = calcAlpha(prediction, c);
                System.out.print(alpha + "\t");
                if (alpha > pAlpha || alpha == pAlpha && Options.rnd.nextDouble() < tieSmoother) {
                    classes.add((double) c);
                    System.out.print(c + "\t");
                } else {
                    System.out.print("*\t");
                }
            }
            return classes;
        }

        /** Probability-based metrics need no per-model setup. */
        @Override
        public void initICPModel(Environment env, PredictionContainer pcTrain) {

        }
    }

    /**
     * Margin nonconformity metric: probability of the class minus the
     * largest probability among all OTHER classes.
     */
    private class ConformalMargin extends ConformalProb {

        @Override
        public double calcAlpha(Prediction p, double clas) {
            double[] probs = p.getProbs();
            double max = 0;
            for (int i = 0; i < probs.length; i++) {
                // int/double comparison is intentional: clas is a class index.
                if (i != clas && probs[i] > max) {
                    max = probs[i];
                }
            }
            return p.getProbForClass(clas) - max;
        }
    }
}
