package ccnl.demo.algo;

import ccnl.demo.JrddTools;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.apache.spark.mllib.tree.DecisionTree;
import org.apache.spark.mllib.tree.model.DecisionTreeModel;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.codehaus.janino.Java;
import scala.Tuple2;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

/**
 * Decision-tree classification demo: trains binary {@link DecisionTree}
 * models over a sweep of max-depth values and logs precision/recall/F1
 * metrics on a held-out test set for each depth.
 *
 * Created by wong on 16/4/20.
 */
public class DT implements Serializable {
    SQLContext sqlContext;

    public DT(SQLContext sqlC) {
        this.sqlContext = sqlC;
    }

    /**
     * Trains one binary decision-tree classifier per max-depth in [1, 31]
     * and prints evaluation metrics on the test set for each model.
     *
     * @param train labeled training data
     * @param test  labeled test data used only for metric reporting
     */
    public void runGiven(JavaRDD<LabeledPoint> train, JavaRDD<LabeledPoint> test) {
        final int numClasses = 2;
        // Categorical feature info: feature index -> number of categories.
        // Indices/arities are dataset-specific — TODO confirm against the parquet schema.
        Map<Integer, Integer> cateFeaInfo = new HashMap<Integer, Integer>();
        cateFeaInfo.put(2, 2);
        cateFeaInfo.put(3, 3);
        cateFeaInfo.put(4, 389);
        cateFeaInfo.put(5, 6);
        cateFeaInfo.put(6, 35);
        cateFeaInfo.put(7, 6);
        final String impurity = "gini";
        // maxBins must be >= the largest categorical arity (389 above).
        final int maxBins = 400;

        for (int i = 1; i <= 31; i++) {
            // was: new Integer(i) — deprecated boxing constructor; autoboxing suffices.
            final DecisionTreeModel model = DecisionTree.trainClassifier(
                    train, numClasses, cateFeaInfo, impurity, i, maxBins);

            // (prediction, label) pairs for the whole test set.
            JavaRDD<Tuple2<Double, Double>> pl = test.map(
                    v1 -> new Tuple2<Double, Double>(model.predict(v1.features()), v1.label()));

            System.out.println("max-depth = " + i);
            printMetricsLog(pl);
        }
    }

    /**
     * Prints per-class precision/recall, the positive-class F1 and the overall
     * test error for a set of (prediction, label) pairs.
     *
     * <p>Divisions may yield NaN when a denominator is zero (e.g. no positive
     * predictions); the raw value is printed as-is.
     *
     * @param pl RDD of (predicted label, true label) pairs; labels are expected
     *           to be 0.0 or 1.0
     */
    public void printMetricsLog(JavaRDD<Tuple2<Double, Double>> pl) {
        // Each count() below is a separate Spark action; without caching, the
        // upstream prediction map would be recomputed for every one of them.
        pl.cache();
        try {
            double predictedPosNum = pl.filter(v1 -> v1._1().equals(1.0)).count();
            double labeledPosNum = pl.filter(v1 -> v1._2().equals(1.0)).count();
            double predictedNegNum = pl.filter(v1 -> v1._1().equals(0.0)).count();
            double labeledNegNum = pl.filter(v1 -> v1._2().equals(0.0)).count();
            double truePosNum = pl.filter(v1 -> v1._1().equals(1.0) && v1._2().equals(1.0)).count();
            double trueNegNum = pl.filter(v1 -> v1._1().equals(0.0) && v1._2().equals(0.0)).count();

            double posPrecision = truePosNum / predictedPosNum;
            double posRecall = truePosNum / labeledPosNum;
            double negPrecision = trueNegNum / predictedNegNum;
            double negRecall = trueNegNum / labeledNegNum;

            double posF1 = 2.0 * posPrecision * posRecall / (posPrecision + posRecall);

            double testErr = 1.0 * pl.filter(v1 -> !v1._1().equals(v1._2())).count() / pl.count();

            System.out.println("-------------------");
            System.out.println("pos_precision: " + posPrecision + "  neg_precision: " + negPrecision);
            // was: "  neg_recall" — missing the ": " separator in the log line.
            System.out.println("pos_recall: " + posRecall + "  neg_recall: " + negRecall);
            System.out.println("pos_F1: " + posF1 + "  Test Error: " + testErr);
            System.out.println("-------------------");
        } finally {
            // Release cached partitions before the next depth iteration.
            pl.unpersist();
        }
    }

    /**
     * Entry point: loads a parquet dataset, splits it into train/test,
     * subsamples the training split at 20% and runs the depth sweep.
     *
     * @param datasetPath path to the parquet dataset
     */
    public void DTEntry(String datasetPath) {
        DataFrame df = sqlContext.read().parquet(datasetPath);
        JavaRDD<Row> jrdd = df.toJavaRDD();
        JavaRDD<Row> jrddTrain = JrddTools.getTrain(jrdd);
        JavaRDD<Row> jrddTest = JrddTools.getTest(jrdd);

        // Sample the training split at 20% — presumably for speed or class
        // rebalancing; TODO confirm intent in JrddTools.getSampledTrain.
        JavaRDD<Row> sampledTrain = JrddTools.getSampledTrain(jrddTrain, 0.2);
        JavaRDD<LabeledPoint> lpTrain = JrddTools.convertRow2Lp(sampledTrain);
        JavaRDD<LabeledPoint> lpTest = JrddTools.convertRow2Lp(jrddTest);

        runGiven(lpTrain, lpTest);
    }

}
