package ccnl.demo.algo;

import ccnl.demo.JrddTools;
import org.apache.hadoop.hdfs.util.EnumCounters;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.mllib.regression.LabeledPoint;
import org.apache.spark.mllib.tree.GradientBoostedTrees;
import org.apache.spark.mllib.tree.configuration.BoostingStrategy;
import org.apache.spark.mllib.tree.model.GradientBoostedTreesModel;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import scala.Tuple2;

import java.util.HashMap;
import java.util.Map;

/**
 * Created by wong on 16/4/21.
 */
/**
 * Trains and evaluates a binary-classification Gradient Boosted Trees model
 * (Spark MLlib RDD API) on a parquet dataset of labeled points.
 */
public class GBT {
    // Used only to load the parquet dataset in GBTEntry.
    SQLContext sqlContext;

    public GBT(SQLContext sqlC) {
        sqlContext = sqlC;
    }

    /**
     * Trains a binary GBT classifier on {@code train} and prints
     * precision/recall/F1/test-error metrics evaluated on {@code test}.
     *
     * @param train labeled training points
     * @param test  labeled held-out points, used only for evaluation
     */
    public void runGiven(JavaRDD<LabeledPoint> train, JavaRDD<LabeledPoint> test) {
        BoostingStrategy bs = BoostingStrategy.defaultParams("Classification");
        bs.setNumIterations(20);
        bs.getTreeStrategy().setNumClasses(2);
        bs.getTreeStrategy().setMaxDepth(10);
        // maxBins must be >= the largest categorical arity declared below (389).
        bs.getTreeStrategy().setMaxBins(400);

        // Feature index -> number of categories for that feature.
        Map<Integer, Integer> cateFeaInfo = new HashMap<Integer, Integer>();
        cateFeaInfo.put(2, 2);
        cateFeaInfo.put(3, 3);
        cateFeaInfo.put(4, 389);
        cateFeaInfo.put(5, 6);
        cateFeaInfo.put(6, 35);
        cateFeaInfo.put(7, 6);
        // Same accessor style as the setters above (was bs.treeStrategy()).
        bs.getTreeStrategy().setCategoricalFeaturesInfo(cateFeaInfo);

        final GradientBoostedTreesModel model = GradientBoostedTrees.train(train, bs);

        // Pair each test point's predicted label with its true label.
        JavaRDD<Tuple2<Double, Double>> pl =
                test.map(v1 -> new Tuple2<Double, Double>(model.predict(v1.features()), v1.label()));

        printMetricsLog(pl);
    }

    /**
     * Prints binary-classification metrics for an RDD of
     * (predicted label, true label) pairs, where 1.0 is the positive class
     * and 0.0 the negative class.
     *
     * <p>If a denominator is zero (e.g. nothing was predicted positive) the
     * corresponding metric prints as NaN rather than throwing.
     *
     * @param pl RDD of (prediction, label) pairs
     */
    public void printMetricsLog(JavaRDD<Tuple2<Double, Double>> pl) {
        // Cache before the eight count() actions below; without this each
        // action recomputes the full lineage (including model.predict over
        // the entire test set).
        pl.cache();

        double predictedPosNum = pl.filter(v1 -> v1._1().equals(1.0)).count();
        double labeledPosNum = pl.filter(v1 -> v1._2().equals(1.0)).count();
        double predictedNegNum = pl.filter(v1 -> v1._1().equals(0.0)).count();
        double labeledNegNum = pl.filter(v1 -> v1._2().equals(0.0)).count();
        double truePosNum = pl.filter(v1 -> v1._1().equals(1.0) && v1._2().equals(1.0)).count();
        double trueNegNum = pl.filter(v1 -> v1._1().equals(0.0) && v1._2().equals(0.0)).count();

        double posPrecision = truePosNum / predictedPosNum;
        double posRecall = truePosNum / labeledPosNum;

        double negPrecision = trueNegNum / predictedNegNum;
        double negRecall = trueNegNum / labeledNegNum;

        double posF1 = 2.0 * posPrecision * posRecall / (posPrecision + posRecall);

        // 1.0 * forces floating-point division (count() returns long).
        double testErr = 1.0 * pl.filter(v1 -> !v1._1().equals(v1._2())).count() / pl.count();

        System.out.println("-------------------");
        System.out.println("pos_precision: " + posPrecision + "  neg_precision: " + negPrecision);
        // Fixed: the original omitted ": " after "neg_recall".
        System.out.println("pos_recall: " + posRecall + "  neg_recall: " + negRecall);
        System.out.println("pos_F1: " + posF1 + "  Test Error: " + testErr);
        System.out.println("-------------------");
    }

    /**
     * End-to-end entry point: loads a parquet dataset, splits it into
     * train/test via JrddTools, converts rows to LabeledPoints and runs
     * training plus evaluation.
     *
     * @param datasetPath path to a parquet dataset readable by sqlContext
     */
    public void GBTEntry(String datasetPath) {
        DataFrame df = sqlContext.read().parquet(datasetPath);
        JavaRDD<Row> jrdd = df.toJavaRDD();

        // NOTE(review): getTrain/getTest presumably partition jrdd into
        // disjoint splits — confirm in JrddTools; if they overlap, the
        // reported test metrics are optimistic.
        JavaRDD<LabeledPoint> jrddLpTrain = JrddTools.convertRow2Lp(JrddTools.getTrain(jrdd));
        JavaRDD<LabeledPoint> jrddLpTest = JrddTools.convertRow2Lp(JrddTools.getTest(jrdd));

        runGiven(jrddLpTrain, jrddLpTest);
    }
}
