package com.bw;

import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.dataproc.SplitBatchOp;
import com.alibaba.alink.operator.batch.evaluation.EvalMultiClassBatchOp;
import com.alibaba.alink.operator.batch.source.CsvSourceBatchOp;
import com.alibaba.alink.operator.common.evaluation.MultiClassMetrics;
import com.alibaba.alink.pipeline.PipelineModel;
import com.alibaba.alink.pipeline.classification.NaiveBayes;
import com.alibaba.alink.pipeline.classification.RandomForestClassifier;
import com.alibaba.alink.pipeline.tuning.GridSearchCV;
import com.alibaba.alink.pipeline.tuning.GridSearchCVModel;
import com.alibaba.alink.pipeline.tuning.MultiClassClassificationTuningEvaluator;
import com.alibaba.alink.pipeline.tuning.ParamGrid;

import java.util.Arrays;

/**
 * Mushroom-classification exercise built on Alink batch operators.
 *
 * <p>Pipeline: read the mushroom CSV, split 8:1:1 into train/validate/test,
 * train and evaluate a RandomForest and a NaiveBayes classifier, grid-search
 * both for the best Accuracy, then score both best models on the test set and
 * save the winner as an AK model file.
 */
public class Test1 {

    /** Feature column names — every column of the mushroom dataset except the label. */
    private static final String[] FEATURE_COLS = {
            "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9", "f10",
            "f11", "f12", "f13", "f14", "f15", "f16", "f17", "f18", "f19", "f20", "f21"};

    /** Name of the label column (5th column in the file schema). */
    private static final String LABEL_COL = "label";

    /** Column holding per-class prediction details, consumed by the evaluators. */
    private static final String PRED_DETAIL_COL = "pred_detail";

    public static void main(String[] args) throws Exception {

        // (1) Read the mushroom dataset, declaring every column as string.
        BatchOperator.setParallelism(1);
        String file = "datafile/mushroom.data.txt";
        String schema = "f0 string,f1 string,f2 string,f3 string,label string,f4 string,f5 string,f6 string,f7 string," +
                "f8 string,f9 string,f10 string,f11 string,f12 string,f13 string,f14 string,f15 string,f16 string," +
                "f17 string,f18 string,f19 string,f20 string,f21 string";

        CsvSourceBatchOp data1 = new CsvSourceBatchOp()
                .setFilePath(file)
                .setSchemaStr(schema)
                .setFieldDelimiter(",");

        // (2) Split into trainData / validateData / testData with ratio 8:1:1.
        // First split keeps 80% as the main output (training set); the side
        // output is the remaining 20%.
        BatchOperator<?> trainData = new SplitBatchOp().setFraction(0.8).linkFrom(data1);
        BatchOperator<?> middleData = trainData.getSideOutput(0);
        // Split the leftover 20% in half: 10% validation, 10% test.
        BatchOperator<?> validateData = new SplitBatchOp().setFraction(0.5).linkFrom(middleData);
        BatchOperator<?> testData = validateData.getSideOutput(0);

        // (3) Random forest: fit on trainData, predict on validateData,
        // print confusion matrix and summary metrics.
        RandomForestClassifier rf = new RandomForestClassifier()
                .setPredictionDetailCol(PRED_DETAIL_COL)
                .setPredictionCol("pred")
                .setLabelCol(LABEL_COL)
                .setMaxDepth(100)
                .setMaxLeaves(200)
                .setFeatureCols(FEATURE_COLS)
                .enableLazyPrintModelInfo("info");
        BatchOperator<?> rfResult = rf.fit(trainData).transform(validateData);
        printMetrics(evaluate(rfResult));

        // (4) Naive Bayes: same train/validate/evaluate cycle.
        NaiveBayes ns = new NaiveBayes()
                .setFeatureCols(FEATURE_COLS)
                .setLabelCol(LABEL_COL)
                .setPredictionDetailCol(PRED_DETAIL_COL)
                .setPredictionCol("pred")
                .setSmoothing(0.1)
                .setNumThreads(1)
                .enableLazyPrintModelInfo("info");
        BatchOperator<?> nbResult = ns.fit(trainData).transform(validateData);
        printMetrics(evaluate(nbResult));

        // (5) Grid-search hyper-parameters of both estimators (metric: Accuracy)
        // and keep each search's best pipeline model.
        MultiClassClassificationTuningEvaluator tuningEvaluator = new MultiClassClassificationTuningEvaluator()
                .setLabelCol(LABEL_COL)
                .setPredictionDetailCol(PRED_DETAIL_COL)
                .setTuningMultiClassMetric("ACCURACY");

        // Random forest search space: subsampling ratio x number of trees.
        ParamGrid rfParamGrid = new ParamGrid()
                .addGrid(rf, RandomForestClassifier.SUBSAMPLING_RATIO, new Double[] {0.5, 0.8})
                .addGrid(rf, RandomForestClassifier.NUM_TREES, new Integer[] {3, 6});
        GridSearchCV rfCv = new GridSearchCV()
                .setEstimator(rf)
                .setParamGrid(rfParamGrid)
                .setTuningEvaluator(tuningEvaluator)
                .setNumFolds(2)
                .enableLazyPrintTrainInfo("TrainInfo");
        GridSearchCVModel rfModel = rfCv.fit(trainData);
        PipelineModel rfBestModel = rfModel.getBestPipelineModel();

        // Naive Bayes search space: smoothing x thread count.
        ParamGrid nsParamGrid = new ParamGrid()
                .addGrid(ns, NaiveBayes.SMOOTHING, new Double[] {0.1, 0.3})
                .addGrid(ns, NaiveBayes.NUM_THREADS, new Integer[] {1, 2});
        GridSearchCV nsCv = new GridSearchCV()
                .setEstimator(ns)
                .setParamGrid(nsParamGrid)
                .setTuningEvaluator(tuningEvaluator)
                .setNumFolds(2)
                .enableLazyPrintTrainInfo("TrainInfo");
        GridSearchCVModel nsModel = nsCv.fit(trainData);
        PipelineModel nsBestModel = nsModel.getBestPipelineModel();

        // (6) Score testData with both best models, keep the more accurate one
        // and save it as an AK file.
        // BUG FIX: the original reused ONE EvalMultiClassBatchOp instance for
        // both linkFrom calls, silently rebinding its input between runs;
        // evaluate(...) now builds a fresh evaluator per call.
        MultiClassMetrics cvRfMetrics = evaluate(rfBestModel.transform(testData));
        MultiClassMetrics cvNsMetrics = evaluate(nsBestModel.transform(testData));

        if (cvRfMetrics.getAccuracy() > cvNsMetrics.getAccuracy()) {
            rfBestModel.save("datafile/yk04_bast_model");
        } else {
            nsBestModel.save("datafile/yk04_bast_model");
        }
        BatchOperator.execute();
    }

    /**
     * Runs multi-class evaluation over a prediction result and collects its metrics.
     *
     * @param predictions batch operator whose rows carry the label column and
     *                    the prediction-detail column
     * @return collected multi-class metrics (triggers job execution)
     */
    private static MultiClassMetrics evaluate(BatchOperator<?> predictions) {
        // A fresh operator per call — never re-link a single evaluator instance.
        return new EvalMultiClassBatchOp()
                .setLabelCol(LABEL_COL)
                .setPredictionDetailCol(PRED_DETAIL_COL)
                .linkFrom(predictions)
                .collectMetrics();
    }

    /**
     * Prints the confusion matrix (required by the assignment) plus summary metrics.
     *
     * @param metrics collected evaluation metrics
     */
    private static void printMetrics(MultiClassMetrics metrics) {
        System.out.println("Confusion Matrix:" + Arrays.deepToString(metrics.getConfusionMatrix()));
        System.out.println("Macro Precision:" + metrics.getMacroPrecision());
        System.out.println("Micro Recall:" + metrics.getMicroRecall());
        System.out.println("Weighted Sensitivity:" + metrics.getWeightedSensitivity());
    }
}
