package com.bw;

import com.alibaba.alink.common.io.filesystem.FilePath;
import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.dataproc.SplitBatchOp;
import com.alibaba.alink.operator.batch.evaluation.EvalMultiClassBatchOp;
import com.alibaba.alink.operator.batch.feature.BinningTrainBatchOp;
import com.alibaba.alink.operator.batch.sink.AkSinkBatchOp;
import com.alibaba.alink.operator.batch.source.CsvSourceBatchOp;
import com.alibaba.alink.operator.batch.source.MemSourceBatchOp;
import com.alibaba.alink.operator.batch.sql.FilterBatchOp;
import com.alibaba.alink.operator.batch.sql.SelectBatchOp;
import com.alibaba.alink.operator.common.evaluation.MultiClassMetrics;
import com.alibaba.alink.pipeline.classification.*;
import com.alibaba.alink.pipeline.feature.Binning;
import com.alibaba.alink.pipeline.feature.BinningModel;
import org.apache.flink.types.Row;

import java.util.Arrays;
import java.util.List;

public class Test1 {

    /**
     * End-to-end credit-scoring demo on the German credit dataset:
     * load CSV -> clean -> select features -> 8:2 split -> WOE binning ->
     * train three classifiers -> evaluate -> pick the best -> predict one sample.
     *
     * @param args unused
     * @throws Exception if any Alink batch job fails
     */
    public static void main(String[] args) throws Exception {

        // (1) Load data: read the dataset with an Alink batch CSV source.
        String filePath = "datafile/german.csv";
        String schema = "f1 string,f2 int,f3 string,f4 string,f5 int,f6 string,f7 string,f8 int,f9 string,f10 string,f11 int,f12 string,f13 int,f14 string,f15 string,f16 int,f17 string,f18 int,f19 string,f20 string,label int";
        BatchOperator.setParallelism(1);

        CsvSourceBatchOp csvSource = new CsvSourceBatchOp()
                .setFilePath(filePath)
                .setSchemaStr(schema)
                .setFieldDelimiter(" ");

        // (2) Data cleaning: drop rows whose label is missing.
        // NOTE(review): `label` is declared INT in the schema; comparing it to ''
        // relies on an implicit string->int cast in the SQL dialect — confirm this
        // filter behaves as intended (plain `label is not null` may be enough).
        BatchOperator<?> cleaned = csvSource.link(
                new FilterBatchOp().setClause(" label is not null and label <> '' "));

        // (3) Feature selection: keep only the numeric features plus the label.
        String[] featureCols = {"f2", "f5", "f8", "f11", "f13", "f16", "f18"};
        String labelCol = "label";
        BatchOperator<?> selected = cleaned.link(
                new SelectBatchOp().setClause("f2,f5,f8,f11,f13,f16,f18,label"));

        // (4) Split 8:2 into train/test. SplitBatchOp's main output is the sampled
        // fraction (80%); side output 0 holds the remaining 20%.
        BatchOperator<?> trainData = new SplitBatchOp().setFraction(0.8).linkFrom(selected);
        BatchOperator<?> testData = trainData.getSideOutput(0);
        // Diagnostic labels kept verbatim from the original output.
        System.out.println("tranData.count() = " + trainData.count());
        System.out.println("testData.count() = " + testData.count());

        // (5) Binning: fit a binning model with WOE encoding on the training set.
        Binning binning = new Binning()
                .setEncode("WOE")
                .setSelectedCols(featureCols)
                .setLabelCol(labelCol)
                .setPositiveLabelValueString("1");
        BinningModel binningModel = binning.fit(trainData);
        // (6) WOE encoding: print the WOE-transformed training data.
        binningModel.transform(trainData).print();

        // (7) Model selection: train three classifiers on the training set.

        // Decision tree
        DecisionTreeClassifier dt = new DecisionTreeClassifier()
                .setPredictionDetailCol("pred_detail")
                .setPredictionCol("pred")
                .setLabelCol(labelCol)
                .setFeatureCols(featureCols);
        DecisionTreeClassificationModel dtModel = dt.fit(trainData);
        BatchOperator<?> dtResult = dtModel.transform(testData);

        // Random forest
        RandomForestClassifier rf = new RandomForestClassifier()
                .setPredictionDetailCol("pred_detail")
                .setPredictionCol("pred")
                .setLabelCol(labelCol)
                .setFeatureCols(featureCols);
        RandomForestClassificationModel rfModel = rf.fit(trainData);
        BatchOperator<?> rfResult = rfModel.transform(testData);

        // Naive Bayes
        NaiveBayes nb = new NaiveBayes()
                .setFeatureCols(featureCols)
                .setLabelCol(labelCol)
                .setPredictionCol("pred")
                .setPredictionDetailCol("pred_detail");
        NaiveBayesModel nbModel = nb.fit(trainData);
        BatchOperator<?> nbResult = nbModel.transform(testData);

        // (8) Model evaluation: three metrics per model. A fresh evaluation op is
        // created per model (the original re-linked one op instance three times,
        // which mutates its output table between uses).
        MultiClassMetrics dtMetrics = evaluate(dtResult, labelCol);
        printMetrics(dtMetrics);

        MultiClassMetrics rfMetrics = evaluate(rfResult, labelCol);
        printMetrics(rfMetrics);

        MultiClassMetrics nbMetrics = evaluate(nbResult, labelCol);
        printMetrics(nbMetrics);

        // (9) Pick the best model by micro recall. This flat comparison is
        // equivalent to the original nested one (which contained an unreachable
        // branch), including its tie-breaking behavior:
        //   - random forest wins when rf >= dt and rf > nb;
        //   - naive Bayes wins when it beats dt, or ties dt while dt beats rf;
        //   - decision tree wins otherwise (strictly best, or a three-way tie).
        double dtRecall = dtMetrics.getMicroRecall();
        double rfRecall = rfMetrics.getMicroRecall();
        double nbRecall = nbMetrics.getMicroRecall();
        if (rfRecall >= dtRecall && rfRecall > nbRecall) {
            System.out.println("随机森林算法预测结果最优");
        } else if (nbRecall > dtRecall || (dtRecall > rfRecall && nbRecall >= dtRecall)) {
            System.out.println("朴素贝叶斯算法预测结果最优");
        } else {
            System.out.println("决策树算法预测结果最优");
        }

        // (10) Model application: predict one simulated row with the trained
        // random-forest model.
        List<Row> sample = Arrays.asList(
                Row.of(3, 222, 2, 1, 22, 1, 1, 1)
        );
        BatchOperator<?> sampleSource = new MemSourceBatchOp(
                sample, "f2 int,f5 int,f8 int,f11 int,f13 int,f16 int,f18 int,label int");
        BatchOperator<?> result = rfModel.transform(sampleSource);
        result.print();

        // Optional: persist the prediction result to an Ak file sink.
//        String sinkFilePath = "datafile/test_alink_file_sink";
//        result.link(new AkSinkBatchOp()
//                .setFilePath(new FilePath(sinkFilePath))
//                .setOverwriteSink(true)
//                .setNumFiles(1));

    }

    /**
     * Runs a multiclass evaluation over a prediction result and collects metrics.
     * A new {@code EvalMultiClassBatchOp} is built per call so each evaluation
     * owns its own output table.
     */
    private static MultiClassMetrics evaluate(BatchOperator<?> predictions, String labelCol) throws Exception {
        return new EvalMultiClassBatchOp()
                .setLabelCol(labelCol)
                .setPredictionDetailCol("pred_detail")
                .linkFrom(predictions)
                .collectMetrics();
    }

    /** Prints the three chosen evaluation metrics (labels kept verbatim). */
    private static void printMetrics(MultiClassMetrics metrics) {
        System.out.println("Macro Precision:" + metrics.getMacroPrecision());
        System.out.println("Micro Recall:" + metrics.getMicroRecall());
        System.out.println("Weighted Sensitivity:" + metrics.getWeightedSensitivity());
    }
}
