package com.bw;

import com.alibaba.alink.common.io.filesystem.FilePath;
import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.dataproc.SplitBatchOp;
import com.alibaba.alink.operator.batch.evaluation.EvalBinaryClassBatchOp;
import com.alibaba.alink.operator.batch.evaluation.EvalMultiClassBatchOp;
import com.alibaba.alink.operator.batch.nlp.SegmentBatchOp;
import com.alibaba.alink.operator.batch.nlp.StopWordsRemoverBatchOp;
import com.alibaba.alink.operator.batch.sink.AkSinkBatchOp;
import com.alibaba.alink.operator.batch.source.CsvSourceBatchOp;
import com.alibaba.alink.operator.batch.source.TsvSourceBatchOp;
import com.alibaba.alink.operator.batch.sql.FilterBatchOp;
import com.alibaba.alink.operator.batch.utils.UDFBatchOp;
import com.alibaba.alink.operator.common.evaluation.BinaryClassMetrics;
import com.alibaba.alink.operator.common.evaluation.MultiClassMetrics;
import com.alibaba.alink.pipeline.PipelineModel;
import com.alibaba.alink.pipeline.classification.KnnClassifier;
import com.alibaba.alink.pipeline.classification.LogisticRegression;
import com.alibaba.alink.pipeline.classification.NaiveBayes;
import com.alibaba.alink.pipeline.classification.RandomForestClassifier;
import com.alibaba.alink.pipeline.nlp.Word2Vec;
import com.alibaba.alink.pipeline.tuning.GridSearchCV;
import com.alibaba.alink.pipeline.tuning.GridSearchCVModel;
import com.alibaba.alink.pipeline.tuning.MultiClassClassificationTuningEvaluator;
import com.alibaba.alink.pipeline.tuning.ParamGrid;
import org.apache.flink.table.functions.ScalarFunction;

/**
 * SMS spam classification pipeline built on Alink batch operators.
 *
 * Steps: (1) load the TSV dataset and map the string label to 0/1 with a
 * Flink UDF; (2) tokenize, remove stop words, and embed messages with
 * Word2Vec; (3) split 80/20 into train/test; (4) train logistic-regression
 * and KNN classifiers and print their accuracy on the test split.
 */
public class Test2 {
    public static void main(String[] args) throws Exception {
        BatchOperator.setParallelism(1);

        // (1) Load the SMS spam dataset (tab-separated: label, content) and
        // map the label "spam" -> 1, "ham" -> 0 via a custom Flink UDF.
        String filepath = "datafile/sms_spam.tsv";
        BatchOperator<?> data1 = new TsvSourceBatchOp()
                .setFilePath(filepath)
                .setSchemaStr("label string,content string")
                // Skip empty lines in the source file.
                .setSkipBlankLine(true);

        // Apply the label-mapping UDF; the numeric label lands in "newlabel".
        UDFBatchOp data2 = new UDFBatchOp()
                .setFunc(new EmailFunction())
                .setSelectedCols("label")
                .setOutputCol("newlabel")
                .linkFrom(data1);

        // (2) Feature extraction: tokenize the message text, remove stop
        // words, then train Word2Vec to turn each message into a vector.
        BatchOperator<?> segment = new SegmentBatchOp()
                .setSelectedCol("content")
                .setOutputCol("segment")
                .linkFrom(data2);

        // Drop common stop words from the token stream.
        BatchOperator<?> remover = new StopWordsRemoverBatchOp()
                .setSelectedCol("segment")
                .setOutputCol("remover")
                .linkFrom(segment);

        // Filter out rows whose token column became null or blank after
        // stop-word removal; Word2Vec cannot train on empty documents.
        BatchOperator<?> op = new FilterBatchOp()
                .setClause("remover is not null and  CHAR_LENGTH(TRIM(remover)) > 0");
        remover = remover.link(op);

        // Word2Vec: minCount=1 keeps every token (the corpus is small);
        // vectorSize=4 produces a 4-dimensional feature vector in "out".
        Word2Vec word2vec = new Word2Vec()
                .setSelectedCol("remover")
                .setMinCount(1)
                .setVectorSize(4)
                .setOutputCol("out");

        BatchOperator<?> data3 = word2vec.fit(remover).transform(remover);
        data3.print();

        // (3) Split the feature set 80/20. SplitBatchOp's main output is the
        // sampled fraction (train); side output 0 is the remainder (test).
        BatchOperator<?> trainData = new SplitBatchOp().setFraction(0.8).linkFrom(data3);
        BatchOperator<?> testData = trainData.getSideOutput(0);

        // Optionally persist both splits as local .ak files, e.g.:
        //   trainData.link(new AkSinkBatchOp()
        //           .setFilePath(new FilePath("datafile/test_alink_file_sink1/trainData1.ak"))
        //           .setOverwriteSink(true)
        //           .setNumFiles(1));
        //   testData.link(new AkSinkBatchOp()
        //           .setFilePath(new FilePath("datafile/test_alink_file_sink1/testData1.ak"))
        //           .setOverwriteSink(true)
        //           .setNumFiles(1));

        // (4) Train two classifiers on the training split and score the
        // held-out test split.

        // Logistic regression (binary classifier).
        LogisticRegression lr = new LogisticRegression()
                .setVectorCol("out")
                .setLabelCol("newlabel")
                .setPredictionCol("pred")
                .setPredictionDetailCol("pred_detail");
        BatchOperator<?> lr_result = lr.fit(trainData).transform(testData);
        lr_result.print();

        // K-nearest neighbours with k = 3.
        KnnClassifier knn = new KnnClassifier()
                .setVectorCol("out")
                .setPredictionCol("pred")
                .setLabelCol("newlabel")
                .setPredictionDetailCol("pred_detail")
                .setK(3);
        // BUG FIX: the model must be fitted on the training split (the
        // original fitted on testData, leaking the test set into training
        // and making the accuracy below meaningless).
        BatchOperator<?> resultknn = knn.fit(trainData).transform(testData);
        resultknn.print();

        // Evaluate logistic regression with the binary-class evaluator
        // (labels are 0/1, so binary evaluation applies).
        BinaryClassMetrics lr_metrics = new EvalBinaryClassBatchOp()
                .setLabelCol("newlabel")
                .setPredictionDetailCol("pred_detail")
                .linkFrom(lr_result)
                .collectMetrics();

        // Evaluate KNN with the multi-class evaluator.
        MultiClassMetrics knn_metrics = new EvalMultiClassBatchOp()
                .setLabelCol("newlabel")
                .setPredictionDetailCol("pred_detail")
                .linkFrom(resultknn)
                .collectMetrics();

        System.out.println("lr_metrics Accuracy:" + lr_metrics.getAccuracy());
        // Typo fixed in the output label ("metrice" -> "metrics").
        System.out.println("knn_metrics Accuracy:" + knn_metrics.getAccuracy());
    }

    /**
     * Flink scalar UDF that converts the string label to a numeric one:
     * "spam" -> 1, anything else (including "ham" and null) -> 0.
     */
    public static class EmailFunction extends ScalarFunction {
        public Integer eval(String s) {
            return "spam".equals(s) ? 1 : 0;
        }
    }
}
