package com.kdpujie.alink;

import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.dataproc.SplitBatchOp;
import com.alibaba.alink.operator.batch.evaluation.EvalBinaryClassBatchOp;
import com.alibaba.alink.operator.batch.source.CsvSourceBatchOp;
import com.alibaba.alink.operator.common.evaluation.BinaryClassMetrics;
import com.alibaba.alink.pipeline.Pipeline;
import com.alibaba.alink.pipeline.PipelineModel;
import com.alibaba.alink.pipeline.classification.LogisticRegression;
import com.alibaba.alink.pipeline.dataproc.StandardScaler;
import com.alibaba.alink.pipeline.feature.FeatureHasher;

/**
 * Demonstrates feature engineering, model training, and evaluation with the
 * Alink {@code Pipeline} API, using the Kaggle Avazu CTR competition data.
 */
public class PipelineLr {
    /** Fallback CSV location used when no path is supplied on the command line. */
    private static final String DEFAULT_DATA_PATH = "/Users/pujie/codes/data/avazu-small.csv";

    /**
     * Loads the CTR CSV data, fits a feature-engineering + logistic-regression
     * pipeline on a 99% training split, and prints binary-classification
     * metrics computed on the remaining 1% held-out split.
     *
     * @param args optional; {@code args[0]} may override the default CSV file path
     * @throws Exception if reading the data, training, or evaluation fails
     */
    public static void main(String[] args) throws Exception {
        // Allow the data file location to be overridden from the command line.
        String dataPath = args.length > 0 ? args[0] : DEFAULT_DATA_PATH;

        String schemaStr = "id string, click string, dt string, C1 string, banner_pos int, site_id string, site_domain string, site_category string, app_id string, app_domain string, app_category string, device_id string, "
            + "device_ip string, device_model string, device_type string, device_conn_type string, C14 int, C15 int, C16 int, C17 int, C18 int, C19 int, C20 int, C21 int";

        CsvSourceBatchOp originAllData = new CsvSourceBatchOp()
            .setFilePath(dataPath)
            .setSchemaStr(schemaStr);

        // 99% of the rows become training data; side output 0 holds the
        // remaining 1%, which we use for prediction and evaluation.
        BatchOperator<?> originTrainData = new SplitBatchOp().setFraction(0.99).linkFrom(originAllData);
        BatchOperator<?> originPredictData = originTrainData.getSideOutput(0);

        String labelName = "click"; // label column

        // Feature columns, split into categorical and numerical groups.
        String[] selectedNames = new String[] {
            "C1", "banner_pos", "site_category", "app_domain", "app_category", "device_type", "device_conn_type", "C14", "C15", "C16", "C17", "C18", "C19", "C20", "C21", "site_id", "site_domain", "device_id", "device_model"
        };
        String[] categoryNames = new String[] {
            "C1", "banner_pos", "site_category", "app_domain", "app_category", "device_type", "device_conn_type", "site_id", "site_domain", "device_id", "device_model"
        };
        String[] numericalNames = new String[] {
            "C14", "C15", "C16", "C17", "C18", "C19", "C20", "C21"
        };
        // Name of the column that FeatureHasher writes the combined feature vector into.
        String vectorName = "vec";

        // Pipeline: standardize numeric features, hash all features into one
        // sparse vector, then fit a logistic-regression classifier on it.
        Pipeline pipeline = new Pipeline()
            .add(
                new StandardScaler().setSelectedCols(numericalNames)
            ).add(
                new FeatureHasher()
                    .setSelectedCols(selectedNames)
                    .setCategoricalCols(categoryNames)
                    .setOutputCol(vectorName)
                    .setNumFeatures(30000)
            ).add(
                new LogisticRegression()
                    .setLabelCol(labelName)
                    .setVectorCol(vectorName)
                    .setPredictionCol("pred")
                    .setPredictionDetailCol("detail")
            );
        PipelineModel model = pipeline.fit(originTrainData); // train

        BatchOperator<?> predict = model.transform(originPredictData)
            .select(new String[] {labelName, "pred", "detail"});

        // Evaluate on the held-out prediction split (not the training data).
        BinaryClassMetrics metrics = new EvalBinaryClassBatchOp()
            .setLabelCol(labelName)
            .setPredictionDetailCol("detail")
            .linkFrom(predict)
            .collectMetrics();
        System.out.println("对预测数据的评估：");
        System.out.println("\tAUC:" + metrics.getAuc());
        System.out.println("\tAccuracy:" + metrics.getAccuracy());
        System.out.println("\tLogLoss:" + metrics.getLogLoss());

        System.out.println(model.toJson());
    }
}