package com.kdpujie.alink;

import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.classification.GbdtPredictBatchOp;
import com.alibaba.alink.operator.batch.classification.GbdtTrainBatchOp;
import com.alibaba.alink.operator.batch.classification.MultilayerPerceptronPredictBatchOp;
import com.alibaba.alink.operator.batch.classification.MultilayerPerceptronTrainBatchOp;
import com.alibaba.alink.operator.batch.dataproc.SplitBatchOp;
import com.alibaba.alink.operator.batch.evaluation.EvalBinaryClassBatchOp;
import com.alibaba.alink.operator.batch.source.CsvSourceBatchOp;
import com.alibaba.alink.operator.common.evaluation.BinaryClassMetrics;
import com.alibaba.alink.pipeline.Pipeline;
import com.alibaba.alink.pipeline.PipelineModel;
import com.alibaba.alink.pipeline.dataproc.StandardScaler;
import com.alibaba.alink.pipeline.feature.FeatureHasher;


/**
 * Feature engineering, training and evaluation of a multilayer-perceptron (MLP)
 * binary classifier on the Kaggle Avazu CTR competition data.
 *
 * <p>Reads a CSV sample, splits it 90/10 into train/hold-out, standardizes the
 * numerical columns, hashes all feature columns into one sparse vector, trains
 * an MLP on the training split and evaluates AUC/Accuracy/LogLoss on the
 * hold-out split.
 */
public class MlpcClassifier {
    public static void main(String[] args) {
        String schemaStr = "id string, click string, dt string, C1 string, banner_pos int, site_id string, site_domain string, site_category string, app_id string, app_domain string, app_category string, device_id string, "
	    + "device_ip string, device_model string, device_type string, device_conn_type string, C14 int, C15 int, C16 int, C17 int, C18 int, C19 int, C20 int, C21 int";
        // Full training data is ~1.2G: /Users/pujie/codes/data/avazu-ctr-train-8M.csv or
        // https://ubix-bigdata-test.oss-cn-beijing-internal.aliyuncs.com/ml_test/avazu-ctr-train-8M.csv
        // Small sample is ~68M: /Users/pujie/codes/data/avazu-small.csv
        // NOTE(review): the full-data source previously used setIgnoreFirstLine(true);
        // confirm whether avazu-small.csv also carries a header row.
        CsvSourceBatchOp originAllData = new CsvSourceBatchOp()
            .setFilePath("/Users/pujie/codes/data/avazu-small.csv")
            .setSchemaStr(schemaStr);
        // 90/10 split: the main output is the training set, side output 0 the hold-out set.
        BatchOperator<?> originTrainData = new SplitBatchOp().setFraction(0.9).linkFrom(originAllData);
        BatchOperator<?> originPredictData = originTrainData.getSideOutput(0);

        String labelName = "click";  // label column
        // Feature columns, split into categorical and numerical groups.
        String[] selectedNames = new String[] {
            "C1", "banner_pos", "site_category", "app_domain", "app_category", "device_type", "device_conn_type", "C14", "C15", "C16", "C17", "C18", "C19", "C20", "C21", "site_id", "site_domain", "device_id", "device_model"
        };
        String[] categoryNames = new String[] {
            "C1", "banner_pos", "site_category", "app_domain", "app_category", "device_type", "device_conn_type", "site_id", "site_domain", "device_id", "device_model"
        };
        String[] numericalNames = new String[] {
            "C14", "C15", "C16", "C17", "C18", "C19", "C20", "C21"
        };

        // Hash-bucket count; this is also the MLP input-layer width, so the two
        // must stay in sync (previously the literal 30000 was duplicated).
        int numHashFeatures = 30000;
        // Column name of the assembled feature vector produced by the pipeline.
        String vecColName = "vec";
        // Feature-engineering pipeline: standardize numerical columns, then hash
        // all selected columns into a single sparse vector.
        Pipeline featurePipeline = new Pipeline()
            .add(
                new StandardScaler().setSelectedCols(numericalNames)
            ).add(
                new FeatureHasher()
                    .setSelectedCols(selectedNames)
                    .setCategoricalCols(categoryNames)
                    .setOutputCol(vecColName)
                    .setNumFeatures(numHashFeatures)
            );

        try {
            // Fit the feature pipeline ONCE on the training data and reuse the
            // fitted model for both splits. (Fitting a second pipeline on the
            // predict split — as the original did — gives the StandardScaler
            // different statistics on train vs. test, producing an inconsistent
            // feature space and leaking test-set statistics.)
            PipelineModel featureModel = featurePipeline.fit(originTrainData);
            BatchOperator<?> trainData = featureModel.transform(originTrainData);
            BatchOperator<?> predictData = featureModel.transform(originPredictData);

            // MLP model: input layer must equal the hashed feature width, one
            // hidden layer of 8 units, output layer of 2 (binary classification).
            BatchOperator<?> trainOp = new MultilayerPerceptronTrainBatchOp()
                .setLabelCol(labelName)
                .setVectorCol(vecColName)
                .setLayers(new int[]{numHashFeatures, 8, 2})
                .setMaxIter(20);

            // Train (print() also triggers execution of the training job).
            BatchOperator<?> model = trainOp.linkFrom(trainData);
            model.print();

            // Predict on the hold-out split. The prediction-detail column is
            // required by the binary-classification evaluator below; without it
            // the evaluation step fails at runtime.
            BatchOperator<?> predictor = new MultilayerPerceptronPredictBatchOp()
                .setPredictionCol("pred")
                .setPredictionDetailCol("detail")
                .linkFrom(model, predictData);

            // Evaluate on the hold-out split.
            BinaryClassMetrics metrics = new EvalBinaryClassBatchOp()
                .setLabelCol(labelName)
                .setPredictionDetailCol("detail")
                .linkFrom(predictor)
                .collectMetrics();
            System.out.println("对测试数据的评估：");
            System.out.println("AUC:" + metrics.getAuc());
            System.out.println("Accuracy:" + metrics.getAccuracy());
            System.out.println("LogLoss:" + metrics.getLogLoss());

        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}