package net.bwie.gongdan.code;

import com.alibaba.alink.operator.batch.BatchOperator;
import com.alibaba.alink.operator.batch.dataproc.SplitBatchOp;
import com.alibaba.alink.operator.batch.source.CsvSourceBatchOp;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.BatchTableEnvironment;

import java.util.HashMap;
import java.util.Map;

import static net.bwie.gongdan.bean.FilePath.*;

public class Alink {

    /** Cap on rows read from each CSV source; keeps local demo runs fast. */
    private static final int SAMPLE_ROW_LIMIT = 20000;

    /**
     * Big-data user profiling #10: repeat-purchase prediction.
     * Reads the competition CSV data sets, derives per-user features with
     * Flink SQL, joins them onto the training labels, and splits the result
     * into train/test sets with Alink's {@link SplitBatchOp}.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink batch job fails
     */
    public static void main(String[] args) throws Exception {
        // 1. Initialize the Flink batch table environment.
        BatchTableEnvironment batchTable = initialization();
        // 2. Read all CSV data sets (train / test / user info / user log).
        Map<String, BatchOperator<?>> readDataFinish = readDataList();
        // 3. Extract features and build the train/test split.
        getAlgorithmFeature(batchTable, readDataFinish);
    }

    /**
     * Creates the Flink batch table environment.
     *
     * @return a {@link BatchTableEnvironment} backed by a single-parallelism
     *         execution environment (deterministic output for this demo)
     */
    private static BatchTableEnvironment initialization() {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps row order and console output deterministic.
        env.setParallelism(1);
        return BatchTableEnvironment.create(env);
    }

    /**
     * Reads every CSV source, each capped at {@code SAMPLE_ROW_LIMIT} rows.
     *
     * @return data sets keyed by logical name: trainData1, trainData2,
     *         testData1, testData2, userInfo, userLog
     */
    private static Map<String, BatchOperator<?>> readDataList() {
        Map<String, BatchOperator<?>> map = new HashMap<>();
        // Training labels: (user, merchant) pairs with a repeat-purchase label.
        map.put("trainData1", readCsv(FILE1_TRAIN_FORMAT1_FILEPATH1,
                "user_id long, merchant_id long, label int"));
        map.put("trainData2", readCsv(FILE2_TRAIN_FORMAT2_FILEPATH2,
                "user_id long, age_range int, gender int, merchant_id long, label int, activity_log string"));
        // Test data: prob column is the prediction slot to be filled in.
        map.put("testData1", readCsv(FILE1_TEST_FORMAT1_FILEPATH1,
                "user_id long, merchant_id long, prob double"));
        map.put("testData2", readCsv(FILE2_TEST_FORMAT2_FILEPATH2,
                "user_id long, age_range int, gender int, merchant_id long, label int, activity_log string"));
        // User demographic profile.
        map.put("userInfo", readCsv(FILE1_USER_INFO_FORMAT1_FILEPATH1,
                "user_id long, age_range int, gender int"));
        // User behaviour log; source of all derived features.
        map.put("userLog", readCsv(FILE1_USER_LOG_FORMAT1_FILEPATH1,
                "user_id long, item_id long, cat_id long, merchant_id long, brand_id long, time_stamp string, action_type int"));
        return map;
    }

    /**
     * Builds one comma-delimited CSV source with its header row skipped,
     * truncated to {@code SAMPLE_ROW_LIMIT} rows.
     *
     * @param filePath  path of the CSV file
     * @param schemaStr Alink schema string describing the columns
     * @return the capped CSV source operator
     */
    private static BatchOperator<?> readCsv(String filePath, String schemaStr) {
        return new CsvSourceBatchOp()
                .setFilePath(filePath)
                .setSchemaStr(schemaStr)
                .setFieldDelimiter(",")
                .setIgnoreFirstLine(true)
                .firstN(SAMPLE_ROW_LIMIT);
    }

    /**
     * Derives per-user features (purchase count, distinct purchased
     * categories), joins them onto the training labels by {@code user_id},
     * and splits the labelled feature table 80/20 into train and test sets.
     *
     * @param batchTable     the Flink batch table environment
     * @param readDataFinish data sets produced by {@link #readDataList()}
     * @throws Exception if the Flink batch job fails
     */
    private static void getAlgorithmFeature(BatchTableEnvironment batchTable,
                                            Map<String, BatchOperator<?>> readDataFinish) throws Exception {
        // Convert the source operators into Tables.
        Map<String, Table> tableMessage = readDataCreateTable(batchTable, readDataFinish);
        // Register the views the SQL queries below refer to.
        batchTable.createTemporaryView("user_log_table", tableMessage.get("user_log_table"));
        batchTable.createTemporaryView("user_info_table", tableMessage.get("user_info_table"));
        batchTable.createTemporaryView("train_data1", tableMessage.get("trainData1"));

        // Feature 1: number of purchase events per user.
        // NOTE(review): assumes action_type = 1 marks a purchase — confirm
        // against the data dictionary of this data set.
        Table purchaseCountResult = batchTable.sqlQuery(
                "SELECT " +
                        "user_id," +
                        "COUNT(*) AS purchase_count " +
                        "FROM user_log_table " +
                        "WHERE action_type = 1 " +
                        "GROUP BY user_id"
        );
        batchTable.createTemporaryView("purchase_count_table", purchaseCountResult);

        // Feature 2: number of distinct categories per user.
        // Fix: the previous COUNT(*) ... GROUP BY user_id, cat_id produced one
        // row per (user, category) pair, not a distinct-category count.
        Table differentCategoriesCountResult = batchTable.sqlQuery(
                "SELECT " +
                        "user_id," +
                        "COUNT(DISTINCT cat_id) AS distinct_cat_count " +
                        "FROM user_log_table " +
                        "GROUP BY user_id"
        );
        batchTable.createTemporaryView("distinct_cat_table", differentCategoriesCountResult);

        // Merge the features and attach the training labels.
        // Fix: the previous Table-API joins had no key predicate (Cartesian
        // product) and clashing user_id columns; join explicitly on user_id.
        Table trainUserFeatures = batchTable.sqlQuery(
                "SELECT t.user_id, t.merchant_id, t.label, " +
                        "p.purchase_count, d.distinct_cat_count " +
                        "FROM train_data1 t " +
                        "JOIN purchase_count_table p ON t.user_id = p.user_id " +
                        "JOIN distinct_cat_table d ON t.user_id = d.user_id"
        );

        // Bridge back from the Table API into an Alink operator.
        BatchOperator<?> batchOperator = BatchOperator.fromTable(trainUserFeatures);

        // 80/20 split; a fixed seed keeps the split reproducible across runs.
        SplitBatchOp splitBatchOp = new SplitBatchOp()
                .setFraction(0.8)
                .setRandomSeed(0);
        BatchOperator<?> trainData = batchOperator.link(splitBatchOp);
        System.out.println("训练集条数:" + trainData.count());
        // Side output 0 of SplitBatchOp carries the remaining 20%.
        BatchOperator<?> testData = trainData.getSideOutput(0);
        System.out.println("测试集条数:" + testData.count());
    }

    /**
     * Converts each source operator into a Flink {@link Table}.
     *
     * @param batchTable     the batch table environment used for conversion
     * @param readDataFinish operators produced by {@link #readDataList()}
     * @return tables keyed by name: user_log_table, user_info_table,
     *         trainData1, trainData2, testData1, testData2
     */
    private static Map<String, Table> readDataCreateTable(BatchTableEnvironment batchTable,
                                                          Map<String, BatchOperator<?>> readDataFinish) {
        Map<String, Table> map = new HashMap<>();
        map.put("user_log_table",
                batchTable.fromDataSet(readDataFinish.get("userLog").getDataSet()));
        map.put("user_info_table",
                batchTable.fromDataSet(readDataFinish.get("userInfo").getDataSet()));
        map.put("trainData1",
                batchTable.fromDataSet(readDataFinish.get("trainData1").getDataSet()));
        map.put("trainData2",
                batchTable.fromDataSet(readDataFinish.get("trainData2").getDataSet()));
        map.put("testData1",
                batchTable.fromDataSet(readDataFinish.get("testData1").getDataSet()));
        map.put("testData2",
                batchTable.fromDataSet(readDataFinish.get("testData2").getDataSet()));
        return map;
    }
}

/*todo：***********************************废案************************************/

// 7. 特征工程
// 7.1 计算用户购买次数
//        BatchOperator purchaseCount = userLog
//                .where("action_type = 1")
//                .groupBy("user_id","")
//                .select("count(*) as purchase_count");
//        // 7.2 计算购买不同品类数量
//        BatchOperator distinctCatCount = new CountDistinctBatchOp()
//                .setSelectedCol("cat_id")
//                .setGroupByCols("user_id")
//                .setOutputCol("distinct_cat_count");
//        // 7.3 合并特征
//        BatchOperator userFeatures = purchaseCount.join(distinctCatCount, "user_id");
//        // 8. 连接训练数据和特征数据
//        BatchOperator trainWithFeatures = trainData1.join(userFeatures, "user_id");
//        // 9. 划分训练集和验证集
//        BatchOperator[] splits = trainWithFeatures.link(new SplitBatchOp()
//                .setFraction(0.8)
//                .setRandomSeed(0));
//        BatchOperator trainSet = splits[0];
//        BatchOperator validSet = splits[1];
//        // 10. 训练逻辑回归模型
//        BatchOperator model = new LogisticRegressionTrainBatchOp()
//                .setFeatureCols("purchase_count", "distinct_cat_count")
//                .setLabelCol("label")
//                .linkFrom(trainSet);