package com.swsc.ai.tasks;

import com.swsc.ai.config.SparkSqlConf;
import com.swsc.ai.constant.EnvConstant;
import com.swsc.ai.util.SparkUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

/**
 * Feature-extraction task for customer stock-trading behavior.
 *
 * @author DuanCXin
 * @since 2023-10-25
 */
public class StkTransBehaviorTask extends TemplateTask {

    /** Maximum number of most-recent trade records retained per customer. */
    private static final int MAX_BEHAVIORS_PER_CLIENT = 5000;

    @Override
    public String genInputPath(String dataSource) {
        // This task does not derive its input path from the data source;
        // input tables arrive via processData's inputParams instead.
        return null;
    }

    @Override
    protected String genSelectColumns() {
        return "trade_date, client_id, stock_code, entrust_bs, business_amount, business_price";
    }

    @Override
    public String genWhereCondition(String taskDate) {
        // Selects the partition matching the task date.
        // NOTE(review): this emits a PARTITION clause rather than a WHERE
        // predicate — presumably consumed specially by TemplateTask; confirm.
        return "PARTITION (`" + taskDate + "`)";
    }

    /**
     * Builds the per-customer trade-behavior feature strings.
     *
     * <p>Pipeline: join today's trades to the concept table to map
     * {@code stock_code} -> {@code innercode}, encode each trade as
     * {@code innercode:entrust_bs:business_amount:business_price:trade_date},
     * merge with historical behavior strings, keep only the latest
     * {@value #MAX_BEHAVIORS_PER_CLIENT} trades per customer, and write both
     * the current-day and merged results as single CSV files.
     *
     * @param session     active Spark session
     * @param outputPath  base output directory; current-day and history
     *                    results are written under it
     * @param inputParams [0] = input trade table name, [1] = label used in
     *                    progress logging; also forwarded to
     *                    {@link SparkUtil#generateHistoryData}
     */
    @Override
    public void processData(SparkSession session, String outputPath, String... inputParams) {
        // Security-code -> inner-code mapping table.
        Dataset<Row> stkCoconceptDf =
                SparkSqlConf.getDataBySql(session, "stk_coconcept", "1 = 1", "innercode", "secucode");
        stkCoconceptDf.createOrReplaceTempView("stk_coconcept");

        // Today's trades collapsed to one tab-separated behavior string per
        // client. The ORDER BY clauses formerly inside the subqueries were
        // removed: Spark does not preserve subquery ordering across a
        // join/group, so they were pure overhead.
        Dataset<Row> sqlDF = session.sql(
                "SELECT res.client_id, concat_ws('\t', collect_set(res.behive)) as behives FROM (\n" +
                "SELECT ast.client_id, CONCAT(sc.innercode, ':', ast.entrust_bs, ':', ast.business_amount, ':', ast.business_price, ':', ast.trade_date) as behive FROM (\n" +
                "SELECT trade_date, client_id, stock_code, entrust_bs, business_amount, business_price \n" +
                "FROM " + inputParams[0] + ") as ast\n" +
                "INNER JOIN (\n" +
                "     SELECT innercode, secucode FROM stk_coconcept\n" +
                ") as sc \n" +
                "ON ast.stock_code = sc.secucode\n" +
                ") as res GROUP BY client_id");

        StructType schema = new StructType(new StructField[]{
                new StructField("client_id", DataTypes.StringType, false, Metadata.empty()),
                new StructField("behives", DataTypes.StringType, false, Metadata.empty())
        });
        String structType = "client_id STRING, behives STRING";
        Dataset<Row> historyData = SparkUtil.generateHistoryData(session, schema, structType, inputParams);

        // Position-based union; both sides share the (client_id, behives)
        // schema. union replaces the deprecated unionAll (same semantics).
        Dataset<Row> rowDataset = historyData.union(sqlDF);
        System.out.println(inputParams[1] + "：合并后的数据量：" + rowDataset.count());
        Dataset<Row> resultTempData = rowDataset.groupBy("client_id")
                .agg(functions.concat_ws("\t", functions.collect_list("behives")).as("behives"));

        // Split each client's concatenated behavior string back into
        // individual trade records (one row per trade).
        resultTempData.select("client_id", "behives").createOrReplaceTempView("tempStkView");
        // Dropped the unused, unaliased size(split(...)) column.
        Dataset<Row> sql = session.sql(
                "select client_id, split(behives, '\t') AS actionList from tempStkView");
        Dataset<Row> flatDataset = sql
                .select(functions.explode(functions.col("actionList")), functions.col("client_id"))
                .selectExpr("col AS action", "client_id");
        flatDataset.createOrReplaceTempView("stk_temp");

        // Keep only each customer's latest MAX_BEHAVIORS_PER_CLIENT trades.
        // FIX: the window must partition by client_id (per-customer top-N);
        // it previously partitioned by the stock inner code, which kept the
        // top N per stock across all customers instead.
        Dataset<Row> splitStkView = session.sql(
                "select client_id, innercode, entrust_bs, business_amount, business_price, trade_date from\n" +
                "(select client_id,\n" +
                "       split(action,':')[0] as innercode,\n" +
                "       split(action,':')[1] as entrust_bs,\n" +
                "       split(action,':')[2] as business_amount,\n" +
                "       split(action,':')[3] as business_price,\n" +
                "       split(action,':')[4] as trade_date,\n" +
                "       ROW_NUMBER() OVER(PARTITION BY client_id ORDER BY split(action,':')[4] DESC) AS rn\n" +
                "from stk_temp) as ranked\n" +
                "where rn <= " + MAX_BEHAVIORS_PER_CLIENT);
        splitStkView.createOrReplaceTempView("splitStkView");

        // Re-encode each trade and re-aggregate per client, ordered
        // ascending by trade_date via sort_array over (trade_date, behive).
        Dataset<Row> groupStkView = session.sql(
                "select client_id, CONCAT(innercode, ':', entrust_bs, ':', business_amount, ':', business_price, ':', trade_date) as behive, trade_date from splitStkView");
        groupStkView.createOrReplaceTempView("groupStkView");
        Dataset<Row> resultData = session.sql(
                "SELECT client_id, concat_ws('\t',sort_array(collect_list(struct(trade_date, behive)),true).behive) as behives FROM groupStkView GROUP BY client_id");
        System.out.println(inputParams[1] + "：结果集数据量：" + resultData.count());

        // Current-day result.
        // FIX: overwrite is a writer mode, not a generic option —
        // option("mode", "overwrite") was silently ignored, so reruns would
        // fail once the output path existed.
        sqlDF.coalesce(1).write()
                .option("header", "true")
                .mode("overwrite")
                .csv(outputPath + EnvConstant.CURRENT_DATA_PATH);

        // Merged current-day + history result.
        resultData.coalesce(1).write()
                .option("header", "true")
                .mode("overwrite")
                .csv(outputPath + EnvConstant.HISTORY_DATA_PATH);
    }
}
