package com.nepu.gmall.realtime.app.dwd;

import com.nepu.gmall.realtime.util.KafkaUtils;
import com.nepu.gmall.realtime.util.MysqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * DWD-layer job for the trade domain: builds the "cart add" transactional fact table.
 * <p>
 * What it does:
 * <ul>
 *   <li>Dimension degeneration: the {@code source_type} code is enriched with its name
 *       by a lookup join against the {@code base_dic} dictionary table.</li>
 *   <li>Keeps one row per cart-add action (both brand-new cart rows and quantity
 *       increases on existing rows).</li>
 * </ul>
 *
 * @author chenshuaijun
 * @create 2023-02-26 21:05
 */
public class DwdTradeCartAdd {

    public static void main(String[] args) throws Exception {

        // TODO 1. Set up the execution environments: one streaming env and one table env on top of it.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // Checkpoint configuration (disabled for local development):
        /*// Checkpoint every 5 minutes with exactly-once semantics.
        env.enableCheckpointing(5 * 60000L, CheckpointingMode.EXACTLY_ONCE);
        // A checkpoint that takes longer than 10 minutes is discarded.
        env.getCheckpointConfig().setCheckpointTimeout(10 * 60000L);
        // Externalized checkpoints: keep checkpoint metadata on cancellation so a failed
        // job can be restored from the last checkpoint.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // Restart strategy: at most 10 failures per day, 3 minutes between attempts.
        env.setRestartStrategy(RestartStrategies.failureRateRestart(10, Time.of(1L, TimeUnit.DAYS), Time.of(3L, TimeUnit.MINUTES)));
        // Minimum pause between two consecutive checkpoints.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        // State backend: in-memory hash map.
        env.setStateBackend(new HashMapStateBackend());
        // Where checkpoints are persisted.
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/checkpoint");
        // HDFS is only writable by the atguigu user, so impersonate it.
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 2. Read the raw business change-log data from the topic_db Kafka topic.
        /*tableEnv.executeSql("CREATE TABLE topic_db ( " +
                "  `database` string, " +
                "  `table` string, " +
                "  `type` string, " +
                "  `data` map<string,string>, " +
                "  `old` map<string,string>, " +
                "  `proc_time` as PROCTIME() " +
                ")" + KafkaUtils.getKafkaDDL("topic_db","DwdTradeCartAdd"));*/
        // Register the topic_db source table via the shared DDL helper.
        tableEnv.executeSql(MysqlUtil.getTopicDBLookUpDDL("DwdTradeCartAdd"));

        // TODO 3. Filter cart_info rows down to genuine cart-add actions:
        //   (1) an insert, or
        //   (2) an update whose `old` map contains sku_num AND the old sku_num is
        //       smaller than the new one (i.e. the quantity was increased).
        //   For updates, the emitted sku_num is the delta (new - old), so every row
        //   represents the number of items added by that single action.
        Table cartAddFilterTable = tableEnv.sqlQuery("" +
                "select " +
                "    `data`['id'] id, " +
                "    `data`['user_id'] user_id, " +
                "    `data`['sku_id'] sku_id, " +
                "    `data`['cart_price'] cart_price, " +
                "    `data`['sku_name'] sku_name, " +
                "    if(`type`='insert',`data`['sku_num'],cast(cast(`data`['sku_num'] as int)-cast(`old`['sku_num'] as int) as string)) sku_num, " +
                "    `data`['is_checked'] is_checked, " +
                "    `data`['create_time'] create_time, " +
                "    `data`['operate_time'] operate_time, " +
                "    `data`['is_ordered'] is_ordered, " +
                "    `data`['order_time'] order_time, " +
                "    `data`['source_type'] source_type, " +
                "    `data`['source_id'] source_id, " +
                "     proc_time " +
                "from topic_db " +
                "where `table`='cart_info' and  " +
                "(`type`='insert' or (`type`='update' and `old`['sku_num'] is not null and cast(`old`['sku_num'] as int)<cast(`data`['sku_num'] as int)))");
        tableEnv.createTemporaryView("cart_add", cartAddFilterTable);

//        tableEnv.toAppendStream(cartAddFilterTable,Row.class).print(">>>>>");
        // TODO 4. Register the base_dic dictionary table (MySQL lookup source).
        tableEnv.executeSql(MysqlUtil.getBaseDicLookUpDDL());

        // TODO 5. Lookup join: resolve source_type to its human-readable name.
        //   FOR SYSTEM_TIME AS OF ca.proc_time performs a processing-time temporal
        //   (lookup) join against the dictionary, so dictionary rows are fetched
        //   on demand instead of being fully materialized in state.
        Table resultTable = tableEnv.sqlQuery("" +
                "select " +
                "    ca.id, " +
                "    ca.user_id, " +
                "    ca.sku_id, " +
                "    ca.cart_price, " +
                "    ca.sku_num, " +
                "    ca.sku_name, " +
                "    ca.is_checked, " +
                "    ca.create_time, " +
                "    ca.operate_time, " +
                "    ca.is_ordered, " +
                "    ca.order_time, " +
                "    ca.source_type source_type_id, " +
                "    dic.dic_name source_type_name, " +
                "    ca.source_id " +
                "from cart_add ca join base_dic for system_time as of ca.proc_time as dic " +
                "on ca.source_type = dic.dic_code");

        // TODO 6. Write the enriched rows out to the dwd_trade_cart_add Kafka topic.
        tableEnv.createTemporaryView("resultTable", resultTable);
//        tableEnv.toAppendStream(resultTable,Row.class).print(">>>>>");
        tableEnv.executeSql("" +
                "create table dwd_trade_cart_add( " +
                "    `id` string, " +
                "    `user_id` string, " +
                "    `sku_id` string, " +
                "    `cart_price` string, " +
                "    `sku_num` string, " +
                "    `sku_name` string, " +
                "    `is_checked` string, " +
                "    `create_time` string, " +
                "    `operate_time` string, " +
                "    `is_ordered` string, " +
                "    `order_time` string, " +
                "    `source_type_id` string, " +
                "    `source_type_name` string, " +
                "    `source_id` string " +
                ")" + KafkaUtils.getKafkaSinkDDL("dwd_trade_cart_add"));

        // The INSERT submitted via executeSql() launches the Flink job by itself.
        // NOTE: do NOT call env.execute() here — this pipeline is pure Table API
        // (the DataStream debug prints above are commented out), so the streaming
        // topology has no operators and env.execute() would fail with
        // "No operators defined in streaming topology".
        tableEnv.executeSql("insert into dwd_trade_cart_add select * from resultTable");
    }
}
