package com.atguigu.gmall.realtime.app.dwd;

import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import com.atguigu.gmall.realtime.utils.MySqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author Felix
 * @date 2022/12/30
 * Trade domain: cart-add (add-to-cart) fact table.
 * Processes that must be running:
 *      zk, kafka, maxwell, DwdTradeCartAdd
 * Execution flow:
 *      run the jar that simulates business data generation
 *      the generated business data is saved into the MySQL business database tables
 *      the binlog records the changes made to those business tables
 *      maxwell reads the table changes from the binlog, wraps them as JSON strings,
 *      and sends them to the kafka topic_db topic
 *      DwdTradeCartAdd reads from topic_db and creates a dynamic table over it
 *      filters out the add-to-cart behaviour
 *      reads the dictionary table from MySQL
 *      joins the cart-add table with the dictionary table
 *      writes the join result to a kafka topic
 */
public class DwdTradeCartAdd {

    public static void main(String[] args) {
        //TODO 1. Prepare the execution environments (stream + table)
        StreamTableEnvironment tableEnv = createTableEnvironment();

        //TODO 3. Read from kafka's topic_db topic and create a dynamic table over it
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_trade_cart_add_group"));
        // tableEnv.executeSql("select * from topic_db").print();

        //TODO 4. Filter out add-to-cart behaviour --- the cart-add table
        registerCartAddView(tableEnv);

        //TODO 5. Read the dictionary table from MySQL --- the lookup table
        tableEnv.executeSql(MySqlUtil.getBaseDicLookUpDDL());
        // tableEnv.executeSql("select * from base_dic").print();

        //TODO 6. Use a lookup join to connect the cart-add table with the dictionary table
        registerJoinedResultView(tableEnv);

        //TODO 7. Write the join result to a kafka topic
        writeResultToKafka(tableEnv);
    }

    /**
     * Creates the stream execution environment (parallelism 4) and wraps it in a
     * table environment. The checkpoint configuration is kept commented out, as in
     * the original development setup.
     */
    private static StreamTableEnvironment createTableEnvironment() {
        //1.1 Obtain the stream processing environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Set the parallelism
        env.setParallelism(4);
        //1.3 Obtain the table execution environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        /*
        //TODO 2. Checkpoint-related settings
        //2.1 Enable checkpointing
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        //2.2 Set the checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        //2.3 Keep checkpoints after the job is cancelled
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        //2.4 Set the minimum pause between two checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        //2.5 Set the restart strategy
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
        //2.6 Set the state backend
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop202:8020/xxx");
        //2.7 Set the user for HDFS access
        System.setProperty("HADOOP_USER_NAME","atguigu");
        */
        return tableEnv;
    }

    /**
     * Filters topic_db down to add-to-cart events (inserts, or updates that raised
     * sku_num) and registers the result as temporary view {@code cart_add}.
     * For updates, sku_num is the delta (new minus old) cast back to string.
     */
    private static void registerCartAddView(StreamTableEnvironment tableEnv) {
        Table cartAdd = tableEnv.sqlQuery("select\n" +
            "    `data`['id'] id,\n" +
            "    `data`['user_id'] user_id,\n" +
            "    `data`['sku_id'] sku_id,\n" +
            "    `data`['source_type'] source_type,\n" +
            "    if(`type`='insert',`data`['sku_num'],cast((CAST(`data`['sku_num'] AS INT) - CAST(`old`['sku_num'] AS INT)) as string)) sku_num,\n" +
            "    ts,\n" +
            "    proc_time\n" +
            "from \n" +
            "    topic_db\n" +
            "where \n" +
            "     `table`='cart_info' \n" +
            "and (`type`='insert' or (`type`='update' and `old`['sku_num'] is not null \n" +
            "    and CAST(`data`['sku_num'] AS INT) > CAST(`old`['sku_num'] AS INT)))");
        tableEnv.createTemporaryView("cart_add",cartAdd);
        // tableEnv.executeSql("select * from cart_add").print();
    }

    /**
     * Performs a processing-time lookup join of {@code cart_add} against the
     * {@code base_dic} dictionary table (resolving source_type to its name) and
     * registers the result as temporary view {@code res_table}.
     */
    private static void registerJoinedResultView(StreamTableEnvironment tableEnv) {
        Table resTable = tableEnv.sqlQuery("SELECT \n" +
            "   cadd.id,\n" +
            "   cadd.user_id,\n" +
            "   cadd.sku_id,\n" +
            "   cadd.sku_num,\n" +
            "   cadd.ts,\n" +
            "   cadd.source_type,\n" +
            "   dic.dic_name source_type_name\n" +
            "FROM cart_add AS cadd JOIN base_dic FOR SYSTEM_TIME AS OF cadd.proc_time AS dic\n" +
            "  ON cadd.source_type = dic.dic_code");
        tableEnv.createTemporaryView("res_table",resTable);
        // tableEnv.executeSql("select * from res_table").print();
    }

    /**
     * Maps an upsert-kafka sink table onto the dwd_trade_cart_add topic and
     * inserts the joined result into it. The INSERT submits the Flink job.
     */
    private static void writeResultToKafka(StreamTableEnvironment tableEnv) {
        //7.1 Create a dynamic table mapped onto the target topic
        tableEnv.executeSql("CREATE TABLE dwd_trade_cart_add (\n" +
            "  id string,\n" +
            "  user_id string,\n" +
            "  sku_id string,\n" +
            "  sku_num string,\n" +
            "  ts string,\n" +
            "  source_type string,\n" +
            "  source_type_name string,\n" +
            "  PRIMARY KEY (id) NOT ENFORCED\n" +
            ") " + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_cart_add"));
        //7.2 Write the result
        tableEnv.executeSql("insert into dwd_trade_cart_add select * from res_table");
    }
}
