package com.atguigu.gmall.realtime.app.dwd.db;

import com.atguigu.gmall.realtime.util.KafkaUtil;
import com.atguigu.gmall.realtime.util.MysqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * @author caodan
 * @version 1.0
 * @date 2022-09-27 17:40
 * <p>
 * Trade domain — cart-add transactional fact table (DWD layer).
 * Reads the Maxwell CDC envelope from the {@code topic_db} Kafka topic, keeps only
 * cart-add events from {@code cart_info}, enriches them with the MySQL dictionary
 * table via a processing-time lookup join, and writes the result to the
 * {@code dwd_trade_cart_add} upsert-Kafka topic.
 */
public class DwdTradeAddCartApp {


    public static void main(String[] args) {
        // Streaming environment plus the Table API environment layered on top of it.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // Bound state size: idle query state is dropped after 5 seconds.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(5));

        // Checkpointing (kept disabled here; enable for a production deployment):
        /*// checkpoint interval
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        // minimum pause between two checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        // restart strategy
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
        // whether checkpoints are retained after job cancellation
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // state backend
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs:hadoop102:8020/xxxx");*/

        declareOdsSource(tableEnv);
        registerCartAddView(tableEnv);
        declareDictionaryLookup(tableEnv);
        registerEnrichedView(tableEnv);
        sinkToKafka(tableEnv);
    }

    /** Declares the ODS source table over the Kafka CDC topic via the Flink Kafka connector. */
    private static void declareOdsSource(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE topic_db (\n" +
                "  `database` STRING,\n" +
                "  `table` STRING,\n" +
                "  `type` STRING,\n" +
                "  `data` map<string, string>,\n" +
                "  `old` map<string, string>,\n" +
                "  `ts` STRING,\n" +
                "  `proc_time` as PROCTIME()\n" +
                ") " + KafkaUtil.getKafkaDDLPorps("topic_db","dwd_trade_cart_add_group"));
//        tableEnv.executeSql("select * from topic_db").print();
    }

    /**
     * Filters the CDC stream down to cart-add events and registers them as {@code cart_add}.
     * An event counts as a cart add when it is an insert into cart_info, or an update that
     * raised sku_num — in which case sku_num is rewritten to the increment (new - old).
     */
    private static void registerCartAddView(StreamTableEnvironment tableEnv) {
        Table filteredCartEvents = tableEnv.sqlQuery(
                "select " +
                        "data['id'] id," +
                        "data['user_id'] user_id," +
                        "data['sku_id'] sku_id," +
                        "data['source_id'] source_id," +
                        "data['source_type'] source_type," +
                        "if(`type` = 'insert'," +
                        "data['sku_num'],cast((cast(data['sku_num'] as int) - cast(`old`['sku_num'] as int)) as string)) sku_num," +
                        "ts," +
                        "proc_time" +
                        " from `topic_db` " +
                        " where `table` = 'cart_info'" +
                        " and (`type` = 'insert'" +
                        " or (`type` = 'update' " +
                        " and `old`['sku_num'] is not null " +
                        " and cast(data['sku_num'] as int) > cast(`old`['sku_num'] as int)))");
        tableEnv.createTemporaryView("cart_add", filteredCartEvents);

//        tableEnv.executeSql("select * from cart_add").print();
    }

    /** Declares the MySQL dictionary table through the JDBC connector for lookup joins. */
    private static void declareDictionaryLookup(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql(MysqlUtil.getMysqlDDLStr());

//        tableEnv.executeSql("select * from base_dic").print();
    }

    /**
     * Enriches cart-add events with the dictionary name of their source type using a
     * processing-time lookup join, and registers the result as {@code result_table}.
     */
    private static void registerEnrichedView(StreamTableEnvironment tableEnv) {
        Table enriched = tableEnv.sqlQuery("select " +
                "cadd.id," +
                "user_id," +
                "sku_id," +
                "source_id," +
                "source_type," +
                "dic_name source_type_name," +
                "sku_num," +
                "ts" +
                " from cart_add cadd " +
                " join base_dic for system_time as of cadd.proc_time as dic " +
                " on cadd.source_type=dic.dic_code");
        tableEnv.createTemporaryView("result_table",enriched);
    }

    /**
     * Declares the upsert-Kafka sink table (keyed by id) and submits the insert job.
     * Note: printing the result instead would consume the stream and nothing would
     * reach Kafka, so only the insert statement is executed here.
     */
    private static void sinkToKafka(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("CREATE TABLE dwd_trade_cart_add (\n" +
                "id STRING,\n" +
                "user_id STRING,\n" +
                "sku_id STRING,\n" +
                "source_id STRING,\n" +
                "source_type STRING,\n" +
                "source_type_name STRING,\n" +
                "sku_num STRING,\n" +
                "ts STRING,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ") " + KafkaUtil.getUpsetKafkaProps("dwd_trade_cart_add"));
        tableEnv.executeSql("insert into dwd_trade_cart_add select * from result_table");
    }

}
