package com.atguigu.gmall.realtime.app.dwd.db;

import com.atguigu.gmall.realtime.util.MyKafkaUtil;
import com.atguigu.gmall.realtime.util.MySQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * Author: Felix
 * Date: 2022/6/17
 * Desc: 交易域加购事务事实表
 * 需要启动的进程
 *      zk、kafka、maxwell、DwdTradeCartAdd
 * 开发流程
 *      基本环境准备
 *      检查点相关设置
 *      从kafka的topic_db主题中读取数据 创建业务数据动态表    ----kafka连接器
 *      从业务数据动态表中过滤出加购数据
 *          table='cart_info'
 *          type = 'insert' or (type='update' and data['sku_num'] > old['sku_num'])
 *      从业务数据库中查询字典表数据  创建字典动态表          ----jdbc连接器
 *      使用lookup join 将加购表和字典表进行连接            ----lookup join
 *      创建一个动态表和要写入kafka主题进行映射              ----upsert kafka连接器
 *      将连接之后的数据写到kafka的dwd_trade_cart_add主题(加购事务事实表)
 * 执行流程
 *      运行模拟生成业务数据的jar包
 *      将生成的业务数据保存到业务数据库mysql中
 *      binlog会记录业务数据库发生的变化
 *      maxwell从binlog中读取变化数据，并封装为json字符串发送给kafka的topic_db主题
 *      DwdTradeCartAdd从topic_db主题中读取数据创建业务动态表
 *      从业务数据动态表中过滤出加购数据
 *      从业务数据库中查询字典表数据
 *      将加购表和字典表进行连接
 *      将连接之后的数据写到kafka的dwd_trade_cart_add主题
 */
public class DwdTradeCartAdd {
    public static void main(String[] args) {
        //TODO 1. Basic environment setup
        //1.1 Create the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Set the parallelism
        env.setParallelism(4);
        //1.3 Create the table execution environment on top of the stream environment
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        //1.4 Set the state TTL (idle state retention) — bounds join state growth
        tEnv.getConfig().setIdleStateRetention(Duration.ofSeconds(5));

        /*
        //TODO 2. Checkpoint settings
        //2.1 Enable checkpointing
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        //2.2 Checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        //2.3 Keep externalized checkpoints after job cancellation
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        //2.4 Minimum pause between two checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        //2.5 Restart strategy
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
        //2.6 State backend and checkpoint storage
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop202:8020/xxxx");
        //2.7 User for HDFS access
        System.setProperty("HADOOP_USER_NAME","atguigu");
        */

        //TODO 3. Read from the kafka topic_db topic and map it to a dynamic table
        //`data`/`old` hold the Maxwell row images; proc_time is needed for the lookup join below
        String topicDbDdl =
            "CREATE TABLE topic_db (\n"
                + "  `database` string,\n"
                + "  `table` string,\n"
                + "  `type` STRING,\n"
                + "  `ts` string,\n"
                + "  `data` MAP<string, string>,\n"
                + "  `old` MAP<string, string>,\n"
                + "  proc_time as proctime()\n"
                + ")"
                + MyKafkaUtil.getKafkaDDL("topic_db","dwd_trade_cart_add_group");
        tEnv.executeSql(topicDbDdl);

        //tEnv.executeSql("select * from topic_db").print();

        //TODO 4. Filter cart-add records out of the business-data table
        //Keep inserts, plus updates where sku_num grew; for updates, sku_num becomes the delta
        String cartAddSql =
            "select \n"
                + " `data`['id'] id,\n"
                + " `data`['user_id'] user_id,\n"
                + " `data`['sku_id'] sku_id,\n"
                + " `data`['source_type'] source_type,\n"
                + " if(`type`='insert',`data`['sku_num'],cast((CAST(`data`['sku_num'] AS INT) - CAST(`old`['sku_num'] AS INT)) "
                + " as string)) sku_num,\n"
                + " ts,\n"
                + " proc_time\n"
                + "from\n"
                + " topic_db\n"
                + "where \n"
                + " `table`='cart_info' and ( `type` = 'insert'  or (`type` = 'update'  and `old`['sku_num'] is not null "
                + " and CAST(`data`['sku_num'] AS INT) > CAST(`old`['sku_num'] AS INT)))";
        Table cartInfoTable = tEnv.sqlQuery(cartAddSql);
        tEnv.createTemporaryView("cart_add",cartInfoTable);

        //tEnv.executeSql("select * from cart_add").print();

        //TODO 5. Create the dictionary dynamic table backed by the business MySQL database
        tEnv.executeSql(MySQLUtil.getBaseDicLookUpDDL());

        //TODO 6. Lookup-join the cart-add table with the dictionary table
        //FOR SYSTEM_TIME AS OF proc_time makes this a processing-time lookup join
        String joinSql =
            "select\n"
                + " cadd.id,\n"
                + " cadd.user_id,\n"
                + " cadd.sku_id,\n"
                + " cadd.sku_num,\n"
                + " cadd.ts,\n"
                + " cadd.source_type,\n"
                + " dic.dic_name source_type_name\n"
                + "from\n"
                + " cart_add cadd join base_dic FOR SYSTEM_TIME AS OF cadd.proc_time AS dic on cadd.source_type = dic.dic_code";
        Table joinedTable = tEnv.sqlQuery(joinSql);
        tEnv.createTemporaryView("res_table",joinedTable);

        //tEnv.executeSql("select * from res_table").print();

        //TODO 7. Map a dynamic table onto the kafka sink topic dwd_trade_cart_add
        //Upsert-kafka requires a primary key; column order must match the select above
        String sinkDdl =
            "CREATE TABLE dwd_trade_cart_add (\n"
                + "  id STRING,\n"
                + "  user_id STRING,\n"
                + "  sku_id STRING,\n"
                + "  sku_num STRING,\n"
                + "  ts STRING,\n"
                + "  source_type STRING,\n"
                + "  source_type_name string,\n"
                + "  PRIMARY KEY (id) NOT ENFORCED\n"
                + ") "
                + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_cart_add");
        tEnv.executeSql(sinkDdl);

        //TODO 8. Write the join result to the dwd_trade_cart_add topic
        tEnv.executeSql("insert into dwd_trade_cart_add select * from res_table");

    }
}
