package com.atguigu.gmall.realtime.app.dwd.db;

import com.atguigu.gmall.realtime.util.MyKafkaUtil;
import com.atguigu.gmall.realtime.util.MySqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author Felix
 * @date 2022/9/30
 * Trade domain: cart-add fact table.
 * Processes that must be running:
 *      zk, kafka, maxwell, DwdTradeCartAdd
 * Execution flow:
 *      When the quantity of an item in the cart table changes,
 *      the change is recorded in the MySQL binlog.
 *      Maxwell reads the change from the binlog, wraps it in a JSON string
 *      and publishes it to Kafka's topic_db topic.
 *      DwdTradeCartAdd reads the business change data from topic_db and
 *      filters out cart-add actions:
 *          table=cart_info
 *          type=insert or (type=update and old['sku_num'] is not null and data['sku_num']>old['sku_num'])
 *      It then reads the dictionary table from MySQL, denormalizes that
 *      dictionary dimension into the cart-add fact table, creates a dynamic
 *      table mapped to the target Kafka topic, and writes the joined
 *      cart-add fact rows to that topic.
 */
public class DwdTradeCartAdd {
    public static void main(String[] args) {
        //TODO 1. Basic environment setup
        //1.1 Stream execution environment
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Parallelism
        streamEnv.setParallelism(4);
        //1.3 Table execution environment on top of the stream environment
        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(streamEnv);

        //TODO 2. Checkpoint settings (omitted)

        //TODO 3. Read business data from topic_db and map it to a dynamic table
        streamTableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_trade_cart_add_group"));

        //TODO 4. Filter out cart-add actions.
        // A cart-add is either a fresh insert, or an update that increased
        // sku_num; for updates, sku_num is emitted as the delta (new - old).
        String cartAddSql = "select\n" +
            "    `data`['id'] id,\n" +
            "    `data`['user_id'] user_id,\n" +
            "    `data`['sku_id'] sku_id,\n" +
            "    `data`['source_type'] source_type,\n" +
            "    ts,\n" +
            "    proc_time,\n" +
            "    if(\n" +
            "        `type`='insert',`data`['sku_num'],CAST((CAST(`data`['sku_num'] AS INT) - CAST(`old`['sku_num'] AS INT)) AS string)\n" +
            "    ) sku_num \n" +
            "from topic_db\n" +
            "where `table`='cart_info'\n" +
            "and (`type`='insert' or\n" +
            "   (`type`='update' and `old`['sku_num'] is not null and CAST(`data`['sku_num'] AS INT) > CAST(`old`['sku_num'] AS INT) )\n" +
            ")";
        Table filteredCartAdd = streamTableEnv.sqlQuery(cartAddSql);
        streamTableEnv.createTemporaryView("cart_add", filteredCartAdd);

        // streamTableEnv.executeSql("select * from cart_add").print();

        //TODO 5. Read the dictionary table from MySQL (lookup source)
        streamTableEnv.executeSql(MySqlUtil.getBaseDicLookUpDDL());

        //TODO 6. Join the cart-add rows with the dictionary table
        // (denormalizes the dictionary dimension into the fact table via a
        //  processing-time temporal lookup join).
        String dimJoinSql = "select\n" +
            "    cadd.id,cadd.user_id,cadd.sku_id,cadd.sku_num,cadd.source_type,dic.dic_name as source_type_name,ts\n" +
            "FROM cart_add AS cadd\n" +
            "  JOIN base_dic FOR SYSTEM_TIME AS OF cadd.proc_time AS dic\n" +
            "    ON cadd.source_type = dic.dic_code";
        Table joinedCartAdd = streamTableEnv.sqlQuery(dimJoinSql);
        streamTableEnv.createTemporaryView("res_table", joinedCartAdd);

        //TODO 7. Write the joined result to the target Kafka topic
        //7.1 Create a dynamic table mapped to the sink topic (upsert-kafka,
        //    keyed on id so quantity changes for the same row are upserts)
        String sinkDdl = "CREATE TABLE dwd_trade_cart_add (\n" +
            "  id string,\n" +
            "  user_id string,\n" +
            "  sku_id string,\n" +
            "  sku_num  string,\n" +
            "  source_type string,\n" +
            "  source_type_name  string,\n" +
            "  ts string,\n" +
            "  PRIMARY KEY (id) NOT ENFORCED\n" +
            ") " + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_cart_add");
        streamTableEnv.executeSql(sinkDdl);
        //7.2 Submit the insert job
        streamTableEnv.executeSql("insert into dwd_trade_cart_add select * from res_table");
    }
}
