package com.atguigu.gmall.realtime.app.dwd.db;

import com.atguigu.gmall.realtime.util.MyKafkaUtil;
import com.atguigu.gmall.realtime.util.MySqlUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author Felix
 * @date 2022/8/2
 * 交易域加购事实表准备
 * 需要启动的进程
 *      zk、kafka、maxwell、DwdTradeCartAdd
 * 执行流程
 *      对业务数据库加购表中加购数量进行修改(小->大)
 *      binlog会将业务数据库表的变化记录下来
 *      maxwell从binlog中读取变化的数据，并封装为json格式字符串，发送到kafka的topic_db主题中
 *      DwdTradeCartAdd从topic_db主题中读取数据
 *      对读取的数据进行过滤，找出加购表中新增以及对加购数据进行调大的行为
 *      从MySQL字典表中获取字典维度数据
 *      将加购表和字典表进行连接
 *      创建一个动态表和要写回的主题进行映射
 *      将连接的结果写到kafka的dwd_trade_cart_add主题
 */
/**
 * DWD layer job: builds the trade-domain "cart add" fact table.
 *
 * <p>Reads Maxwell CDC records from the Kafka topic {@code topic_db}, filters out
 * cart-add behavior (new cart rows, or updates that increased {@code sku_num}),
 * enriches the rows with the dictionary dimension via a MySQL lookup join, and
 * writes the result to the Kafka topic {@code dwd_trade_cart_add}.
 */
public class DwdTradeCartAdd {
    public static void main(String[] args) {
        //TODO 1. Basic environment setup
        //1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Parallelism — ideally matches the partition count of the source topic
        env.setParallelism(4);
        //1.3 Table environment layered on the stream environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //TODO 2. Checkpointing.
        // Required for correct Kafka semantics: the Kafka source commits offsets only on
        // checkpoint completion, and sink delivery guarantees depend on checkpointing.
        // NOTE(review): state backend / externalized-checkpoint settings are presumably
        // configured cluster-side — confirm before production use.
        env.enableCheckpointing(5000L);

        //TODO 3. Read the raw business CDC data from Kafka topic_db into a dynamic table.
        // `proc_time` is a processing-time attribute, needed for the lookup join in step 6.
        String topic = "topic_db";
        String groupId = "dwd_trade_cart_group";
        tableEnv.executeSql("create table topic_db(\n" +
            " `database` string,\n" +
            " `table` string,\n" +
            " `type` string,\n" +
            " `ts` string,\n" +
            " `old` MAP<string, string>,\n" +
            " `data` MAP<string, string>,\n" +
            " `proc_time` as proctime()\n" +
            ")" + MyKafkaUtil.getKafkaDDL(topic, groupId));

        //TODO 4. Filter cart-add behavior from the raw table:
        //  - inserts into cart_info, or
        //  - updates where sku_num grew (only the delta counts as the added quantity).
        Table cartAdd = tableEnv.sqlQuery("select \n" +
            " data['id'] id,\n" +
            " data['user_id'] user_id,\n" +
            " data['sku_id'] sku_id,\n" +
            " data['source_type'] source_type,\n" +
            " if(\n" +
            " `type`='insert',\n" +
            " data['sku_num'],\n" +
            " CAST((CAST(data['sku_num'] AS INT) - CAST(`old`['sku_num'] AS INT)) as string)\n" +
            " ) sku_num,\n" +
            " ts,\n" +
            " proc_time\n" +
            "from  topic_db\n" +
            "where  `table` = 'cart_info' \n" +
            "and (`type`='insert' or (`type`='update' and `old`['sku_num'] is not null and CAST(data['sku_num'] AS INT)> CAST(`old`['sku_num'] AS INT)))");

        tableEnv.createTemporaryView("cart_add", cartAdd);

        //TODO 5. Register the MySQL dictionary table (base_dic) as a lookup table.
        tableEnv.executeSql(MySqlUtil.getBaseDicLookUpDDL());

        //TODO 6. Lookup-join the cart adds with the dictionary table
        //        (degenerates the dictionary dimension into the fact table).
        Table joinedTable = tableEnv.sqlQuery("SELECT cadd.id,cadd.user_id,cadd.sku_id,cadd.sku_num,cadd.source_type,dic.dic_name source_type_name,ts\n" +
            " FROM cart_add AS cadd\n" +
            "  JOIN base_dic FOR SYSTEM_TIME AS OF cadd.proc_time AS dic  ON cadd.source_type = dic.dic_code");

        tableEnv.createTemporaryView("joined_table", joinedTable);

        //TODO 7. Map a table onto the target Kafka topic.
        // Upsert-kafka requires a primary key; `id` is the cart_info row id.
        tableEnv.executeSql("CREATE TABLE dwd_trade_cart_add (\n" +
            "  id string,\n" +
            "  user_id string,\n" +
            "  sku_id string,\n" +
            "  sku_num string,\n" +
            "  source_type string,\n" +
            "  source_type_name string,\n" +
            "  ts string,\n" +
            "  PRIMARY KEY (id) NOT ENFORCED\n" +
            ") " + MyKafkaUtil.getUpsertKafkaDDL("dwd_trade_cart_add"));

        //TODO 8. Write the enriched cart-add facts to the Kafka topic.
        // Column order of joined_table matches the sink schema exactly.
        tableEnv.executeSql("insert into dwd_trade_cart_add select * from joined_table");
    }
}
