package com.atguigu.gmall.realtime.app.dwd.db;

import com.atguigu.gmall.realtime.utils.HbaseUtil;
import com.atguigu.gmall.realtime.utils.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author Felix
 * @date 2023/8/3
 * Comment (interaction) fact table job.
 * Processes that must be running before this job starts:
 *      zk, kafka, maxwell, hdfs, hbase, DwdInteractionCommentInfo
 */
public class DwdInteractionCommentInfo {
    public static void main(String[] args) {
        //TODO 1. Prepare the basic environment
        //1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Parallelism (matches the Kafka topic partition count by convention — verify against cluster)
        env.setParallelism(4);
        //1.3 Table execution environment on top of the stream environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //TODO 2. Checkpoint settings (omitted here; NOTE(review): required in production so the
        // upsert-kafka sink and source offsets are consistent on failure recovery)

        //TODO 3. Create the dynamic source table over the kafka topic_db topic --- kafka connector
        tableEnv.executeSql(KafkaUtil.getTopicDbDDL("dwd_interaction_comment_group"));

        //TODO 4. Filter out comment_info insert rows into a temporary view
        registerCommentView(tableEnv);

        //TODO 5. Create the dictionary dimension table backed by HBase --- hbase connector
        tableEnv.executeSql(HbaseUtil.getBaseDicLookUpDDL());

        //TODO 6. Enrich comments with the dictionary dimension --- lookup join
        registerJoinedView(tableEnv);

        //TODO 7. Write the joined result to a kafka topic --- upsert-kafka connector
        writeToKafka(tableEnv);
    }

    /**
     * Filters topic_db for freshly inserted comment_info rows and registers the
     * projection (id, user_id, sku_id, appraise, comment_txt, ts, proc_time) as
     * the temporary view {@code comment_table}. proc_time is kept because the
     * later lookup join requires a processing-time attribute.
     */
    private static void registerCommentView(StreamTableEnvironment tableEnv) {
        Table commentTable = tableEnv.sqlQuery("select\n" +
            "    `data`['id'] id,\n" +
            "    `data`['user_id'] user_id,\n" +
            "    `data`['sku_id'] sku_id,\n" +
            "    `data`['appraise'] appraise,\n" +
            "    `data`['comment_txt'] comment_txt,\n" +
            "    ts,\n" +
            "    proc_time\n" +
            "from topic_db where `table`='comment_info' and `type`='insert'");
        tableEnv.createTemporaryView("comment_table", commentTable);
    }

    /**
     * Lookup-joins comment_table against the HBase-backed base_dic dimension
     * (FOR SYSTEM_TIME AS OF proc_time) to resolve the appraise code into its
     * display name, and registers the result as the temporary view
     * {@code joined_table}. Inner join: comments whose appraise code has no
     * dictionary entry are dropped.
     */
    private static void registerJoinedView(StreamTableEnvironment tableEnv) {
        Table joinedTable = tableEnv.sqlQuery("SELECT \n" +
            "    id,\n" +
            "    user_id,\n" +
            "    sku_id,\n" +
            "    appraise,\n" +
            "    d.dic_name appraise_name,\n" +
            "    comment_txt,\n" +
            "    ts\n" +
            "FROM comment_table AS c JOIN base_dic \n" +
            "FOR SYSTEM_TIME AS OF c.proc_time AS d ON c.appraise = d.dic_code");
        tableEnv.createTemporaryView("joined_table", joinedTable);
    }

    /**
     * Maps a sink table onto the dwd_interaction_comment kafka topic via the
     * upsert-kafka connector (PRIMARY KEY id drives the upsert semantics) and
     * inserts joined_table into it.
     */
    private static void writeToKafka(StreamTableEnvironment tableEnv) {
        //7.1 Create a dynamic table mapped to the target topic
        tableEnv.executeSql("CREATE TABLE res_table (\n" +
            "   id string,\n" +
            "   user_id string,\n" +
            "   sku_id string,\n" +
            "   appraise string,\n" +
            "   appraise_name  string,\n" +
            "   comment_txt  string,\n" +
            "   ts  string,\n" +
            "   PRIMARY KEY (id) NOT ENFORCED\n" +
            ") " + KafkaUtil.getUpsertKafkaDDL("dwd_interaction_comment"));
        //7.2 Write the joined result into the sink table
        tableEnv.executeSql("insert into res_table select * from joined_table");
    }
}
