package com.atcyj.gmall.realtime.dwd.db.app;

import com.atcyj.gamll.realtime.common.base.BaseSqlApp;
import com.atcyj.gamll.realtime.common.constant.Constant;
import com.atcyj.gamll.realtime.common.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer job: dimension degeneration for interaction comment facts.
 *
 * <p>Reads change-log records from the {@code topic_db} Kafka topic, keeps only
 * {@code comment_info} insert events, enriches the appraise code with its
 * human-readable name via a processing-time lookup join against the HBase
 * dimension table {@code base_dic}, and writes the widened rows to the
 * DWD Kafka topic {@link Constant#TOPIC_DWD_INTERACTION_COMMENT_INFO}.
 *
 * @author cyj
 * @date 2024/8/16
 */
public class DwdInteractionCommentInfo extends BaseSqlApp {
    public static void main(String[] args) {
        // port 10012, parallelism 4, ckpt/group id "dwd_interaction_comment_info"
        new DwdInteractionCommentInfo().start(10012, 4, "dwd_interaction_comment_info");
    }

    /**
     * Builds and submits the SQL pipeline: source DDL -> filter -> lookup join -> Kafka sink.
     *
     * @param env      the stream execution environment (configured by {@code BaseSqlApp.start})
     * @param tableEnv the table environment all DDL/DML is issued against
     * @param groupId  Kafka consumer group id used by the {@code topic_db} source
     */
    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tableEnv, String groupId) {
        // Dimension degeneration: fold the base_dic dimension into the comment fact table.

        // 1. Create the dynamic source table via DDL: reads change-log data from topic_db.
        createTopicDB(groupId, tableEnv);

        // 2. Filter out the comment-table insert records, keeping proc_time for the lookup join.
        Table commentInfo = tableEnv.sqlQuery(
                "SELECT\n" +
                        "    `data`['id'] id, \n" +
                        "    `data`['user_id'] user_id, \n" +
                        "    `data`['sku_id'] sku_id, \n" +
                        "    `data`['spu_id'] spu_id, \n" +
                        "    `data`['order_id'] order_id, \n" +
                        "    `data`['appraise'] appraise, \n" +
                        "    `data`['comment_txt'] comment_txt, \n" +
                        "    `data`['create_time'] comment_time, \n" +
                        "    proc_time \n" +
                        " from topic_db\n" +
                        " where `database` = 'gmall'\n" +
                        " and `table` = 'comment_info'\n" +
                        " and `type` = 'insert'");
        // Register the query result as a view so later SQL can reference it by name.
        tableEnv.createTemporaryView("comment_info", commentInfo);

        // 3. Create the base_dic dimension table (backed by HBase) via DDL (source).
        //    No explicit view is needed here: executing DDL registers the table in the
        //    catalog directly, whereas sqlQuery only returns a Table object that must
        //    be registered by hand (as done for comment_info above).
        createBaseDic(tableEnv);

        // 4. Fact-to-dimension lookup join at the fact row's processing time.
        //    NOTE(review): inner join drops comments whose appraise code has no
        //    base_dic match — confirm that is intended.
        Table result = tableEnv.sqlQuery(
                "SELECT\n" +
                        "      id,\n" +
                        "      user_id,\n" +
                        "      sku_id,\n" +
                        "      spu_id,\n" +
                        "      order_id,\n" +
                        "      appraise appraise_code,\n" +
                        "      b.dic_name appraise_name,\n" +
                        "      comment_txt,\n" +
                        "      comment_time \n" +
                        " from comment_info a \n" +
                        " join base_dic FOR SYSTEM_TIME AS OF a.proc_time AS b  \n" +
                        " on a.appraise = b.dic_code");

        // 5. Create the Kafka sink table for the DWD topic.
        tableEnv.executeSql(
                "CREATE TABLE " + Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO + "(\n" +
                        "    id string,\n" +
                        "    user_id string,\n" +
                        "    sku_id string,\n" +
                        "    spu_id string,\n" +
                        "    order_id string,\n" +
                        "    appraise_code string,\n" +
                        "    appraise_name string,\n" +
                        "    comment_txt string,\n" +
                        "    comment_time string\n" +
                        ") " + SQLUtil.getKafkaSinkSQL(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO));

        // 6. Write the joined result into the Kafka topic.
        result.executeInsert(Constant.TOPIC_DWD_INTERACTION_COMMENT_INFO);
    }
}

