package com.atguigu.gmall.realtime.app.dwd.db;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.common.GmallConstant;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2023/8/1 14:20
 */
/**
 * DWD job: builds the interaction comment-info fact table.
 *
 * <p>Pipeline: ods_db (Kafka CDC) → filter {@code comment_info} insert rows
 * → lookup-join the {@code base_dic} dimension (HBase) to resolve the
 * appraise name → write to the DWD Kafka topic.
 */
public class Dwd_02_DwdInteractionCommentInfo extends BaseSQLApp {
    // Single identifier reused as both the Flink job name and the ods_db
    // consumer-group id, so the two can never drift apart.
    private static final String APP_NAME = "Dwd_02_DwdInteractionCommentInfo";

    public static void main(String[] args) {
        new Dwd_02_DwdInteractionCommentInfo().start(
            30002,
            1,
            APP_NAME
        );
    }

    /**
     * Assembles the streaming SQL pipeline; all statements are registered on
     * {@code tEnv} and the job is launched by the final {@code executeInsert}.
     *
     * @param env  streaming environment (configured by {@code BaseSQLApp.start})
     * @param tEnv table environment used to register sources, views and the sink
     */
    @Override
    public void handle(StreamExecutionEnvironment env,
                       StreamTableEnvironment tEnv) {
        // 1. Register the ods_db source table (CDC change log from Kafka).
        readOdsDb(tEnv, APP_NAME);

        // 2. Extract comment_info insert rows from the ods_db change log.
        Table commentInfo = tEnv.sqlQuery(
            "select " +
                "data['id'] id," +
                "data['user_id'] user_id," +
                "data['sku_id'] sku_id," +
                "data['appraise'] appraise," +
                "data['comment_txt'] comment_txt," +
                "ts," +
                "pt " +
                "from ods_db " +
                "where `database`='gmall2023' " +
                "and `table`='comment_info' " +
                "and `type`='insert'");
        tEnv.createTemporaryView("comment_info", commentInfo);

        // 3. Register the dictionary dimension table (HBase connector).
        readBaseDic(tEnv);

        // 4. Lookup join (dimension degeneration): resolve the appraise code
        //    to its display name as of the fact row's processing time.
        Table result = tEnv.sqlQuery(
            "select " +
                "ci.id," +
                "ci.user_id," +
                "ci.sku_id," +
                "ci.appraise," +
                "dic.info.dic_name appraise_name," +
                "comment_txt," +
                "ts " +
                "from comment_info ci " +
                "join base_dic for system_time as of ci.pt as dic " +
                "on ci.appraise=dic.dic_code ");
        // NOTE(review): the leftover debug call `result.execute().print()` was
        // removed here — on an unbounded stream TableResult.print() blocks the
        // main thread forever, so the Kafka sink insert below was never reached.

        // 5. Define the DWD Kafka sink table and write the joined result to it.
        tEnv.executeSql("create table dwd_interaction_comment_info(" +
                            "id string," +
                            "user_id string," +
                            "sku_id string," +
                            "appraise string," +
                            "appraise_name string," +
                            "comment_txt string," +
                            "ts bigint " +
                            ")" + SQLUtil.getKafkaSinkSQL(GmallConstant.TOPIC_DWD_INTERACTION_COMMENT_INFO));
        result.executeInsert("dwd_interaction_comment_info");
    }
}
/*
评论:
    读数据源: comment_info   insert
    
    维度退化:
        comment_info
            lookup join
        base_dic
        
    写出到 kafka 中
  

-----
业务数据:
    数据库
    结构化数据

sql
流技术

复杂表 用 sql 技术
    事实表会涉及到一些比较复杂的计算: join, 一些字段需要特殊处理等
    
    一个应用处理一张事实表
        评论表: 涉及 join
        下单表: 涉及到 join
        订单取消: ..
        支付成功
        加购
        退单
        退款
    
简单表 用流
    对 mysql 的数据只是做一个简单过滤, 提取. 使用动态分流技术
    
    一个 app 实现多种表的处理
    
        用户注册
        优惠券的领取
        优惠券的使用
        收藏

-----
map
    map<string, string>

row
    row(id string, spu_id string)
*/