package com.atguigu.gmall.realtime.app.dwd.db;

import com.atguigu.gmall.realtime.app.BaseSQLApp;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2023/2/13 08:29
 */
public class Dwd_02_DwdTradeCartAdd extends BaseSQLApp {
    // App name, used both as the Flink job name and as the Kafka consumer
    // group id. Extracted to a constant so the two call sites cannot drift.
    private static final String APP_NAME = "Dwd_02_DwdTradeCartAdd";

    public static void main(String[] args) {
        new Dwd_02_DwdTradeCartAdd().init(3002, 2, APP_NAME);
    }
    
    /**
     * Builds the cart-add transaction fact table (DWD layer).
     *
     * <p>Pipeline: read the {@code ods_db} change-log topic from Kafka, keep
     * {@code cart_info} inserts plus updates where {@code sku_num} increased
     * (emitting the increase as the row's {@code sku_num}), degrade the
     * {@code source_type} dimension via a lookup join on {@code base_dic},
     * and write the result to the DWD Kafka topic.
     *
     * @param env  stream execution environment supplied by {@link BaseSQLApp}
     * @param tEnv table environment used to run the SQL pipeline
     */
    @Override
    public void handle(StreamExecutionEnvironment env,
                       StreamTableEnvironment tEnv) {
        // 1. Register the Kafka ods_db source table via DDL.
        //    (tEnv.executeSql runs DDL/DML; tEnv.sqlQuery runs SELECT queries.)
        readOdsDb(tEnv, APP_NAME);
        
        // 2. Filter cart-add records: inserts, and updates whose sku_num grew.
        //    For updates, emit the delta (new - old) as sku_num.
        Table cartInfo = tEnv.sqlQuery("select " +
                                           "data['id'] id," +
                                           "data['user_id'] user_id," +
                                           "data['sku_id'] sku_id," +
                                           "data['source_id'] source_id," +
                                           "data['source_type'] source_type," +  // kept for dimension degradation
                                           "if(`type` = 'insert'," +
                                           "data['sku_num'],cast((cast(data['sku_num'] as int) - cast(`old`['sku_num'] as int)) as string)) sku_num," +
                                           "ts," +
                                           "pt " +
                                           "from ods_db " +
                                           "where `database`='gmall2022' " +
                                           "and `table`='cart_info' " +
                                           "and (" +
                                           " `type`='insert' " +
                                           "  or (`type`='update' " +
                                           "       and `old`['sku_num'] is not null " +  // old value present => sku_num actually changed
                                           "       and cast(`data`['sku_num'] as int) > cast(`old`['sku_num'] as int) )" +  // sku_num increased
                                           ")");
        tEnv.createTemporaryView("cart_info", cartInfo);
        
        // 3. Register the dictionary (dimension) table.
        readBaseDic(tEnv);
        // 4. Dimension degradation: resolve source_type to its dic_name by a
        //    lookup join (fact cart_info against dim base_dic on dic_code).
        Table result = tEnv.sqlQuery("select " +
                                         "ci.id," +
                                         "user_id," +
                                         "sku_id," +
                                         "source_id," +
                                         "source_type source_type_code," +
                                         "dic_name source_type_name," +
                                         "sku_num," +
                                         "ts " +
                                         "from cart_info ci " +
                                         "join base_dic for system_time as of ci.pt as dic " +
                                         "on ci.source_type=dic.dic_code");
        
        // 5. Sink to Kafka using the plain kafka connector (not upsert-kafka).
        tEnv.executeSql("create table dwd_trade_cart_add(" +
                            "id string," +
                            "user_id string," +
                            "sku_id string," +
                            "source_id string," +
                            "source_type_code string," +
                            "source_type_name string," +
                            "sku_num string," +
                            "ts bigint " +
                            ")" + SQLUtil.getDDLKafkaSink(Constant.TOPIC_DWD_TRADE_CART_ADD));
    
        result.executeInsert("dwd_trade_cart_add");
        
    }
}
/*
--------
sku_id    sku_num               取变化量
2            1       insert     直接取 1
1            3       insert     直接取 3
2            3       update     取差值(新-旧) 3-1=2

 "cast(data['sku_num'] as int) - cast(nvl(`old`['sku_num'], '0') as int)) sku_num,"  +


加购事务事实表

数据源: ods_db 中的 cart_info   insert   update(数量变大)

维度退化
    source_type source_type_name
    2401            商品推广
    
    
    cart_info 事实表,做成流
    base_dic  维度表
        事实表与维度表的 join

最后写入到 dwd 层: kafka

----------

流的技术还是 sql:
    用 sql
    
    1. sql 相对来说比较简单. 很多企业用 flink 就是为了使用 flink sql 来处理实时数据
    2. 业务表, 都在关系型数据库中, 都是结构化数据, 也比较适合用 sql

 */