package com.bujunjie.study.realtime.dwd.db.app;

import com.bujunjie.study.realtime.common.base.BaseSQLApp;
import com.bujunjie.study.realtime.common.constant.FlinkConstant;
import com.bujunjie.study.realtime.common.util.SQLUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * <p>DWD layer: trade cart-add fact — extracts cart-add events from the ODS db topic.</p>
 *
 * @author bu.junjie
 * @version 1.0.0
 * @createTime 2025/9/17 9:59
 */
public class DwdTradeCartAdd extends BaseSQLApp {

    public static void main(String[] args) {
        // Web UI port 10013, parallelism 4, consumer-group/checkpoint id = sink topic name.
        new DwdTradeCartAdd().start(10013, 4, FlinkConstant.TOPIC_DWD_TRADE_CART_ADD);
    }

    /**
     * Filters cart-add events out of the ODS change-log table and writes them
     * to the DWD cart-add Kafka topic (upsert-kafka, keyed by {@code id}).
     *
     * <p>A cart-add event is either a plain {@code insert} on {@code cart_info},
     * or an {@code update} that increased {@code sku_num}; for updates only the
     * delta (new sku_num - old sku_num) is emitted.
     *
     * @param tableEnv the Flink table environment provided by {@link BaseSQLApp}
     */
    @Override
    public void handle(StreamTableEnvironment tableEnv) {
        // Register the ODS db change-log Kafka topic as table `topic_db`.
        this.readOdsDb(tableEnv, FlinkConstant.TOPIC_DB);

        // BUG FIX: the original concatenated "FROM topic_db" directly with
        // "WHERE ...", yielding the invalid SQL token `topic_dbWHERE`.
        Table cartInfo = tableEnv.sqlQuery("SELECT\n" +
                "`data`['id'] AS id,\n" +
                "`data`['user_id'] AS user_id,\n" +
                "`data`['sku_id'] AS sku_id,\n" +
                // For updates, emit only the increment; inserts pass sku_num through.
                "if(`type`='insert' ,`data`['sku_num'],CAST((CAST(`data`['sku_num'] AS INT) - CAST(`old`['sku_num'] AS INT)) AS  STRING )) sku_num,\n" +
                "ts\n" +
                "FROM topic_db\n" +
                "WHERE `table` = 'cart_info' \n" +
                // Keep inserts, plus updates where sku_num strictly increased
                // (old sku_num must be present, i.e. the column actually changed).
                "AND (type = 'insert' OR (type='update' AND `old`['sku_num'] IS NOT NULL AND (CAST(`data`['sku_num'] AS INT) > CAST(`old`['sku_num'] AS INT))))");

        // Upsert-kafka sink keyed by id. NOTE(review): query aliases
        // (user_id/sku_id/sku_num) differ from sink column names
        // (userId/skuId/skuNum); executeInsert maps by position, so this
        // works, but aligning the names would be safer — confirm intent.
        String createTableSql = "CREATE TABLE " + FlinkConstant.TOPIC_DWD_TRADE_CART_ADD + " (\n" +
                "  id string,\n" +
                "  userId string,\n" +
                "  skuId string,\n" +
                "  skuNum string,\n" +
                "  ts bigint,\n" +
                "  PRIMARY KEY (`id`) NOT ENFORCED\n" +
                ") \n" + SQLUtil.getUpsertKafkaDDL(FlinkConstant.TOPIC_DWD_TRADE_CART_ADD);
        // Create the sink table, then stream the filtered rows into it.
        tableEnv.executeSql(createTableSql);
        cartInfo.executeInsert(FlinkConstant.TOPIC_DWD_TRADE_CART_ADD);
    }
}
