package com.bujunjie.study.realtime.dwd.db.app;

import com.bujunjie.study.realtime.common.base.BaseSQLApp;
import com.bujunjie.study.realtime.common.constant.FlinkConstant;
import com.bujunjie.study.realtime.common.util.SQLUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

/**
 * <p>DWD layer — order-cancellation detail. Filters order-cancel change events
 * from {@code order_info}, joins them with DWD order-detail records, and writes
 * the result to a Kafka topic.</p>
 *
 * @author bu.junjie
 * @version 1.0.0
 * @createTime 2025/9/17 10:39
 */
public class DwdTradeOrderCancelDetail extends BaseSQLApp {

    public static void main(String[] args) {
        new DwdTradeOrderCancelDetail().start(10015, 4, FlinkConstant.TOPIC_DWD_TRADE_ORDER_CANCEL);
    }

    /**
     * Builds the order-cancel detail pipeline:
     * configure join-state TTL, read ODS CDC data, filter cancel events,
     * join them with order-detail rows, and write the result to Kafka.
     *
     * @param tableEnv the table environment supplied by {@link BaseSQLApp}
     */
    @Override
    public void handle(StreamTableEnvironment tableEnv) {
        // The pipeline uses a regular (unbounded) inner join, so the join state
        // must be bounded with a TTL: 30 minutes (the cancel window, presumably
        // matching upstream order timeout — TODO confirm) plus 5 seconds of slack.
        tableEnv.getConfig().setIdleStateRetention(Duration.ofMinutes(30).plusSeconds(5));

        // 1. Read CDC change events from ODS; registers the `topic_db` table.
        this.readOdsDb(tableEnv, FlinkConstant.TOPIC_DWD_TRADE_ORDER_CANCEL);

        // 2. Filter order-cancel events into the temporary view `order_cancel`.
        registerOrderCancelView(tableEnv);

        // 3. Declare the Kafka-backed source for DWD order-detail records.
        createOrderDetailSource(tableEnv);

        // 4. Join the order-detail rows with the cancel events.
        Table result = joinDetailWithCancel(tableEnv);

        // 5. Declare the upsert-kafka sink and write the joined result into it.
        createOrderCancelSink(tableEnv);
        result.executeInsert(FlinkConstant.TOPIC_DWD_TRADE_ORDER_CANCEL);
    }

    /**
     * Selects `order_info` change events whose status moved from '1001' (old value)
     * to '1003' (new value) and registers them as the temporary view `order_cancel`.
     * NOTE(review): status-code semantics (1001 = unpaid, 1003 = cancelled) are
     * inferred from convention — confirm against the dictionary table.
     */
    private void registerOrderCancelView(StreamTableEnvironment tableEnv) {
        Table orderCancel = tableEnv.sqlQuery("SELECT\n" +
                " `data`['id'] AS id,\n" +
                " `data`['operate_time'] operate_time,\n" +
                " `ts` \n" +
                " FROM topic_db \n" +
                " WHERE `table` = 'order_info' \n" +
                " AND `old`['order_status']='1001'\n" +
                " AND `data`['order_status']='1003'");
        tableEnv.createTemporaryView("order_cancel", orderCancel);
    }

    /**
     * Declares `dwd_trade_order_detail` as a Kafka source table over the DWD
     * order-detail topic.
     * NOTE(review): the sink topic name is passed as the Kafka consumer-group id
     * (second argument of getKafkaDDL) — confirm this is intentional.
     */
    private void createOrderDetailSource(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("create table dwd_trade_order_detail(" +
                "id string," +
                "order_id string," +
                "user_id string," +
                "sku_id string," +
                "sku_name string," +
                "province_id string," +
                "activity_id string," +
                "activity_rule_id string," +
                "coupon_id string," +
                "date_id string," +
                "create_time string," +
                "sku_num string," +
                "split_original_amount string," +
                "split_activity_amount string," +
                "split_coupon_amount string," +
                "split_total_amount string," +
                "ts bigint " +
                ")" + SQLUtil.getKafkaDDL(FlinkConstant.TOPIC_DWD_TRADE_ORDER_DETAIL, FlinkConstant.TOPIC_DWD_TRADE_ORDER_CANCEL, FlinkConstant.KAFKA_BROKERS));
    }

    /**
     * Inner-joins order-detail rows with cancel events on order id, taking the
     * cancel event's operate_time (formatted as a date id) and timestamp.
     *
     * @return the joined result table
     */
    private Table joinDetailWithCancel(StreamTableEnvironment tableEnv) {
        return tableEnv.sqlQuery(
                "select  " +
                        "od.id," +
                        "od.order_id," +
                        "od.user_id," +
                        "od.sku_id," +
                        "od.sku_name," +
                        "od.province_id," +
                        "od.activity_id," +
                        "od.activity_rule_id," +
                        "od.coupon_id," +
                        "date_format(oc.operate_time, 'yyyy-MM-dd') order_cancel_date_id," +
                        "oc.operate_time," +
                        "od.sku_num," +
                        "od.split_original_amount," +
                        "od.split_activity_amount," +
                        "od.split_coupon_amount," +
                        "od.split_total_amount," +
                        "oc.ts " +
                        "from dwd_trade_order_detail od " +
                        "join order_cancel oc " +
                        "on od.order_id=oc.id "
        );
    }

    /**
     * Declares the upsert-kafka sink table for cancel details, keyed by detail id
     * so retractions from the unbounded join are handled as upserts.
     */
    private void createOrderCancelSink(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("create table " + FlinkConstant.TOPIC_DWD_TRADE_ORDER_CANCEL + "(" +
                "id string," +
                "order_id string," +
                "user_id string," +
                "sku_id string," +
                "sku_name string," +
                "province_id string," +
                "activity_id string," +
                "activity_rule_id string," +
                "coupon_id string," +
                "date_id string," +
                "cancel_time string," +
                "sku_num string," +
                "split_original_amount string," +
                "split_activity_amount string," +
                "split_coupon_amount string," +
                "split_total_amount string," +
                "ts bigint ," +
                "PRIMARY KEY (id) NOT ENFORCED " +
                ")" + SQLUtil.getUpsertKafkaDDL(FlinkConstant.TOPIC_DWD_TRADE_ORDER_CANCEL));
    }
}
