package com.intct.dws;

import com.intct.common.FlinkSqlWithUtil;
import com.intct.func.CurrentTime;
import com.intct.func.RowKeyFunction;
import com.intct.func.ValuesFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author gufg
 * @since 2025-10-25 16:36
 */
public class DwsOrderCurrDayCumlateMoneySQL {

    /**
     * Entry point. Builds and runs a Flink SQL job that:
     * <ol>
     *   <li>reads DWD order change-log events from Kafka,</li>
     *   <li>registers a UDF ({@code VALUES_FUNC}) that adjusts amounts by the
     *       change-log operation type ({@code op}),</li>
     *   <li>computes cumulative payment totals over a 1-day CUMULATE window
     *       that emits every 10 seconds, and</li>
     *   <li>prints the result stream to stdout.</li>
     * </ol>
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);
        env.setParallelism(1);
        env.enableCheckpointing(5000L); // one checkpoint snapshot every 5 seconds

        // Kafka source table over the DWD orders topic.
        // NOTE(review): pay_amount is DOUBLE — money amounts would be safer as
        // DECIMAL to avoid floating-point rounding; confirm against the upstream
        // schema before changing, since it alters deserialization.
        tenv.executeSql(
                "CREATE TABLE dwd_kafka_orders (" +
                        "    id INT," +
                        "    op STRING," +
                        "    order_no STRING," +
                        "    order_type  INT," + // order type
                        "    state INT," + // order state (5 is treated specially below — presumably "order placed"; verify)
                        "    create_time timestamp(3)," +
                        "    update_time timestamp(3)," +
                        "    pay_amount double," +
                        // Zero-lag watermark: events are assumed to arrive in order.
                        "    WATERMARK FOR update_time AS update_time - INTERVAL '0' SECOND" +
                        ")" + FlinkSqlWithUtil.getKafkaSourceWith("dwd_orders", "dwd_kafka_orders", ""));

        // Register the UDF that handles the change-log operation type; presumably
        // it signs amounts so deletes/updates retract instead of adding — confirm
        // against ValuesFunction's implementation.
        tenv.createTemporarySystemFunction("VALUES_FUNC", ValuesFunction.class);

        // Cumulative aggregation: windows grow in 10-second steps up to a 1-day
        // span, keyed on the event-time attribute declared in the DDL above.
        //   total_amount — running sum of all payment amounts
        //   maybe_money  — running sum restricted to rows with state = 5
        tenv.executeSql("SELECT" +
                "     window_start" +
                "     , window_end" +
                "     , SUM(VALUES_FUNC(op, pay_amount)) AS total_amount" +
                "     , SUM(VALUES_FUNC(op,CASE WHEN state = 5 THEN pay_amount ELSE 0 END)) AS maybe_money" +
                " FROM" +
                " TABLE(" +
                "   CUMULATE(" +
                "     TABLE dwd_kafka_orders, DESCRIPTOR(update_time), INTERVAL '10' SECOND, INTERVAL '1' DAY" +
                "   )" +
                " )" +
                " GROUP BY" +
                "    window_start" +
                "    , window_end"
        ).print();
    }
}
