package com.intct.ods;

import com.intct.common.FlinkSqlWithUtil;
import com.intct.func.ValuesFunction;
import com.intct.utils.FlinkChenageLogConverUtil;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * @author gufg
 * @since 2025-10-25 16:25

 Test data (run these against the MySQL source table `travel`.`order_info`):
    INSERT INTO `travel`.`order_info` (`id`, `order_no`, `state`, `pay_amount`) VALUES (1, '176155637179934577830', 1, 100);

    INSERT INTO `travel`.`order_info` (`id`, `order_no`, `state`, `pay_amount`) VALUES (2, '176155637179934577831', 1, 100);

    INSERT INTO `travel`.`order_info` (`id`, `order_no`, `state`, `pay_amount`) VALUES (3, '176155637179934577832', 1, 100);

    INSERT INTO `travel`.`order_info` (`id`, `order_no`, `state`, `pay_amount`) VALUES (4, '176155637179934577830', 1, 100);

    update `travel`.`order_info` set `state` = 5 where id = 1;

    INSERT INTO `travel`.`order_info` (`id`, `order_no`, `state`, `pay_amount`) VALUES (5, '176155637179934577830', 1, 100);

    update `travel`.`order_info` set `state` = 5 where id = 2;

    update `travel`.`order_info` set `pay_amount` = 50 where id = 3;

    INSERT INTO `travel`.`order_info` (`id`, `order_no`, `state`, `pay_amount`) VALUES (6, '176155637179934577830', 1, 200);

    INSERT INTO `travel`.`order_info` (`id`, `order_no`, `state`, `pay_amount`) VALUES (7, '176155637179934577830', 1, 100);

    select `id`, `order_no`, `state`, `pay_amount`, `update_time` from `travel`.`order_info`

    truncate table `travel`.`order_info`

    delete from `travel`.`order_info` where id = 6
 */
public class OdsOrderSQL {
    /**
     * Entry point: builds a Flink streaming job that reads {@code travel.order_info}
     * via MySQL CDC, converts the changelog stream (tagging each row with its op type),
     * and aggregates order counts/amounts over cumulating event-time windows.
     *
     * <p>Blocks on {@code executeSql(...).print()} for the unbounded query.
     *
     * @param args optional; {@code args[0]} overrides the checkpoint storage URI
     *             (defaults to {@code file:/d:/test/ckpt} for backward compatibility)
     * @throws Exception propagated from Flink job construction/submission
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);
        env.setParallelism(1);
        // Enable checkpointing every 6s with exactly-once semantics.
        env.enableCheckpointing(6000L, CheckpointingMode.EXACTLY_ONCE);
        // Checkpoint storage is overridable via args[0] so the job is not tied to a
        // Windows D: drive; the default preserves the original behavior.
        String checkpointStorage = args.length > 0 ? args[0] : "file:/d:/test/ckpt";
        env.getCheckpointConfig().setCheckpointStorage(checkpointStorage);

        // MySQL CDC source table over travel.order_info (binlog tail only — see note below).
        tenv.executeSql("CREATE TABLE mysql_dwd_order_info ( " +
                "     id	INT                               " +
                "     ,op	STRING                            " +
                "     ,order_no	STRING                        " +
                "     ,state	INT                           " +
                "     ,update_time	TIMESTAMP(3)              " +
                "     ,pay_amount	DOUBLE                    " +
                "    ,PRIMARY KEY(id) NOT ENFORCED            " +
                ")" + FlinkSqlWithUtil.getMysqlCdcWith("travel","order_info","latest-offset"));

                /* Startup modes (scan.startup.mode) for the MySQL CDC consumer:
                   - initial (default): take an initial snapshot of the monitored table on
                     first start, then continue reading the latest binlog.
                   - earliest-offset: skip the snapshot phase and read from the earliest
                     readable binlog offset.
                   - latest-offset: never snapshot; read only from the tail of the binlog,
                     i.e. only changes made after the connector starts (used here).
                   - specific-offset: skip the snapshot and start from a given binlog
                     position (file name + position, or a GTID set when GTID is enabled).
                   - timestamp: skip the snapshot and read binlog events from a given
                     timestamp onward.
                */

        // SQL table -> changelog DataStream, then tag each row's change kind into the
        // "op" column via the project helper.
        DataStream<Row> mysqlDwdOrderInfo = tenv.toChangelogStream(tenv.from("mysql_dwd_order_info"));
        SingleOutputStreamOperator<Row> dwdOrderInfo = FlinkChenageLogConverUtil.changeLogConver(mysqlDwdOrderInfo, "op");

        // DataStream -> SQL view, with an event-time column "rt" derived from update_time
        // and a zero-delay watermark on it.
        Schema schema = Schema.newBuilder()
                .column("id", DataTypes.INT())
                .column("op", DataTypes.STRING())
                .column("order_no", DataTypes.STRING())
                .column("state", DataTypes.INT())
                .column("update_time", DataTypes.TIMESTAMP(3))
                .column("pay_amount", DataTypes.DOUBLE())
                .columnByExpression("rt", "update_time")
                .watermark("rt", "rt - interval  '0' second")
                .build();
        tenv.createTemporaryView("conver_dwd_order_info", tenv.fromDataStream(dwdOrderInfo, schema));

        // Register the op-type-aware aggregation helper.
        // NOTE(review): VALUES_FUNC presumably maps the op flag (insert/update/delete)
        // to a signed contribution so retractions net out — confirm against ValuesFunction.
        tenv.createTemporarySystemFunction("VALUES_FUNC", ValuesFunction.class);

        // Cumulating window: 10s steps within a 1-day max window, keyed by window bounds.
        tenv.executeSql("SELECT" +
//                        "     window_start" +
                        "     window_end" +
                        "     , SUM(VALUES_FUNC(op)) AS total_count" +
                        "     , SUM(VALUES_FUNC(op,pay_amount)) AS total_amount" +
                        "     , SUM(CASE WHEN state = 5 THEN pay_amount ELSE 0 END) AS maybe_money" +
                        " FROM" +
                        " TABLE(" +
                        "   CUMULATE(" +
                        "     TABLE conver_dwd_order_info, DESCRIPTOR(rt), INTERVAL '10' SECOND, INTERVAL '1' DAY" +
                        "   )" +
                        " )" +
                        " GROUP BY" +
                        "    window_start" +
                        "    , window_end"
        ).print();

        // NOTE(review): print() blocks indefinitely for this unbounded streaming query,
        // so this line is effectively unreachable; if it were reached with no DataStream
        // sinks registered, execute() could fail. Kept for parity with the original.
        env.execute();
    }
}
