package com.intct.flink.study;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;

/**
 * @author gufg
 * @since 2025-10-14 14:08
 */
/**
 * Demo: consume a MySQL table via the mysql-cdc connector, convert the resulting
 * changelog stream to an insert-only DataStream, and run a cumulative-window
 * (CUMULATE) PV/UV aggregation over it.
 *
 * <p>Windowing TVFs do not accept retract streams (streams containing -U/-D rows);
 * they require insert-only input. The workaround used here: convert the table to a
 * changelog DataStream, drop -U/-D rows and rewrite +U rows as +I, then register
 * the cleaned stream back as a table for the windowed aggregation.
 */
public class SQLTest1 {
    public static void main(String[] args) throws Exception {
        // Bind the Flink web UI / REST endpoint to port 8081.
        Configuration conf = new Configuration();
        conf.set(RestOptions.BIND_PORT, "8081");

        // 1. Create the streaming environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);
        // Exactly-once checkpointing every 5 seconds (required for the CDC source's
        // incremental-snapshot consistency guarantees).
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // Table/SQL environment bridged onto the DataStream environment.
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Map the MySQL orders table through the mysql-cdc connector.
        // NOTE(review): credentials are hard-coded for the demo; in real code load
        // them from configuration or environment variables instead.
        tenv.executeSql(
                "create table flink_test01( " +
                            // `user` is a Flink SQL reserved keyword, so it must be
                            // escaped with backticks wherever it appears.
                            "`user`              INT " +
                            ",order_time    TIMESTAMP(3) " +
                            ",PRIMARY KEY (`user`) NOT ENFORCED " +
                        ") with ( " +
                            "  'connector' = 'mysql-cdc' " +
                            "  ,'hostname' = 'cdh-node' " +
                            "  ,'port' = '13306' " +
                            "  ,'username' = 'root' " +
                            "  ,'password' = 'Test_090110' " +
                            "  ,'database-name' = 'm1' " +
                            "  ,'table-name' = 'orders_info' " +
                        ") ");

        // 2. Convert the SQL table into a DataStream.
        // 2.1 Obtain a Table handle for the CDC-backed table.
        Table flinkTest01 = tenv.from("flink_test01");
        // 2.2 Changelog stream: rows arrive tagged +I / -U / +U / -D.
        DataStream<Row> rowDataStream = tenv.toChangelogStream(flinkTest01);

        // 2.3 Drop -U/-D rows; rewrite +U as +I so downstream sees insert-only data.
        SingleOutputStreamOperator<Row> filterDS = rowDataStream.filter(row -> {
            String kind = row.getKind().shortString();
            boolean keep = "+I".equals(kind) || "+U".equals(kind);
            if (keep) {
                // Only mutate rows we keep: downgrade updates to plain inserts.
                row.setKind(RowKind.INSERT);
            }
            return keep;
        });

        // 3. Register the cleaned DataStream back as a table, declaring an event-time
        //    attribute `rt` (aliasing order_time) with a zero-delay watermark.
        Table table = tenv.fromDataStream(filterDS, Schema.newBuilder()
                .column("user", DataTypes.INT())
                .column("order_time", DataTypes.TIMESTAMP(3))
                .columnByExpression("rt", "order_time")
                .watermark("rt", "rt - interval '0' second")
                .build());
        tenv.createTemporaryView("tmp", table);

        // Cumulative window: 2-minute steps accumulating up to a 24-hour max window.
        // NOTE(review): since the query groups by `user`, `uv` (COUNT DISTINCT user)
        // is always 1 per group — confirm this is the intended semantics.
        tenv.executeSql(
                "select " +
                        "    window_start " +
                        "    ,window_end " +
                        // `user` is reserved — must stay backtick-escaped here too.
                        "    ,`user` " +
                        "    ,COUNT(`user`) pv  " +
                        "    ,COUNT(DISTINCT `user`) uv  " +
                        "from  " +
                        "  TABLE(CUMULATE(TABLE tmp, DESCRIPTOR(rt), INTERVAL '2' MINUTES, INTERVAL '24' HOUR)) " +
                        "GROUP BY " +
                        "  window_start" +
                        "  , window_end" +
                        "  , `user`"
                )
                .print();

        // The pipeline uses the DataStream API, so the job must be launched explicitly.
        env.execute();
    }
}
