package com.intct.flink.study;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * @author gufg
 * @since 2025-10-14 14:08
 */
/**
 * Demo: cumulative-window aggregation (CUMULATE TVF) over a Kafka-backed table.
 *
 * <p>Reads JSON page-view events from the {@code pageviews} topic, derives an
 * event-time attribute {@code rt} from {@code viewtime} with a zero-delay
 * watermark, then sums {@code user_id} per cumulative window (2-minute step,
 * 10-minute max size) and prints the result to stdout.
 */
public class SQLTest11 {
    public static void main(String[] args) {
        // Pin the local web UI / REST endpoint to port 8081.
        Configuration conf = new Configuration();
        conf.set(RestOptions.BIND_PORT, "8081");

        // 1. Obtain the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        // Parallelism 1 keeps the printed output in a single, ordered stream for this demo.
        env.setParallelism(1);
        // Checkpointing is intentionally disabled for this local test.
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // Table/SQL environment layered on top of the DataStream environment.
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Source table backed by Kafka. `rt` is the event-time column (computed from
        // `viewtime`) and the watermark allows zero out-of-orderness.
        tenv.executeSql("CREATE TABLE flink_test01 (" +
                "  user_id BIGINT," +
                "  page_id BIGINT," +
                "  viewtime timestamp(3), " +
                "  rt AS viewtime, " +
                "  watermark for rt as rt - interval '0' second " +
                ") WITH (" +
                " 'connector' = 'kafka'," +
                " 'topic' = 'pageviews'," +
                " 'properties.bootstrap.servers' = 'cdh-node:9092'," +
                " 'properties.group.id' = 'pageviews_a'," +
                " 'scan.startup.mode' = 'earliest-offset'," +
                " 'format' = 'json'" +
                ")");

        // Cumulative window aggregation; print() blocks and streams results to stdout.
        tenv.executeSql(
                "select " +
                        "    window_start " +
                        "    ,window_end " +
                        "    ,sum(user_id)  " +
                        "from  " +
                        // Windowing TVFs do not accept retract/changelog input (UPDATE/DELETE);
                        // they support INSERT-only streams. Two workarounds:
                        // 1. Write the changelog to Kafka and run the window aggregation over that topic.
                        // 2. Convert to the DataStream API (toChangelogStream), resolve the
                        //    retractions there, then switch back to SQL for the aggregation.
                        "  TABLE(CUMULATE(TABLE flink_test01, DESCRIPTOR(rt), INTERVAL '2' MINUTES, INTERVAL '10' MINUTES)) " +
                        "GROUP BY " +
                        "  window_start, " +
                        "  window_end").print();
    }
}
