package org.example.append_table;


import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.api.*;

public class LogReader {
    /**
     * Entry point: runs a continuous streaming query against a Paimon table
     * (`paimon.default.LogTable`) that counts log records per {@code level}
     * and prints the continuously-updating changelog result to stdout.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Local-debugging setup: Flink Web UI on port 8082, parallelism 1.
        Configuration config = new Configuration();
        config.setString("rest.bind-port", "8082"); // Web UI port 8082 (comment previously mislabeled it 8081)
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config);
        env.setParallelism(1);
        env.enableCheckpointing(60000); // checkpoint interval: 60 seconds (comment previously said 5)
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(30000); // at least 30s idle between checkpoints
        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(3);
        // A non-zero min-pause implies at most one concurrent checkpoint
        // (see Flink CheckpointConfig docs); the former value of 3 was
        // contradictory and effectively ignored, so make the intent explicit.
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);

        EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // Register a filesystem-backed Paimon catalog under /tmp/paimon.
        String createCatalogSQL = "CREATE CATALOG paimon WITH (\n" +
                "    'type' = 'paimon',\n" +
                "    'warehouse' = 'file:///tmp/paimon'\n" +
                ");";
        System.out.println("正在执行SQL: \n" + createCatalogSQL);
        tableEnv.executeSql(createCatalogSQL);

        // Continuous (unbounded, non-windowed) aggregate: count per log level.
        // Flink keeps one state entry per distinct `level`, which stays small
        // because the set of log levels is bounded.
        String querySQL = "SELECT level, COUNT(1) as log_count FROM `paimon`.`default`.`LogTable` GROUP BY level";
        System.out.println("正在执行SQL: \n" + querySQL);

        // print() blocks and emits the changelog of the aggregate as it updates.
        tableEnv.executeSql(querySQL).print();
    }
}