package org.example.tool;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Example of integrating Apache Hudi with Apache Flink.
 * Demonstrates registering a filesystem-backed Hudi catalog and running a
 * batch count query against an existing Hudi MOR table.
 */
public class HudiFlinkBatchQuery {
    /**
     * Entry point: initializes a batch-mode Flink streaming environment,
     * registers a Hudi catalog rooted at {@code file:///tmp/hudi/}, and prints
     * the row count of the {@code user_behavior_streaming_mor} table.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if catalog creation or query execution fails
     */
    public static void main(String[] args) throws Exception {
        System.out.println("开始初始化Flink环境...");
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // BATCH mode: the count query reads a bounded snapshot instead of a
        // continuously updating changelog.
        env.setRuntimeMode(RuntimeExecutionMode.BATCH);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Hudi catalog DDL. executeSql() accepts exactly one statement, so no
        // trailing ';' terminator (statement splitting is a SQL-Client feature,
        // not a Table API one).
        String hudiCatalog = "-- 创建Hudi Catalog的通用模板\n" +
                "CREATE CATALOG hudi WITH (\n" +
                "  'type' = 'hudi',\n" +
                "  'catalog.path' = 'file:///tmp/hudi/',    -- 指定Catalog根路径 \n" +
                "  'mode' = 'dfs',                          -- 默认'dfs'（或'hive'）\n" +
                "  'default-database' = 'default'           -- 默认数据库设置为default \n" +
                ")";
        System.out.println("准备执行DDL语句：\n" + hudiCatalog);
        // await() blocks until the DDL has completed on the cluster.
        tableEnv.executeSql(hudiCatalog).await();

        // Batch count over the MOR table; print() pulls the result to the client.
        String tableCount = "SELECT count(1) FROM `hudi`.`default`.`user_behavior_streaming_mor`";
        System.out.println("准备执行DML语句：\n" + tableCount);
        tableEnv.executeSql(tableCount).print();
    }
}