package org.example.streaming_write;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Example of integrating Apache Hudi with Flink.
 * Demonstrates creating a Hudi MERGE_ON_READ table via Flink SQL and
 * continuously streaming DataGen-produced rows into it.
 */
public class HudiFlinkStreamingMorWrite {

    /**
     * Entry point: sets up a streaming Flink environment with file-system
     * checkpointing, registers a Hudi catalog, (re)creates a MERGE_ON_READ
     * table, and streams synthetic user-behavior rows into it via the
     * DataGen connector.
     *
     * @param args unused command-line arguments
     * @throws Exception if any DDL/DML submission or the streaming job fails
     */
    public static void main(String[] args) throws Exception {
        System.out.println("开始初始化Flink环境...");

        Configuration config = new Configuration();
        config.setString("rest.bind-port", "8081"); // expose the Flink Web UI on port 8081
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config);
        env.setParallelism(1);
        env.setRuntimeMode(RuntimeExecutionMode.STREAMING); // unbounded source requires streaming mode

        // Checkpointing is mandatory for Hudi streaming writes: Hudi commits
        // data on each successful checkpoint.
        env.enableCheckpointing(10000);
        env.getCheckpointConfig().setCheckpointTimeout(120000); // checkpoint must finish within 120s
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(30000); // at least 30s between checkpoints
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1); // only one checkpoint in flight at a time
        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(3); // tolerate up to 3 checkpoint failures

        // Persist checkpoints to the local file system.
        env.getCheckpointConfig().setCheckpointStorage("file:///tmp/flink/checkpoints");
        System.out.println("已配置文件系统Checkpoint存储路径：file:///tmp/flink/checkpoints");

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Register a DFS-backed Hudi catalog rooted at file:///tmp/hudi/.
        String hudiCatalog = "-- 创建Hudi Catalog的通用模板\n" +
                "CREATE CATALOG hudi WITH (\n" +
                "  'type' = 'hudi',\n" +
                "  'catalog.path' = 'file:///tmp/hudi/',    -- 指定Catalog根路径 \n" +
                "  'mode' = 'dfs',                          -- 默认'dfs'（或'hive'）\n" +
                "  'default-database' = 'default'           -- 默认数据库设置为default \n" +
                ");";
        System.out.println("准备执行DDL语句：\n" + hudiCatalog);
        tableEnv.executeSql(hudiCatalog);

        String showDatabase = "SHOW DATABASES;";
        System.out.println("准备执行SQL语句：\n" + showDatabase);
        tableEnv.executeSql(showDatabase).print();

        // Drop any previous run's table; IF EXISTS keeps the first run from failing.
        System.out.println("准备执行DDL语句：");
        String dropTableSQL = "DROP TABLE IF EXISTS `hudi`.`default`.`user_behavior_streaming_mor`";
        System.out.println(dropTableSQL);
        tableEnv.executeSql(dropTableSQL).await();

        System.out.println("创建Hudi表...");
        // MERGE_ON_READ table keyed on user_id, partitioned by dt, deduped by ts.
        String createTableSQL = "CREATE TABLE `hudi`.`default`.`user_behavior_streaming_mor` (\n" +
                "  user_id INT,\n" +
                "  item_id INT,\n" +
                "  category_id INT,\n" +
                "  behavior STRING,\n" +
                "  ts TIMESTAMP(3),\n" +
                "  dt STRING,\n" +
                "  PRIMARY KEY (user_id) NOT ENFORCED\n" +
                ") WITH (\n" +
                "  'connector' = 'hudi',\n" +
                "  'table.type' = 'MERGE_ON_READ',\n" + // alternatives: COPY_ON_WRITE or MERGE_ON_READ
                "  'hoodie.datasource.write.recordkey.field' = 'user_id',\n" +
                "  'hoodie.datasource.write.partitionpath.field' = 'dt',\n" +
                "  'hoodie.datasource.write.precombine.field' = 'ts',\n" +
                "  'write.operation' = 'upsert',\n" +
                "  'write.tasks' = '5',\n" +
                "  'compaction.tasks' = '5',\n" +
                "  'write.bucket_assign.tasks' = '5'\n" +
                ")";
        System.out.println("准备执行DDL语句：\n" + createTableSQL);
        tableEnv.executeSql(createTableSQL).await();

        // DataGen source: sequential user_ids (bounded at 1,000,000 rows),
        // random values for the other columns, one row per second.
        System.out.println("创建DataGen数据源表...");
        String createDataGenTableSQL = "CREATE TABLE source_user_behavior (\n" +
                "  user_id INT,\n" +
                "  item_id INT,\n" +
                "  category_id INT,\n" +
                "  behavior STRING,\n" +
                "  ts TIMESTAMP(3),\n" +
                "  dt AS '2025-03-17'\n" + // fixed partition value via computed column
                ") WITH (\n" +
                "  'connector' = 'datagen',\n" +
                "  'rows-per-second' = '1',\n" +
                "  'fields.user_id.kind' = 'sequence',\n" +
                "  'fields.user_id.start' = '1',\n" +
                "  'fields.user_id.end' = '1000000',\n" +
                "  'fields.item_id.kind' = 'random',\n" +
                "  'fields.item_id.min' = '100',\n" +
                "  'fields.item_id.max' = '100000',\n" +
                "  'fields.category_id.kind' = 'random',\n" +
                "  'fields.category_id.min' = '1',\n" +
                "  'fields.category_id.max' = '500000',\n" +
                "  'fields.behavior.kind' = 'random',\n" +
                "  'fields.behavior.length' = '10',\n" +
                "  'fields.ts.kind' = 'random'\n" +
                ")";
        System.out.println("准备执行DDL语句：\n" + createDataGenTableSQL);
        tableEnv.executeSql(createDataGenTableSQL).await();

        System.out.println("DataGen表创建成功，开始将数据写入Hudi表...");

        // Stream rows from the DataGen source into the Hudi table.
        String insertFromDataGenSQL = "INSERT INTO `hudi`.`default`.`user_behavior_streaming_mor` " +
                "SELECT user_id, item_id, category_id, behavior, ts, dt FROM source_user_behavior";
        System.out.println("准备执行DML语句：\n" + insertFromDataGenSQL);

        System.out.println("开始执行流处理任务...");
        // executeSql on an INSERT submits its own Flink job asynchronously.
        // Await the TableResult to keep this client process alive while the
        // streaming job runs. Do NOT call env.execute() here: no DataStream
        // operators were registered on `env`, so it would fail with
        // "No operators defined in streaming topology".
        tableEnv.executeSql(insertFromDataGenSQL).await();
    }
}