package org.example.batch_write;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Apache Hudi + Flink integration example.
 *
 * <p>Runs a one-shot batch pipeline that:
 * <ol>
 *   <li>registers a DFS-based Hudi catalog rooted at {@code file:///tmp/hudi/},</li>
 *   <li>(re)creates a MERGE_ON_READ Hudi table,</li>
 *   <li>upserts a single row into it, and</li>
 *   <li>reads the table back and prints it to stdout.</li>
 * </ol>
 */
public class HudiFlinkBatchMorWrite {

    /** Fully qualified name of the demo table; shared by every statement below. */
    private static final String TABLE_NAME = "`hudi`.`default`.`user_behavior_mor`";

    public static void main(String[] args) throws Exception {
        System.out.println("开始初始化Flink环境...");

        Configuration config = new Configuration();
        config.setString("rest.bind-port", "8081"); // Pin the local Web UI to port 8081.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config);
        env.setParallelism(1);
        // The job writes one bounded data set and reads it back once, so batch
        // execution mode is the right fit.
        env.setRuntimeMode(RuntimeExecutionMode.BATCH);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Register a DFS-mode Hudi catalog; all tables live under its catalog.path.
        String hudiCatalog = "-- 创建Hudi Catalog的通用模板\n" +
                "CREATE CATALOG hudi WITH (\n" +
                "  'type' = 'hudi',\n" +
                "  'catalog.path' = 'file:///tmp/hudi/',    -- 指定Catalog根路径 \n" +
                "  'mode' = 'dfs',                          -- 默认'dfs'（或'hive'）\n" +
                "  'default-database' = 'default'           -- 默认数据库设置为default \n" +
                ");";
        System.out.println("准备执行DDL语句：\n" + hudiCatalog);
        tableEnv.executeSql(hudiCatalog);

        runAndPrint(tableEnv, "准备执行SQL语句：", "SHOW DATABASES;");

        // Drop any previous copy; IF EXISTS keeps a first run from failing.
        runAndAwait(tableEnv, "准备执行DDL语句：", "DROP TABLE IF EXISTS " + TABLE_NAME);

        System.out.println("创建Hudi表...");
        String createTableSQL = "CREATE TABLE " + TABLE_NAME + " (\n" +
                "  user_id INT,\n" +
                "  item_id INT,\n" +
                "  category_id INT,\n" +
                "  behavior STRING,\n" +
                "  ts TIMESTAMP(3),\n" +
                "  dt STRING,\n" +
                "  PRIMARY KEY (user_id) NOT ENFORCED\n" +
                ") WITH (\n" +
                "  'connector' = 'hudi',\n" +
                "  'table.type' = 'MERGE_ON_READ',\n" + // MOR: log files merged on read / by compaction.
                "  'hoodie.datasource.write.recordkey.field' = 'user_id',\n" +
                "  'hoodie.datasource.write.partitionpath.field' = 'dt',\n" +
                "  'hoodie.datasource.write.precombine.field' = 'ts',\n" +
                "  'write.operation' = 'upsert',\n" +
                "  'write.tasks' = '1',\n" +
                "  'compaction.tasks' = '1',\n" +
                "  'write.bucket_assign.tasks' = '1'\n" +
                ")";
        runAndAwait(tableEnv, "准备执行DDL语句：", createTableSQL);

        System.out.println("Hudi表创建成功，开始插入数据...");

        // Upsert a single row (record key user_id=1, precombine field ts).
        String insertSQL = "INSERT INTO " + TABLE_NAME + " VALUES\n" +
                "(1, 101, 10, '浏览', TIMESTAMP '2023-01-01 12:00:00', '2023-01-01')\n";
        runAndAwait(tableEnv, "准备执行DML语句：", insertSQL);

        System.out.println("数据插入成功，查询表内容...");

        // Read the freshly written table back.
        runAndPrint(tableEnv, "准备执行查询语句：", "SELECT * FROM " + TABLE_NAME);
    }

    /**
     * Prints {@code sql} under {@code label}, executes it, and blocks until the
     * underlying Flink job completes.
     *
     * @param tableEnv the table environment to run against
     * @param label    log prefix describing the statement kind
     * @param sql      statement to execute
     * @throws Exception if execution fails or the wait is interrupted
     */
    private static void runAndAwait(StreamTableEnvironment tableEnv, String label, String sql)
            throws Exception {
        System.out.println(label + "\n" + sql);
        tableEnv.executeSql(sql).await();
    }

    /**
     * Prints {@code sql} under {@code label}, executes it, and prints the
     * resulting rows to stdout.
     *
     * @param tableEnv the table environment to run against
     * @param label    log prefix describing the statement kind
     * @param sql      query to execute
     */
    private static void runAndPrint(StreamTableEnvironment tableEnv, String label, String sql) {
        System.out.println(label + "\n" + sql);
        tableEnv.executeSql(sql).print();
    }
}