package com.example.yckjbigdataflink.sink;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * Example Flink batch/streaming job that registers a Hive catalog, ensures a
 * partitioned Hive table exists, and writes query results into one of its
 * partitions.
 *
 * <p>Usage: {@code FlinkHiveSink [hiveConfDir]} — the optional first argument
 * is the directory containing {@code hive-site.xml}; defaults to
 * {@code /path/to/hive/conf} (placeholder — must be set for a real run).
 */
public class FlinkHiveSink {

    public static void main(String[] args) throws Exception {
        // Create a table environment in streaming mode (Blink planner is the
        // only planner in modern Flink versions).
        EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // Configure and register the Hive catalog. The conf dir may be passed
        // as the first CLI argument; otherwise the placeholder default is used.
        String name = "myhive";
        String defaultDatabase = "default";
        String hiveConfDir = args.length > 0 ? args[0] : "/path/to/hive/conf";
        HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);
        tableEnv.registerCatalog(name, hive);
        tableEnv.useCatalog(name);

        // The DDL below is Hive syntax (partition column declared outside the
        // schema), so the Hive SQL dialect must be active while it runs; the
        // default Flink dialect would reject it.
        tableEnv.getConfig().set("table.sql-dialect", "hive");
        try {
            // Create the target Hive table if it does not exist yet.
            tableEnv.executeSql(
                    "CREATE TABLE IF NOT EXISTS user_events (\n" +
                            "  user_id STRING,\n" +
                            "  event_time TIMESTAMP,\n" +
                            "  event_type STRING\n" +
                            ") PARTITIONED BY (dt STRING) STORED AS ORC"
            );
        } finally {
            // Restore the default dialect so subsequent queries use Flink SQL.
            tableEnv.getConfig().set("table.sql-dialect", "default");
        }

        // Insert query results into a static partition. This assumes
        // `some_source_table` already exists in the current catalog/database.
        TableResult result = tableEnv.executeSql(
                "INSERT INTO user_events PARTITION (dt='2025-06-10') " +
                        "SELECT user_id, event_time, event_type FROM some_source_table"
        );

        // Block until the INSERT job finishes. TableResult.await() is the
        // supported way to wait, replacing the unchecked
        // getJobClient().get().getJobExecutionResult().get() Optional chain.
        result.await();
    }
}

