package cn.doitedu.sql;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * Demo: registering a {@link HiveCatalog} with a Flink table environment and
 * querying a table that lives in the Hive metastore.
 *
 * <p>Requires a valid {@code hive-site.xml} under {@code ./hiveconf} so the
 * catalog can reach the metastore.
 */
public class Demo20_HiveCatalogUseTest {
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Exactly-once checkpoints every 5s; state snapshots go to a local directory.
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Build a HiveCatalog instance:
        //   catalog name as seen by Flink = "hive",
        //   default database = "default",
        //   hive conf dir containing hive-site.xml = "./hiveconf".
        HiveCatalog hiveCatalog = new HiveCatalog("hive", "default", "./hiveconf");

        // Register the catalog instance with the table environment under the
        // name used in SQL statements below.
        tenv.registerCatalog("hive_catalog", hiveCatalog);

        // Switch the current catalog.
        tenv.executeSql("use catalog hive_catalog");
        // Switch the current database.
        // NOTE: Flink SQL's syntax is "USE <database>" (or "USE <catalog>.<database>");
        // "USE DATABASE <name>" is not valid and fails with a SqlParserException.
        tenv.executeSql("use doit47_ods");

        // Query a table resolved through the Hive catalog and print results
        // to stdout (blocks while the streaming job runs).
        tenv.executeSql("select * from user_events_kafka").print();
    }
}
