package cn.doitedu.demo.catalog;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * Demo: using a {@link HiveCatalog} with Flink SQL.
 *
 * <p>Registers a HiveCatalog (backed by the Hive metastore config under ./conf)
 * on a StreamTableEnvironment, switches to it, creates a Kafka-backed table
 * whose metadata is persisted in the Hive metastore, and runs a streaming query
 * against it. Once the CREATE TABLE has run a single time, the table definition
 * lives in Hive's metadata and later jobs can simply SELECT from it.
 */
public class HiveCatalog的使用 {
    public static void main(String[] args) {

        // Streaming environment with exactly-once checkpointing every 2s,
        // checkpoints stored on the local filesystem.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.enableCheckpointing(2000, CheckpointingMode.EXACTLY_ONCE);
        streamEnv.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");
        streamEnv.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);

        // Build a HiveCatalog instance (catalog name, default database, hive conf dir)
        // and register it with the TableEnvironment under the name "hive_catalog".
        HiveCatalog catalog = new HiveCatalog("hive", "default", "./conf");
        tableEnv.registerCatalog("hive_catalog", catalog);

        // At this point two catalogs exist: the built-in InMemoryCatalog and the HiveCatalog.
        // Selecting one is done via "use catalog <name>"; once the HiveCatalog is active,
        // Flink SQL directly sees every database and table in the Hive metastore.
        tableEnv.executeSql("use catalog hive_catalog");
        tableEnv.executeSql("show databases").print();
        tableEnv.executeSql("use dwd");
        tableEnv.executeSql("show tables").print();

        // This DDL only ever needs to run once: the table definition is persisted
        // in the Hive metastore, so subsequent jobs can query it without re-creating it.
        String kafkaTableDdl =
                "CREATE TABLE hive_catalog.dwd.flink_t1_kafka      (\n"
                + "  uid BIGINT                                 \n"
                + "  ,event_id STRING                           \n"
                + "  ,action_time BIGINT                        \n"
                + "  ,rt1 as to_timestamp_ltz(action_time,3)    \n"
                + "  ,watermark for rt1 as rt1                  \n"
                + ") WITH (                                    \n"
                + "  'connector' = 'kafka',                    \n"
                + "  'topic' = 'a-1',                         \n"
                + "  'properties.bootstrap.servers' = 'doitedu:9092',\n"
                + "  'properties.group.id' = 'doit44_g1',      \n"
                + "  'scan.startup.mode' = 'latest-offset',    \n"
                + "  'value.format' = 'json',                  \n"
                + "  'value.fields-include' = 'EXCEPT_KEY'     \n"
                + ")";
        tableEnv.executeSql(kafkaTableDdl);

        tableEnv.executeSql("show tables").print();

        // Streaming query against the persisted table; print() blocks and
        // continuously emits rows as they arrive from Kafka.
        tableEnv.executeSql("select * from hive_catalog.dwd.flink_t1_kafka").print();
    }

}
