package cn.doitedu.sql;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * Demo: registering a {@link HiveCatalog} with the Flink table environment so that
 * databases and tables created through Flink SQL are persisted in the Hive metastore.
 *
 * <p>Flow: create a streaming environment with exactly-once checkpointing, register a
 * Hive catalog under the name {@code hive_catalog}, create a database and a Kafka-backed
 * table inside it, then run an unbounded {@code SELECT} that prints rows as they arrive.
 *
 * <p>NOTE(review): requires a reachable Hive metastore (configured via {@code ./hiveconf})
 * and a Kafka broker at {@code doitedu:9092}; this is a demo and blocks forever on the
 * final query.
 */
public class Demo19_HiveCatalogUse {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Exactly-once checkpoints every 5s; local filesystem storage is for demo use only.
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);


        // Construct a Hive catalog instance.
        // Args: catalog name (as known to Hive), default database, path to hive-site.xml dir.
        HiveCatalog hiveCatalog = new HiveCatalog("hive", "default", "./hiveconf");

        // Register the catalog instance with the table environment under the name "hive_catalog".
        tenv.registerCatalog("hive_catalog", hiveCatalog);

        tenv.executeSql("show catalogs").print();


        // Create the database in the Hive catalog.
        // "if not exists" keeps the demo idempotent — without it the second run fails
        // with DatabaseAlreadyExistException (the table DDL below already guards this way).
        tenv.executeSql("create database if not exists hive_catalog.doit47_ods");

        // Create the Kafka-backed table, fully qualified with catalog and database.
        // The DDL (not the data) is persisted in the Hive metastore.
        tenv.executeSql(
                        "create table if not exists hive_catalog.doit47_ods.user_events_kafka(   " +
                        "     uid bigint,                     " +
                        "     event_id string,                " +
                        "     properties map<string,string>,  " +
                        "     action_time bigint              " +
                        ") with (                             " +
                        "  'connector' = 'kafka',             " +
                        "  'topic' = 'tpc-a',                 " +
                        "  'properties.bootstrap.servers' = 'doitedu:9092', " +
                        "  'properties.group.id' = 'doit47-g2',\n" +
                        "  'scan.startup.mode' = 'latest-offset',\n" +
                        "  'value.format' = 'json',\n" +
                        "  'value.fields-include' = 'EXCEPT_KEY'\n" +
                        ")");


        // Unbounded streaming query: prints arriving Kafka rows and never terminates.
        tenv.executeSql("select * from hive_catalog.doit47_ods.user_events_kafka").print();

    }
}
