package cn.doitedu.sql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * Demo: registering a {@code HiveCatalog} with a {@code StreamTableEnvironment}
 * so that databases/tables created through Flink SQL have their metadata
 * persisted in Hive's metastore (instead of the default in-memory catalog).
 *
 * <p>Expects a Hive configuration directory ({@code hive-site.xml}) under
 * {@code ./hive_conf} and a Kafka broker at {@code doitedu:9092}.
 */
public class _21_HiveCatalog_Demo {
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Register a HiveCatalog named "hive_catalog" with the table environment.
        // Args: catalog name as seen by Hive, default database, hive conf dir.
        HiveCatalog hiveCatalog = new HiveCatalog("hive", "default", "./hive_conf");
        tenv.registerCatalog("hive_catalog", hiveCatalog);

        // tenv.executeSql("show catalogs").print();

        // A database can be created inside the registered catalog.
        // Once created, its metadata (and that of its tables) is persisted
        // in Hive's metastore service.
        // tenv.executeSql("create database hive_catalog.doit46_flink");

        // NOTE: this is a TEMPORARY table — despite living under the Hive
        // catalog's namespace, temporary tables are session-scoped and are
        // NOT persisted to the metastore.
        tenv.executeSql(
                " create temporary table hive_catalog.doit46_flink.score_kfk_1(  "+
                        "       user_id int,                                     "+
                        "       course string,                                   "+
                        " 	    score double,                                    "+
                        " 	    age int                                    "+
                        "  ) with (                                              "+
                        "       'connector' = 'kafka',                           "+
                        "       'topic' = 'score-test',                          "+
                        "       'properties.bootstrap.servers' = 'doitedu:9092', "+
                        "       'properties.group.id' = 'g003',                  "+
                        "       'scan.startup.mode' = 'latest-offset',           "+
                        "       'value.format' = 'json',                         "+
                        "       'value.fields-include' = 'EXCEPT_KEY'            "+
                        " )                                                      "
        );

        // Switch the current catalog/database, then inspect the table schema.
        tenv.executeSql("use hive_catalog.doit46_flink");
        tenv.executeSql("desc score_kfk_1").print();

        // Re-running the commented-out "create database" (or re-creating a
        // persisted table) would fail: the database/table already exists.
    }
}
