package com.atguigu.chapter11;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * @ClassName: Flink13_Hive_Catalog
 * @Description: Demonstrates registering and using a HiveCatalog from the Flink Table API.
 * @Author: kele
 * @Date: 2021/4/13 12:39
 * <p>
 *
 *  HiveCatalog serves two purposes:
 *      1. Persistent storage for Flink's own table metadata.
 *      2. An interface for reading/writing existing Hive metadata,
 *         making Hive tables queryable from Flink.
 *
 **/
public class Flink13_Hive_Catalog {

    public static void main(String[] args) {

        // Pin the local web UI / REST endpoint to a fixed port.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port",20000);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        // Bridge from the DataStream environment into the Table API.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        String catalogName = "hive_catalog";  // name the catalog is registered under
        String defaultDb = "flink_demo";      // database selected by default inside the catalog
        String hiveConfDir = "in";            // directory containing hive-site.xml

        /*
         * Catalog is an interface; HiveCatalog is the concrete implementation
         * backed by the Hive Metastore.
         *   arg 1: catalog name, referenced later when switching catalogs
         *   arg 2: default database name
         *   arg 3: path to the directory holding hive-site.xml
         */
        Catalog hiveCatalog = new HiveCatalog(catalogName,defaultDb,hiveConfDir);

        // Register the catalog with the table environment, then make it and
        // its default database the current context for subsequent SQL.
        tableEnv.registerCatalog(catalogName,hiveCatalog);
        tableEnv.useCatalog(catalogName);
        tableEnv.useDatabase(defaultDb);

        // Query a Hive-managed table through the registered catalog and
        // print the result to stdout.
        tableEnv.sqlQuery("select * from stu").execute().print();
    }
}
