package com.atguigu.flink.sql.catalog;

import org.apache.flink.connector.jdbc.catalog.MySqlCatalog;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

import java.util.Arrays;

/**
 * Created by Smexy on 2023/4/14
 *
 * HiveCatalog:
 *     Reads tables that are already defined in Hive.
 *     Stores newly created tables in Hive's metadata (the metastore).
 *
 * Metadata access goes through the Hive Metastore Service, which requires:
 *     1. Starting the metastore service.
 *     2. Adding the Hive connector dependency to the pom.
 *     3. Providing a hive-site.xml configuration file that points at the
 *        metastore service.
 */
public class Demo3_HiveCatalog {

    /**
     * Demo entry point: registers a {@code HiveCatalog} backed by the Hive
     * metastore configuration found in the {@code hiveconf} directory, makes it
     * the current catalog, prints the tables it contains, and queries table
     * {@code t2}.
     *
     * <p>NOTE(review): {@code t2} must already exist in the Hive metastore (it
     * was originally created once via a filesystem/JSON connector DDL); the
     * query fails otherwise — confirm against your environment.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        // Build a Table API environment in streaming execution mode.
        EnvironmentSettings settings =
                EnvironmentSettings.newInstance().inStreamingMode().build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        // HiveCatalog(String catalogName, @Nullable String defaultDatabase,
        //             @Nullable String hiveConfDir):
        // connects to the metastore described by hive-site.xml in "hiveconf".
        HiveCatalog hiveCatalog = new HiveCatalog("hive", "default", "hiveconf");

        // Register the catalog under the name "aa" and switch to it, so that
        // unqualified table names resolve against Hive's metadata.
        tableEnv.registerCatalog("aa", hiveCatalog);
        tableEnv.useCatalog("aa");

        // Print every table visible in the current catalog/database.
        // Arrays.toString is the simplest way to render the String[] result.
        System.out.println(Arrays.toString(tableEnv.listTables()));

        // Query the pre-existing Hive table t2 and print the result stream.
        tableEnv.sqlQuery(" select * from t2   ").execute().print();
    }
}
