package com.atguigu.flink.sql.other;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;
//import org.apache.flink.table.catalog.hive.HiveCatalog;

import java.util.Arrays;

/**
 *
 *  Reads data from Hive.
 *  Table definitions (metadata) are stored in Hive's metastore,
 *  so they persist across Flink sessions.
 *
 */
public class Demo3_HiveCatalog
{
    /**
     * Demo entry point: registers a HiveCatalog so table metadata is persisted in the
     * Hive metastore, (re-)creates a filesystem-backed table, and prints its contents.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        EnvironmentSettings environmentSettings = EnvironmentSettings.newInstance().inStreamingMode().build();
        TableEnvironment tableEnvironment = TableEnvironment.create(environmentSettings);


        //1. Create the HiveCatalog.
        //   Args: catalog name, default database, directory containing hive-site.xml.
        //   NOTE(review): "hiveconf" looks like a relative path to the Hive conf dir — verify it resolves at runtime.
        HiveCatalog hiveCatalog = new HiveCatalog("hive", "gmall", "hiveconf");
        //2. Register the catalog under the name "a"
        tableEnvironment.registerCatalog("a",hiveCatalog);
        //3. Switch the session's current catalog to it
        tableEnvironment.useCatalog("a");

        // Uncomment to list the tables visible in the current catalog/database:
        // System.out.println(Arrays.toString(tableEnvironment.listTables()));


        // IF NOT EXISTS makes the demo idempotent: the HiveCatalog persists this
        // definition in the metastore, so a plain CREATE TABLE would fail on re-runs.
        String createTableSql = " create table if not exists t10 (" +
                 " id STRING, ts BIGINT, vc INT  " +
            "   ) with (" +
            " 'connector' = 'filesystem', " +
            " 'path' = 'hdfs://hadoop102:8020/data/ws.json', " +
            " 'format' = 'json'  " +
            ")";

        // Must run before the query below; previously this was commented out, so
        // `select * from t10` failed unless an earlier run had created the table.
        tableEnvironment.executeSql(createTableSql);

        // A table already defined in Hive can be queried the same way, e.g.:
        // tableEnvironment.sqlQuery(" select * from gmall.dim_user_zip limit 5   ").execute().print();
        tableEnvironment.sqlQuery(" select * from t10   ").execute().print();

    }
}
