package com.bigdata;

//import lombok.extern.log4j.Log4j;

import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.RetryingMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Catalog;
import org.apache.hadoop.hive.metastore.api.MetaException;

/**
 * Demonstrates connecting to the Hive Metastore (HMS) over Thrift and
 * exercising its catalog / database / table / partition / function APIs.
 *
 * @author Cyber
 * <p> Created on 2025/3/15
 * @version 1.0
 */
@Slf4j
public class HmsClient {

    /**
     * Initializes an HMS connection.
     *
     * @param conf Hadoop {@link Configuration} carrying HMS connection settings
     *             (e.g. {@code hive.metastore.uris} or an added hive-site.xml)
     * @return a retrying {@link IMetaStoreClient} proxy
     * @throws MetaException if the connection cannot be established
     */
    public static IMetaStoreClient init(Configuration conf) throws MetaException {
        try {
            // false = do not allow embedded metastore; always go through Thrift.
            return RetryingMetaStoreClient.getProxy(conf, false);
        } catch (MetaException e) {
            log.error("hms连接失败", e);
            throw e;
        }
    }

    public static void main(String[] args) throws org.apache.thrift.TException {
        log.info(" HmsClient start ...");
        Configuration conf = new Configuration();
        // Provide HMS connection info via the "hive.metastore.uris" property.
        conf.set("hive.metastore.uris", "thrift://master:9083");
        // Alternatively, provide HMS connection info via hive-site.xml:
        // conf.addResource("hive-site.xml");
        IMetaStoreClient client = HmsClient.init(conf);

        String catalogName = "mycatalog";

        // Ensure the Thrift connection is released even if a call below throws.
        try {
            Catalog catalog = new Catalog(catalogName, "hdfs://master:9000/user/mycatalog/warehouse");
            try {
                // Drop any pre-existing catalog in isolation: if it does not exist
                // yet, dropCatalog throws, and that must not prevent creation.
                client.dropCatalog(catalogName);
            } catch (org.apache.thrift.TException e) {
                log.warn("Catalog {} not dropped (it may not exist yet): {}", catalogName, e.getMessage());
            }
            try {
                client.createCatalog(catalog);
                // Success message belongs at INFO level (was logged at ERROR).
                log.info("Catalog {} created successfully", catalogName);
                // Use the catalogName variable instead of repeating the literal.
                client.getAllDatabases(catalogName).forEach(System.out::println);
            } catch (org.apache.thrift.TException e) {
                log.error("创建Catalog失败", e);
            }

            log.info("----------------------------获取所有catalogs-------------------------------------");
            client.getCatalogs().forEach(log::info);

            log.info("------------------------获取catalog为hive的描述信息--------------------------------");
            log.info(client.getCatalog(catalogName).toString());

            log.info("--------------------获取catalog为hive的所有database-------------------------------");
            client.getAllDatabases(catalogName).forEach(log::info);

            log.info("---------------获取catalog为hive，database为myhive的描述信息--------------------------");
            log.info(client.getDatabase(catalogName, "default").toString());

            log.info("-----------获取catalog为hive，database名为myhive下的所有表--------------------");
            client.getTables(catalogName, "myhive", "*").forEach(System.out::println);

            log.info("------获取catalog为hive，database名为myhive，表名为t_education_level的描述信息-----");
            log.info(client.getTable(catalogName, "myhive", "t_education_level").toString());

            log.info("---------------- 获取表分区信息 --------------------");
            // -1 = no limit on the number of partitions returned.
            client.listPartitions(catalogName, "myhive", "t_education_level", (short) -1)
                    .forEach(partition -> log.info(partition.toString()));

            log.info("---------------- 获取DB下函数信息 --------------------");
            client.getFunctions(catalogName, "myhive", "*").forEach(log::info);
        } finally {
            // Close the metastore client to release the Thrift connection.
            client.close();
        }
    }
}
