package com.hh.xx.hive;

import com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.api.HCatCreateDBDesc;
import org.apache.hive.hcatalog.api.HCatCreateTableDesc;
import org.apache.hive.hcatalog.api.HCatTable;
import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchemaUtils;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;


/**
 * @author tzp
 * @since 2020/5/8
 */
/**
 * Demo driver for the HCatalog thrift client: connects to an external Hive
 * Metastore, creates two tables (a delimited text table and an ORC table with
 * a nested schema), then lists databases.
 *
 * <p>Requires a running HMS at thrift://localhost:9083 and a local
 * hive-site.xml; run with {@code -ea} for the trailing asserts to fire.
 */
public class HCatalogThriftClient {
    public static void main(String[] args) throws HCatException {
        HiveConf hiveConf = new HiveConf();
        hiveConf.addResource("/Users/tzp/Documents/env/apache-hive-3.1.2-bin/conf/hive-site.xml");

        // If this property is set, an external HMS is used; otherwise an embedded one.
        hiveConf.set("metastore.thrift.uris", "thrift://localhost:9083");
        String db = "testdb1";
        HCatClient client = HCatClient.create(new Configuration(hiveConf));
        // Ensure the thrift connection to the metastore is released even if a
        // DDL call below throws (the original leaked the client).
        try {
//        CREATE EXTERNAL TABLE test_dms_result(
//                        prefix STRING,
//                        id STRING,
//                        xx INT)
//            ROW FORMAT DELIMITED
//            FIELDS TERMINATED BY '\t'
//            LINES TERMINATED BY '\n'
//            STORED AS TEXTFILE
//            LOCATION '/hive-wksp/tmp1';
            {
                HCatTable test2 = new HCatTable("default", "test2");

                HCatFieldSchema prefixFieldSchema = new HCatFieldSchema("prefix",
                        TypeInfoFactory.stringTypeInfo, "沙雕1");
                HCatFieldSchema idFieldSchema = new HCatFieldSchema("id",
                        TypeInfoFactory.stringTypeInfo, "沙雕2");
                HCatFieldSchema xxFieldSchema = new HCatFieldSchema("xx",
                        TypeInfoFactory.intTypeInfo, "沙雕3");

                test2.cols(Arrays.asList(prefixFieldSchema, idFieldSchema, xxFieldSchema));
                test2.fieldsTerminatedBy('\t')
                        .linesTerminatedBy('\n')
                        .fileFormat("textfile")
                        .location("/hive-wksp/tmp1");

                // create(table, true) == IF NOT EXISTS semantics.
                client.createTable(HCatCreateTableDesc.create(test2, true).build());
            }

            // CREATE TABLE test3 with a nested schema (arrays of maps), parsed
            // from a schema string via HCatSchemaUtils:
            //   STORED AS ORC LOCATION '/hive-wksp/tmp2';
            {
                HCatTable test3 = new HCatTable("default", "test3");

                String s = "" +
                        "id:string," +
                        "id_type:string," +
                        "spot_id:array<map<string,bigint>>," +
                        "region_small:array<map<string,bigint>>," +
                        "ip:array<map<string,string>>," +
                        "time_interval:map<string,bigint>" +
                        "";
                HCatSchema schema = HCatSchemaUtils.getHCatSchema(s);
                System.out.println(schema);
                test3.cols(schema.getFields());
                test3.fileFormat("orcfile")
                        .location("/hive-wksp/tmp2");

                client.createTable(HCatCreateTableDesc.create(test3, true).build());
            }
            List<String> dbNames = client.listDatabaseNamesByPattern("*");

            System.out.println(dbNames);
            assert dbNames.contains("default");
            // NOTE(review): "testdb1" is never created in this program; this
            // assert only passes if the database already exists in the HMS.
            assert dbNames.contains(db);
        } finally {
            client.close();
        }
    }
}
