package com.lzwk.utils;

import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * @Author: CC
 * @Date: 2022/2/18 16:20
 */
public final class HiveUtil {

    /** Catalog name used by the no-arg {@link #initHiveCatalog(StreamTableEnvironment)}. */
    private static final String DEFAULT_CATALOG_NAME = "hive";
    /** Default Hive database within the catalog. */
    private static final String DEFAULT_DATABASE = "warehouse";
    /** Default location of hive-site.xml on the cluster nodes. */
    private static final String DEFAULT_HIVE_CONF_DIR = "/usr/local/service/hive/conf";

    /** Utility class; not instantiable. */
    private HiveUtil() {
    }

    /**
     * Registers a Hive catalog on the given table environment using the
     * project defaults (catalog {@code hive}, database {@code warehouse},
     * conf dir {@code /usr/local/service/hive/conf}).
     *
     * @param tableEnv the Flink table environment to register the catalog on
     */
    public static void initHiveCatalog(StreamTableEnvironment tableEnv) {
        initHiveCatalog(tableEnv, DEFAULT_CATALOG_NAME, DEFAULT_DATABASE, DEFAULT_HIVE_CONF_DIR);
    }

    /**
     * Registers a Hive catalog on the given table environment. Generalized
     * overload so callers can target a different catalog name, database, or
     * Hive configuration directory.
     *
     * @param tableEnv        the Flink table environment to register the catalog on
     * @param name            name under which the catalog is registered
     * @param defaultDatabase default Hive database for the catalog
     * @param hiveConfDir     directory containing hive-site.xml
     */
    public static void initHiveCatalog(
            StreamTableEnvironment tableEnv, String name, String defaultDatabase, String hiveConfDir) {
        HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir);
        // Register under the supplied name. (The original hard-coded "hive"
        // here while also declaring a `name` variable it never used; the two
        // are now consistent.)
        tableEnv.registerCatalog(name, hive);
    }

    /**
     * Builds the trailing part of a Flink-to-Hive sink DDL statement:
     * partitioning clause, storage format, COS location, and the
     * partition-commit table properties. The caller is expected to prepend
     * the {@code CREATE TABLE ... (columns)} portion.
     *
     * <p>Fix: the original version opened {@code TBLPROPERTIES (} but never
     * closed it, producing an unbalanced fragment; the closing parenthesis is
     * now emitted after the last property.
     *
     * @param table_path table-relative path appended to the warehouse COS prefix
     * @return the DDL suffix to concatenate after the column definitions
     */
    public static String getFlink2HiveDDL(String table_path) {
        return  "        PARTITIONED BY (dt STRING, hr STRING)\n" +
                "        STORED AS orc\n" +
                "        LOCATION 'cosn://earth-1253442168/warehouse_n/" + table_path + "'\n" +
                "        TBLPROPERTIES (\n" +
                // Commit a partition once watermark passes partition time + delay,
                // then publish it to the metastore and drop a _SUCCESS file.
                "            'partition.time-extractor.timestamp-pattern'='$dt $hr:00:00',\n" +
                "            'sink.partition-commit.trigger'='partition-time',\n" +
                "            'sink.partition-commit.delay'='1 min',\n" +
                "            'sink.partition-commit.policy.kind'='metastore,success-file',\n" +
                "            'auto-compaction' = 'true',\n" +
                "            'compaction.file-size' = '128mb'\n" +
                "        )";
    }
}
