package com.saga.energy.app;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * @author wdl
 * @date 2022/4/22 17:13
 */
/**
 * Demo Flink job: consumes 15-minute energy readings from the Kafka topic
 * {@code ENERGY_15_MIN} via Flink SQL and prints them to stdout.
 *
 * <p>The commented-out sections show the intended next step: registering a
 * {@link org.apache.flink.table.catalog.hive.HiveCatalog} and streaming the
 * Kafka rows into a partitioned Hive ODS table ({@code ods_energy_15}).
 *
 * @author wdl
 * @date 2022/4/22 17:13
 */
public class HiveConnectDemo {

    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Exactly-once checkpoints every 10s. Checkpointing is required for the
        // Kafka source to commit offsets, and for Hive partition commit once the
        // (currently disabled) Hive sink below is enabled.
        env.enableCheckpointing(10000L, CheckpointingMode.EXACTLY_ONCE);

        // Impersonate the "saga" Hadoop user for HDFS/Hive access on the cluster.
        System.setProperty("HADOOP_USER_NAME", "saga");

        // NOTE(review): the four variables below are only consumed by the
        // commented-out Hive catalog registration that follows — they are kept
        // so the demo can be re-enabled without re-typing the cluster settings.
        String name = "myHive";                              // unique catalog name
        String defaultDatabase = "saga_dw";                  // default Hive database
        String hiveConfDir = "/opt/module/hive-3.1.2/conf";  // directory containing hive-site.xml
        String version = "3.1.2";                            // Hive version

//        HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version);
//
//        tableEnv.registerCatalog(name, hive);
//        tableEnv.useCatalog(name);
//        tableEnv.useDatabase(defaultDatabase);

        // Kafka source table. scan.startup.mode alternatives: earliest-offset / latest-offset.
        // Lenient JSON options: missing fields become NULL, unparsable records are skipped.
        tableEnv.executeSql("create table kafka_energy (" +
                "project_id string, " +
                "build_id string, " +
                "date_time string)" +
                "with(" +
                "'connector' = 'kafka'," +
                "'topic' = 'ENERGY_15_MIN'," +
                "'properties.bootstrap.servers' = 'hadoop01:9092'," +
                "'properties.group.id' = 'energy'," +
                "'scan.startup.mode' = 'latest-offset'," +
                "'format' = 'json'," +
                "'json.fail-on-missing-field' = 'false'," +
                "'json.ignore-parse-errors' = 'true'" +
                ")");

        // Blocks and streams query results to stdout; this demo never reaches
        // the Hive sink below while this print job is running.
        tableEnv.executeSql("select * from kafka_energy").print();

        // Disabled Hive sink: create a partitioned ORC table with the Hive
        // dialect, then switch back to the default dialect for the INSERT.
        // (A duplicated 'sink.partition-commit.delay' line from the original
        // draft has been removed.)
//        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
//        tableEnv.executeSql("CREATE TABLE IF NOT EXISTS ods_energy_15 ( " +
//                "project_id string, " +
//                "build_id string, " +
//                "date_time string" +
//                ")" +
//                "PARTITIONED BY (dt string ) " +
//                "STORED AS orc TBLPROPERTIES ( " +
//                "'partition.time-extractor.timestamp-pattern'='$dt', " +
//                "'sink.partition-commit.delay'='0s'," +
//                "'sink.partition-commit.policy.kind'='metastore,success-file', " +
//                "'auto-compaction'='true'," +
//                "'compaction.file-size'='128MB'" +
//                ")");
//
//        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
//
//        // Fourth select column populates the dt partition key.
//        tableEnv.executeSql("insert into ods_energy_15 select project_id, build_id, date_time, date_time from kafka_energy");

    }
}
