package com.saga.energy.app;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

import javax.annotation.Nullable;

/**
 * @author wdl
 * @date 2022/4/20 19:55
 */
/**
 * Streams 15-minute energy readings from Kafka into a Hive table.
 *
 * <p>Pipeline: Kafka topic {@code ENERGY_15_MIN} (JSON) → Flink SQL → Hive table
 * {@code saga_dw.ods_energy_15_min}, with exactly-once checkpointing every 10s.
 */
public class EnergyToHive {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Checkpoint every 10s with exactly-once semantics so the Kafka → Hive
        // pipeline can recover from failures without duplicating rows.
        env.enableCheckpointing(10000L, CheckpointingMode.EXACTLY_ONCE);

        // Act as the "saga" user for HDFS/Hive access; must be set before the
        // HiveCatalog is created so the Hadoop client picks it up.
        System.setProperty("HADOOP_USER_NAME", "saga");

        // Kafka source table. Startup mode alternatives: earliest-offset / latest-offset.
        // Parse errors and missing fields are tolerated (rows become null / are skipped)
        // rather than failing the job.
        tableEnv.executeSql("create table kafka_energy (" +
                "project_id string, " +
                "build_id string, " +
                "date_time string)" +
                "with(" +
                "'connector' = 'kafka'," +
                "'topic' = 'ENERGY_15_MIN'," +
                "'properties.bootstrap.servers' = 'hadoop01:9092'," +
                "'properties.group.id' = 'energy'," +
                "'scan.startup.mode' = 'latest-offset'," +
                "'format' = 'json'," +
                "'json.fail-on-missing-field' = 'false'," +
                "'json.ignore-parse-errors' = 'true'" +
                ")");

        // Hive connection settings; hive-site.xml is expected under hiveConfDir.
        String hiveConfDir = "src/main/resources";
        String defaultDatabase = "saga_dw";
        String catalogName = "saga_hive";
        String hiveVersion = "3.1.2";
        HiveCatalog hiveCatalog = new HiveCatalog(catalogName, defaultDatabase, hiveConfDir, hiveVersion);

        // Register the Hive catalog and make it the current catalog so the
        // INSERT below resolves ods_energy_15_min inside saga_dw.
        tableEnv.registerCatalog(catalogName, hiveCatalog);
        tableEnv.useCatalog(catalogName);

        // executeSql on an INSERT submits its own Flink job asynchronously.
        // NOTE(review): date_time is projected twice — presumably the sink has a
        // fourth column (e.g. a partition column derived from date_time); confirm
        // against the ods_energy_15_min DDL.
        tableEnv.executeSql("insert into ods_energy_15_min select project_id, build_id, date_time, date_time from kafka_energy");

        // Bug fix: do NOT call env.execute() here. The job was already submitted by
        // executeSql above, and no DataStream operators were defined on `env`, so
        // env.execute() would throw IllegalStateException
        // ("No operators defined in streaming topology").
    }
}
