package com.zyx.flinkdemo.sql.catalog;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * @author zyx
 * @since 2021/8/3 23:24
 * desc: Kafka读取数据写入Hive案例
 */
public class KafkaToHiveDemo {

    public static void main(String[] args) {
        // 1. Set up the streaming environment and a Blink-planner table environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // 2. Create the HiveCatalog: catalog name "myHive", default database "testdb",
        //    Hive conf (hive-site.xml) loaded from the local "input/hive" directory.
        HiveCatalog hiveCatalog = new HiveCatalog("myHive", "testdb", "input/hive");

        // 3. Register the HiveCatalog with the table environment.
        tableEnv.registerCatalog("myHive", hiveCatalog);

        // 4. Activate it so subsequent DDL/DML is resolved against Hive.
        tableEnv.useCatalog("myHive");

        // 5. Partial DDL: column definitions only; the Kafka connector WITH-clause
        //    is appended by getCreateKafkaSourceTableSql below.
        String createKafkaSourceSql = "CREATE TABLE kafkaSourceTable(\n" +
                "    `name` STRING, \n" +
                "    `age` INT\n" +
                ")";

        // FIX: the original built createKafkaSourceSql but never used it — the helper was
        // never invoked and no DDL was executed, so the demo was a no-op. Complete the
        // statement and register the Kafka source table in the active (Hive) catalog.
        // NOTE(review): topic/servers/group are demo placeholders — adjust for your cluster.
        tableEnv.executeSql(getCreateKafkaSourceTableSql(
                "test_topic",
                "localhost:9092",
                "kafka_to_hive_demo",
                "json",
                "latest-offset",
                createKafkaSourceSql));
    }

    /**
     * Appends a Kafka-connector {@code WITH (...)} clause to a partial
     * {@code CREATE TABLE} statement (column list only, no trailing semicolon).
     *
     * <p>Note: the {@code json.fail-on-missing-field} and {@code json.ignore-parse-errors}
     * options are always emitted; they are only meaningful (and only valid) when
     * {@code kafkaFormat} is {@code "json"}.
     *
     * @param kafkaTopic            Kafka topic to consume
     * @param kafkaBootstrapServers Kafka bootstrap servers, e.g. {@code host:9092}
     * @param kafkaGroupId          consumer group id
     * @param kafkaFormat           record format, e.g. {@code json}
     * @param scanStartUpMode       startup mode, e.g. {@code earliest-offset} / {@code latest-offset}
     * @param createKafkaSourceSql  the partial CREATE TABLE DDL to complete
     * @return the full CREATE TABLE statement, ready for {@code executeSql}
     */
    public static String getCreateKafkaSourceTableSql(String kafkaTopic,
                                                      String kafkaBootstrapServers,
                                                      String kafkaGroupId,
                                                      String kafkaFormat,
                                                      String scanStartUpMode,
                                                      String createKafkaSourceSql) {
        return createKafkaSourceSql + " WITH ( "
                +" 'connector' = 'kafka' ,"
                + " 'topic' = '" + kafkaTopic + "',"
                + " 'properties.bootstrap.servers' = '" + kafkaBootstrapServers + "',"
                + " 'properties.group.id' = '" + kafkaGroupId + "',"
                + " 'format' = '" + kafkaFormat + "',"
                + " 'scan.startup.mode' = '" + scanStartUpMode + "',"
                + " 'json.fail-on-missing-field' = 'false',"
                + " 'json.ignore-parse-errors' = 'true' )";
    }

}
