package hudi;

import org.apache.commons.io.FileUtils;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

/**
 * Flink streaming job that registers a Kafka source table and a Hudi sink table
 * from user-supplied DDL files, then continuously copies every row from
 * {@code kafka_table} into {@code hudi_table}.
 *
 * <p>The DDL files are expected to create tables named {@code kafka_table}
 * (source) and {@code hudi_table} (sink) respectively.
 *
 * @Author xuyouchang
 * @Date 2021/10/21
 */
public class Kafka2Hudi {

    /** Checkpoint storage URI used when no third CLI argument is supplied. */
    private static final String DEFAULT_CHECKPOINT_PATH = "hdfs://172.17.1.102:8020/xyc/state";

    /**
     * Entry point.
     *
     * @param args {@code args[0]} path to the Kafka source DDL file,
     *             {@code args[1]} path to the Hudi sink DDL file,
     *             {@code args[2]} (optional) checkpoint storage URI; defaults to
     *             {@link #DEFAULT_CHECKPOINT_PATH}
     * @throws Exception if a DDL file cannot be read or the streaming job fails
     */
    public static void main(String[] args) throws Exception {
        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            throw new IllegalArgumentException(
                    "Usage: Kafka2Hudi <kafka-ddl-file> <hudi-ddl-file> [checkpoint-uri]");
        }
        String kafkaSql = FileUtils.readFileToString(new File(args[0]), StandardCharsets.UTF_8);
        String hudiSql = FileUtils.readFileToString(new File(args[1]), StandardCharsets.UTF_8);
        String checkpointPath = args.length > 2 ? args[2] : DEFAULT_CHECKPOINT_PATH;

        Configuration configuration = new Configuration();

        StreamExecutionEnvironment streamExecutionEnvironment =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(configuration);
        // Checkpointing is required for the Hudi sink to commit data; 30s interval.
        streamExecutionEnvironment.enableCheckpointing(30000L);
        streamExecutionEnvironment.setStateBackend(new HashMapStateBackend());
        streamExecutionEnvironment.getCheckpointConfig().setCheckpointStorage(checkpointPath);
        StreamTableEnvironment bsTableEnv = StreamTableEnvironment.create(streamExecutionEnvironment);

        // Register the source and sink tables from the user-supplied DDL.
        bsTableEnv.executeSql(kafkaSql);
        bsTableEnv.executeSql(hudiSql);

        Table table2 = bsTableEnv.sqlQuery("select * from kafka_table");
        // await() blocks main() until the job terminates; without it the local
        // mini-cluster can shut down right after the asynchronous submission.
        table2.executeInsert("hudi_table").await();
    }
}
