package streaming.api.sql;

import java.util.StringJoiner;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import utils.PropertiesReader;

/**
 * Smoke test: registers a Kafka-backed table via Flink SQL DDL and prints its schema.
 *
 * <p>The DDL is assembled from the configuration constants below; the job is not
 * executed — only {@code executeSql} (DDL) and {@code printSchema} are invoked.
 */
public class SQLTest5_es1 {

    // Kafka bootstrap servers, resolved from the shared properties file.
    private static final String kafkaServers = PropertiesReader.get("default.kafka.servers");
    // Source topic the table reads from.
    private static final String topicFrom = "csvTest1";
    // Connector format; the csv delimiter option is only emitted for "csv".
    private static final String kafkaFormat = "csv";
    // Field delimiter used when the format is csv.
    private static final String csvSeparator = ",";

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps console output deterministic for this test.
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        String ddl = buildInputTableDdl();
        System.out.println("inputTable SQL: " + ddl);
        tEnv.executeSql(ddl);

        Table inputTable = tEnv.sqlQuery("select k_id, k_type, k_val from inputTable ");
        inputTable.printSchema();
    }

    /**
     * Builds the {@code CREATE TABLE} DDL for the Kafka-backed input table.
     *
     * <p>Connector options are collected in a {@link StringJoiner}, so no
     * trailing-comma cleanup (the previous {@code deleteCharAt} hack) is needed.
     *
     * @return the complete DDL statement for {@code inputTable}
     */
    private static String buildInputTableDdl() {
        StringJoiner options = new StringJoiner(",");
        options.add("'connector' = 'kafka'");
        options.add("'topic' = '" + topicFrom + "'");
        options.add("'properties.bootstrap.servers' = '" + kafkaServers + "'");
        options.add("'format' = '" + kafkaFormat + "'");
        if ("csv".equals(kafkaFormat)) {
            // Delimiter option only applies to the csv format.
            options.add("'csv.field-delimiter' = '" + csvSeparator + "'");
        }

        return "CREATE TABLE inputTable ("
                + "k_id STRING, k_type STRING, k_val STRING"
                + ") WITH ("
                + options
                + ")";
    }
}
