package streaming.api.sql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import utils.PropertiesReader;

/**
 * Flink SQL demo job: reads CSV records from a source Kafka topic and
 * continuously copies them into a target Kafka topic via an INSERT pipeline.
 *
 * <p>Both tables use the Kafka SQL connector with the CSV format; the DDL is
 * generated by {@link #buildKafkaTableDdl(String, String, String)} to avoid
 * duplicating the connector options for source and sink.
 */
public class SQLTest2_kafka1 {

    // Connection/config values resolved once at class load from the default properties file.
    private static final String kafkaServers = PropertiesReader.get("default.kafka.servers");
    private static final String topicFrom = PropertiesReader.get("default.kafka.topic.csv.A");
    private static final String topicTo = PropertiesReader.get("default.kafka.topic.csv.B");
    private static final String kafkaFormat = "csv";
    private static final String csvSeparator = ",";

    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Source table: consumes CSV rows from the input topic.
        String inputDdl = buildKafkaTableDdl(
                "inputTable", "k_id STRING, k_type STRING, k_val STRING", topicFrom);
        System.out.println("inputTable SQL: " + inputDdl);
        tEnv.executeSql(inputDdl);

        Table inputTable = tEnv.sqlQuery("select k_id, k_type, k_val from inputTable ");
        inputTable.printSchema();

        // Sink table: produces CSV rows to the output topic.
        String outputDdl = buildKafkaTableDdl(
                "outputTable", "tg_id STRING, tg_type STRING, tg_val STRING", topicTo);
        System.out.println("outputTable SQL: " + outputDdl);
        tEnv.executeSql(outputDdl);

        Table outputTable = tEnv.sqlQuery("select tg_id, tg_type, tg_val from outputTable ");
        outputTable.printSchema();

        // Submits the continuous INSERT job; executeSql on INSERT statements is
        // asynchronous/detached, so main may return while the job keeps running.
        tEnv.executeSql("insert into outputTable(tg_id,tg_type,tg_val) select k_id, k_type, k_val from inputTable");

    }

    /**
     * Builds a {@code CREATE TABLE} statement for a Kafka-backed table.
     *
     * @param tableName name of the SQL table to create
     * @param schema    comma-separated column definitions, e.g. {@code "a STRING, b STRING"}
     * @param topic     Kafka topic the table is bound to
     * @return the complete DDL string, including connector options and
     *         (for CSV format) the field delimiter
     */
    private static String buildKafkaTableDdl(String tableName, String schema, String topic) {
        StringBuilder sb = new StringBuilder();
        sb.append("CREATE TABLE ").append(tableName).append(" (");
        sb.append(schema);
        sb.append(") WITH (");
        sb.append("'connector' = 'kafka',");
        sb.append("'topic' = '").append(topic).append("',");
        sb.append("'properties.bootstrap.servers' = '").append(kafkaServers).append("',");
        sb.append("'format' = '").append(kafkaFormat).append("',");
        if ("csv".equals(kafkaFormat)) {
            sb.append("'csv.field-delimiter' = '").append(csvSeparator).append("',");
        }
        // Every option above ends with a comma; strip the last one before closing the WITH clause.
        sb.deleteCharAt(sb.length() - 1);
        sb.append(")");
        return sb.toString();
    }
}
