package streaming.demo.mq.kafka;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Json;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;
import streaming.demo.mq.kafka.config.MyPartitioner;
import streaming.demo.mq.kafka.config.MySchema;
import utils.PropertiesReader;

import java.util.Properties;

/**
 * Kafka -> Kafka pipeline using the Table API / Flink SQL descriptor connectors.
 *
 * <p>Reads JSON records from the source topic (property {@code default.kafka.topic.csv.A},
 * e.g. csvTest1) and writes them unchanged to the sink topic
 * ({@code default.kafka.topic.csv.B}, e.g. csvTest2). Records are JSON objects with
 * three string fields.
 *
 * <p>NOTE(review): the {@code tEnv.connect(...)} descriptor API used here is the
 * legacy (pre-1.11 style) connector API; kept as-is to match the Flink version the
 * project compiles against.
 */
public class KafkaSinkKafka01_1 {

    // Connection settings resolved from the project properties file.
    private static final String kafkaServers = PropertiesReader.get("default.kafka.servers");
    private static final String topicFrom = PropertiesReader.get("default.kafka.topic.csv.A");
    private static final String topicTo = PropertiesReader.get("default.kafka.topic.csv.B");

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // ---- Source table: JSON records from the input Kafka topic ----
        Kafka kafkaFrom = new Kafka().version("universal").topic(topicFrom).property("bootstrap.servers", kafkaServers);
        Schema sourceSchema = new Schema()
                .field("k_id", DataTypes.STRING())
                .field("k_type", DataTypes.STRING())
                .field("k_val", DataTypes.STRING());
        tEnv.connect(kafkaFrom)
                .withFormat(new Json())
                .withSchema(sourceSchema)
                .inAppendMode()
                .createTemporaryTable("inputTable");
        Table inputTable = tEnv.sqlQuery("select k_id, k_type, k_val from inputTable ");
        inputTable.printSchema();

        // ---- Sink table: JSON records to the output Kafka topic ----
        Kafka kafkaTo = new Kafka().version("universal").topic(topicTo).property("bootstrap.servers", kafkaServers);
        Schema sinkSchema = new Schema()
                .field("tg_id", DataTypes.STRING())
                .field("tg_type", DataTypes.STRING())
                .field("tg_val", DataTypes.STRING());
        tEnv.connect(kafkaTo)
                .withFormat(new Json())
                .withSchema(sinkSchema)
                .inAppendMode()
                .createTemporaryTable("outputTable");
        Table outputTable = tEnv.sqlQuery("select tg_id, tg_type, tg_val from outputTable ");
        outputTable.printSchema();

        // executeSql() submits the INSERT job asynchronously and returns immediately.
        // Without await() the local mini-cluster is torn down as soon as main() exits,
        // killing the streaming job before it processes any records. For this
        // unbounded demo, await() keeps the client alive for the job's lifetime.
        tEnv.executeSql("insert into outputTable(tg_id,tg_type,tg_val) select k_id, k_type, k_val from inputTable")
                .await();

    }

}
