package streaming.api.tableapi;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Json;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;

/**
 * Reads JSON sensor records from a Kafka topic (via the legacy {@code connect()}
 * descriptor API), selects fields with SQL, prints the result to stdout,
 * and writes it back out to a second Kafka topic.
 */
public class TableTest5_kafka {

    /**
     * Entry point: wires a Kafka source table, a SQL projection, a console
     * debug sink, and a Kafka sink table, then submits the job(s).
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the streaming environment and its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1); // single task keeps console output in order
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        String topicFrom = "myTest";
        String topicTo = "myTest2";
        String kafkaServer = "192.168.36.130:9092,192.168.36.129:9092,192.168.36.128:9092";

        // 2. Register the Kafka source table ("universal" = modern Kafka client).
        Kafka kafkaFrom = new Kafka()
                .version("universal")
                .topic(topicFrom)
                .property("bootstrap.servers", kafkaServer);
        Schema sourceSchema = new Schema()
                .field("id", DataTypes.STRING())
                .field("timestamp", DataTypes.BIGINT())
                .field("temp", DataTypes.DOUBLE());
        tableEnv.connect(kafkaFrom)
                .withFormat(new Json())
                .withSchema(sourceSchema)
                .inAppendMode()
                .createTemporaryTable("inputTable");

        // 3. Projection query; `timestamp` needs back-quotes (SQL reserved word).
        Table resultTable = tableEnv.sqlQuery("select id, `timestamp`, temp from inputTable ");
        // Debug sink: retract stream prints (true=insert, false=retract) flagged rows.
        tableEnv.toRetractStream(resultTable, Row.class).print("resultTable:");

        // 4. Register the Kafka sink table. NOTE(review): the sink renames the
        // second column to "ts"; the legacy INSERT maps columns by position, so
        // only count/types must match — confirm against the Flink version in use.
        Kafka kafkaTo = new Kafka()
                .version("universal")
                .topic(topicTo)
                .property("bootstrap.servers", kafkaServer);
        Schema sinkSchema = new Schema()
                .field("id", DataTypes.STRING())
                .field("ts", DataTypes.BIGINT())
                .field("temp", DataTypes.DOUBLE());
        tableEnv.connect(kafkaTo)
                .withFormat(new Json())
                .withSchema(sinkSchema)
                .inAppendMode()
                .createTemporaryTable("outputTable");

        // 5. Insert into the Kafka sink; executeInsert() submits its own job.
        resultTable.executeInsert("outputTable");

        // BUG FIX: the print() pipeline above belongs to the job graph built on
        // `env` and is only submitted by env.execute(); without this call the
        // console output never appears (executeInsert covers only the sink job).
        env.execute("kafka table job");
    }

}
