package com.zyx.flinkdemo.sql.basic;

import com.zyx.flinkdemo.sql.cons.CommonConfig;
import com.zyx.flinkdemo.sql.utils.ConnectUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author zyx
 * @since 2021/5/29 07:10
 * desc: Use Flink SQL to forward data from one Kafka topic to another Kafka topic.
 */
public class KafkaToKafkaDemo {
    public static void main(String[] args) {
        // Set up the streaming execution environment with a single parallel task,
        // then wrap it in a table environment so we can run SQL against it.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(1);
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(streamEnv);

        // Shared column definition used by both the source and the sink table.
        String schemaFields = "id string," +
                "ts bigint," +
                "vc integer";

        // DDL for the Kafka source table (reads CSV records from the source topic).
        String sourceDdl = ConnectUtils.getNormalKafkaSourceConnect("sensor_source", schemaFields,
                "sensor_source", CommonConfig.KAFKA_SERVER,
                "src_consumer", "latest-offset", "csv");
        tableEnvironment.executeSql(sourceDdl);

        // DDL for the Kafka sink table (writes JSON records to the sink topic).
        String sinkDdl = ConnectUtils.getNormalKafkaSinkConnect("sensor_sink", schemaFields,
                "sensor_sink", CommonConfig.KAFKA_SERVER, "latest-offset", "json");
        tableEnvironment.executeSql(sinkDdl);

        // Submit the continuous INSERT job that copies every row from source to sink.
        tableEnvironment.executeSql("insert into sensor_sink select * from sensor_source");
    }
}
