package com.atguigu.day10;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink SQL pipeline: consumes CSV sensor records from one Kafka topic and
 * forwards only rows with {@code id = 'sensor_1'} to a second Kafka topic.
 *
 * <p>Requires a reachable Kafka broker at {@code hadoop102:9092} with the
 * topics {@code topic_source_sensor} and {@code topic_sink_sensor}.
 */
public class Flink09_KafkaToKafka {
    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming and table environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source table: reads CSV-formatted sensor records from a Kafka topic,
        // starting from the latest offset.
        tableEnv.executeSql("CREATE TABLE sourceKafka (\n" +
                "  `id` STRING,\n" +
                "  `ts` BIGINT,\n" +
                "  `vc` INT\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'topic_source_sensor',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'properties.group.id' = 'testGroup',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'format' = 'csv'\n" +
                ")");

        // Sink table: writes CSV-formatted records to another Kafka topic.
        tableEnv.executeSql("CREATE TABLE sinkKafka (\n" +
                "  `id` STRING,\n" +
                "  `ts` BIGINT,\n" +
                "  `vc` INT\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'topic_sink_sensor',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'format' = 'csv'\n" +
                ")");

        // Copy filtered data from the source topic into the sink topic.
        // executeSql() submits the INSERT job asynchronously; without await()
        // main() would return immediately and a locally-run job would be
        // terminated before it processes any records.
        tableEnv.executeSql("insert into sinkKafka select * from sourceKafka where id = 'sensor_1'")
                .await();
    }
}
