package com.atguigu.flinkSql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Json;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;

import static org.apache.flink.table.api.Expressions.$;

/**
 * @author wky
 * @create 2021-07-21-11:40
 */
// Read data from Kafka, filter it, and write the result back to Kafka.
public class Flink09_Sql_KafkaTokafka {
    /**
     * Entry point: registers a Kafka source table and a Kafka sink table via SQL DDL,
     * then runs a continuous INSERT that copies rows with id 's1' from source to sink.
     *
     * @param args unused command-line arguments
     * @throws Exception if the streaming job fails or the client is interrupted while waiting
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        environment.setParallelism(1);
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(environment);

        // 1. Register the source table backed by Kafka topic "topic_source_sensor" (CSV records).
        tableEnvironment.executeSql("create table source_sensor (id string, ts bigint, vc int) with ("
                + "'connector' = 'kafka',"
                + "'topic' = 'topic_source_sensor',"
                + "'properties.bootstrap.servers' = 'hadoop102:9092,hadoop103:9092,hadoop104:9092',"
                + "'properties.group.id' = 'atguigu',"
                + "'scan.startup.mode' = 'latest-offset',"
                + "'format' = 'csv'"
                + ")");

        // 2. Register the sink table backed by Kafka topic "topic_sink_sensor" (CSV records).
        tableEnvironment.executeSql("create table sink_sensor(id string, ts bigint, vc int) with("
                + "'connector' = 'kafka',"
                + "'topic' = 'topic_sink_sensor',"
                + "'properties.bootstrap.servers' = 'hadoop102:9092,hadoop103:9092,hadoop104:9092',"
                + "'format' = 'csv'"
                + ")");

        // 3. Submit the continuous INSERT job that filters the source stream into the sink.
        TableResult result = tableEnvironment.executeSql(
                "insert into sink_sensor select * from source_sensor where id = 's1'");
        // executeSql submits the INSERT asynchronously and returns immediately; without
        // await() the client would exit right after submission. Blocking here keeps the
        // program attached to the (unbounded) streaming job.
        result.await();
    }
}
