package com.atguigu.day10;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink SQL demo: read sensor records from Kafka via a DDL-defined source table,
 * filter them with a SQL query, and write the result to Kafka via a DDL-defined
 * sink table (legacy "connector.*" descriptor options, JSON format).
 */
public class FlinkSQL09_SQL_Kafka {

    public static void main(String[] args) throws Exception {

        // 1. Set up the streaming environment and its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Register a Kafka source table via DDL (topic 'test').
        tableEnv.executeSql("create table sensorSource(id string,ts bigint,vc double) " +
                "with (" +
                "'connector.type' = 'kafka'," +
                "'connector.version' = 'universal'," +
                "'connector.topic' = 'test'," +
                "'connector.properties.bootstrap.servers' = 'hadoop102:9092'," +
                "'connector.properties.group.id' = 'bigdata1109'," +
                "'format.type' = 'json'"
                + ")");

        // 3. Filter: keep only rows for sensor 'ws_001'.
        Table table = tableEnv.sqlQuery("select * from sensorSource where id = 'ws_001'");

        // 4. Register a Kafka sink table via DDL and insert the filtered rows.
        //    FIX: the sink must target a DIFFERENT topic than the source. Writing back
        //    to 'test' creates an infinite feedback loop: each 'ws_001' row written to
        //    the topic is re-consumed, passes the filter again, and is written again.
        //    FIX: dropped 'connector.properties.group.id' — group.id is a Kafka
        //    consumer property and has no meaning for a producer/sink.
        tableEnv.executeSql("create table sensorSink(id string,ts bigint,vc double) " +
                "with (" +
                "'connector.type' = 'kafka'," +
                "'connector.version' = 'universal'," +
                "'connector.topic' = 'test_sink'," +
                "'connector.properties.bootstrap.servers' = 'hadoop102:9092'," +
                "'format.type' = 'json'"
                + ")");

        // executeInsert submits the job itself (asynchronously) — no env.execute() needed.
        // FIX: the original called env.execute() afterwards, which throws
        // "No operators defined in streaming topology" because no DataStream
        // transformations were ever added to 'env'.
        table.executeInsert("sensorSink");
    }

}
