package com.atguigu.chapter11;

import com.atguigu.chapter05.WaterSensor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import java.time.Duration;

/**
 * Uses Flink SQL DDL to connect two Kafka topics: sensor records are read
 * from a source topic (CSV), filtered by water level, and written to a
 * sink topic (JSON) — no DataStream transformations involved.
 *
 * @author cjp
 * @version 1.0
 * @date 2021/3/12 9:30
 */
public class Flink09_SQL_KafkaToKafka {
    public static void main(String[] args) throws Exception {
        // Streaming environment with a single parallel task (tutorial setting).
        StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        execEnv.setParallelism(1);

        // Bridge into the Table/SQL API; external systems are attached purely via SQL DDL.
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(execEnv);

        // Source table: maps the Kafka topic 'topic_source' (CSV-encoded records,
        // read from the earliest offset) onto the schema (id, ts, vc).
        String sourceDdl = "create table sensor_source (id string,ts bigint,vc int) with ("
                + "  'connector' = 'kafka',"
                + "  'topic' = 'topic_source',"
                + "  'properties.bootstrap.servers' = 'hadoop102:9092',"
                + "  'properties.group.id' = 'testGroup',"
                + "  'scan.startup.mode' = 'earliest-offset',"
                + "  'format' = 'csv')";
        tEnv.executeSql(sourceDdl);

        // Sink table: maps the Kafka topic 'topic_sink' (JSON-encoded records)
        // onto the same schema.
        String sinkDdl = "create table sensor_sink (id string,ts bigint,vc int) with ("
                + "  'connector' = 'kafka',"
                + "  'topic' = 'topic_sink',"
                + "  'properties.bootstrap.servers' = 'hadoop102:9092',"
                + "  'format' = 'json')";
        tEnv.executeSql(sinkDdl);

        // The INSERT INTO statement submits the streaming job by itself,
        // so no explicit env.execute() call is required here.
        tEnv.executeSql("insert into sensor_sink select * from sensor_source where vc > 3");
    }
}


