package cn._51doit.flink.day11;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * Demo: reading CSV events from a Kafka topic with the Flink SQL Kafka connector,
 * filtering them with SQL, and printing the result as a DataStream.
 *
 * <p>Expected record format in Kafka: {@code 1000,20000,view}
 * (user_id, item_id, behavior); the record timestamp is exposed as metadata column {@code ts}.
 */
public class KafkaSQLConnectorDemo {

    // DDL registering the Kafka-backed source table. The connector starts from the
    // earliest offset and parses each record as CSV.
    private static final String CREATE_SOURCE_TABLE_DDL =
            "CREATE TABLE tb_user_event (\n"
                    + "  `user_id` BIGINT,\n"
                    + "  `item_id` BIGINT,\n"
                    + "  `behavior` STRING,\n"
                    + "  `ts` TIMESTAMP(3) METADATA FROM 'timestamp'\n"
                    + ") WITH (\n"
                    + "  'connector' = 'kafka',\n"
                    + "  'topic' = 'user-behavior',\n"
                    + "  'properties.bootstrap.servers' = 'node-1.51doit.cn:9092,node-2.51doit.cn:9092,node-3.51doit.cn:9092',\n"
                    + "  'properties.group.id' = 'testGroup',\n"
                    + "  'scan.startup.mode' = 'earliest-offset',\n"
                    + "  'format' = 'csv'\n"
                    + ")";

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);

        // Register the Kafka source table.
        tableEnv.executeSql(CREATE_SOURCE_TABLE_DDL);

        // Keep only events from users with id >= 1000.
        Table filtered = tableEnv.sqlQuery("select * from tb_user_event where user_id >= 1000");

        // Bridge back to the DataStream API and use a DataStream sink.
        // NOTE(review): toAppendStream is deprecated since Flink 1.13 in favor of
        // toDataStream — consider migrating if the project is on a recent Flink version.
        DataStream<Row> rowStream = tableEnv.toAppendStream(filtered, Row.class);
        rowStream.print();

        streamEnv.execute();
    }
}
