package com.atguigu.flink.tableapi;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.*;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Created by Smexy on 2022/12/21
 *
 * A table can be connected directly to an external data source:
 *   - file system
 *   - Kafka
 */
public class Demo5_ReadKafka {

    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Describe the Kafka source: topic, consumer properties, start position, and client version.
        Kafka kafkaDescriptor = new Kafka()
                .topic("topicD")
                .property(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop102:9092")
                .property(ConsumerConfig.GROUP_ID_CONFIG, "test1")
                .property(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true")
                .property(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "500")
                // This consumer group has never committed offsets, so this falls back to "latest".
                .startFromGroupOffsets()
                // Required: selects the universal Kafka connector version.
                .version("universal");

        // Logical schema of the records in the topic.
        Schema tableSchema = new Schema()
                .field("id", DataTypes.STRING())
                .field("ts", DataTypes.BIGINT())
                .field("vc", DataTypes.INT());

        // Register the Kafka topic as a temporary table.
        tableEnv.connect(kafkaDescriptor)
                .withFormat(new Json())      // records in the topic are JSON-encoded
                .withSchema(tableSchema)     // declare the table structure
                .createTemporaryTable("t1"); // declare the table name

        // Query the registered table and print the resulting stream.
        Table resultTable = tableEnv.from("t1");

        resultTable
                .select($("id"), $("ts"), $("vc"))
                .execute()
                .print();
    }
}
