package cn.itcast.flink.connector;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

/**
 * Consumes records from a Kafka topic using the Table API Kafka connector:
 * registers a CSV-encoded source table via SQL DDL, then runs a continuous
 * query over it and prints the first rows to stdout.
 *
 * @author lilulu
 * @date 2023-04-10 17:16
 */
public class SqlConnectorKafkaSourceDemo {
    public static void main(String[] args) {
        // Table execution environment in streaming mode.
        // NOTE(review): useBlinkPlanner() was removed from this builder chain —
        // the Blink planner has been the default since Flink 1.11 and the only
        // planner since 1.14 (the builder method is a deprecated no-op there and
        // is gone in 1.15+), so behavior is unchanged and the code no longer
        // depends on a removed API.
        TableEnvironment tableEnv = TableEnvironment.create(
                EnvironmentSettings.newInstance().inStreamingMode().build());

        // Input table definition: where the data is read from.
        // Source: Kafka topic 'log-topic', starting from the latest offsets,
        // rows decoded with the CSV format.
        String sourceDdl = "CREATE TABLE tbl_log_kafka (\n" +
                "`user_id` STRING,\n" +
                "`item_id` INTEGER,\n" +
                "`behavior` STRING,\n" +
                "`ts` STRING\n" +
                ") WITH (\n" +
                "'connector' = 'kafka',\n" +
                "'topic' = 'log-topic',\n" +
                "'properties.bootstrap.servers' =\n" +
                "'node1.itcast.cn:9092,node2.itcast.cn:9092,node3.itcast.cn:9092',\n" +
                "'properties.group.id' = 'gid-1',\n" +
                "'scan.startup.mode' = 'latest-offset',\n" +
                "'format' = 'csv'\n" +
                ")";
        tableEnv.executeSql(sourceDdl);

        // Continuous query: take the first 10 rows arriving on the topic and
        // print them. Blocks until 10 rows have been consumed (topic is unbounded).
        tableEnv.sqlQuery("select * from tbl_log_kafka").limit(10).execute().print();
    }
}
