package com.chen.flinkdemo.Kafka;

import com.chen.flinkdemo.Access;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;
import java.util.Random;

/**
 * @author cy
 * @date 2024/8/29 23:12
 * @description Reads strings from a Kafka topic and prints them. Note: a
 *              (non-parallel) SourceFunction has a default parallelism of 1.
 */


/**
 * Minimal Flink streaming job: consumes string records from the Kafka topic
 * {@code flinkdemo} and writes each record to stdout via a print sink.
 */
public class kafkaApp {

    /**
     * Builds the job graph (Kafka source → print sink) and submits it for
     * execution.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // NOTE(review): broker address and group id are hard-coded demo values;
        // externalize them (args/config) before running anywhere real.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "101.35.241.96:9092");
        properties.setProperty("group.id", "test");

        DataStream<String> stream = env
                .addSource(new FlinkKafkaConsumer<>("flinkdemo", new SimpleStringSchema(), properties));

        // print() already attaches a stdout sink; its DataStreamSink return
        // value is not itself meaningful to println (the original wrapped it
        // in System.out.println, which only printed the sink's toString()).
        stream.print();

        // Bug fix: without execute() the job graph is only constructed and the
        // program exits immediately — nothing is ever consumed from Kafka.
        env.execute("kafka-demo");
    }
}
