package com.zyh.flink.day02.source;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.util.Properties;

/**
 * Demo job: Flink acting as a Kafka consumer. Reads string records from a
 * Kafka topic and prints them to stdout until the job is cancelled.
 */
public class FromKafkaTest {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Four ingredients are needed to build the Kafka source:
        // topic name, value deserializer, broker address, and consumer group id.
        String topic = "topic-flink-source";

        // Kafka connection properties: cluster entry point and consumer group name.
        Properties kafkaProps = new Properties();
        kafkaProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka24:9092");
        kafkaProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink");

        // SimpleStringSchema deserializes each record's value bytes back into a String.
        FlinkKafkaConsumer<String> kafkaSource =
                new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), kafkaProps);

        DataStreamSource<String> stream = env.addSource(kafkaSource);
        stream.print();
        env.execute("a");
    }
}
