package com.haoziqi.chapter_05.Source;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Properties;

/**
 * Example Flink streaming job that consumes String records from a Kafka topic
 * and prints them to stdout.
 * created by A on 2021/3/11
 */
public class KafkaSource {
    /**
     * Builds a streaming pipeline that reads String records from the Kafka
     * topic {@code flink0923} and prints each record to stdout.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job cannot be submitted or fails at
     *         runtime; propagated so the process exits non-zero instead of
     *         silently swallowing the error
     */
    public static void main(String[] args) throws Exception {
        // 1. Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Source - read data from Kafka.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop1:9092,hadoop2:9092,hadoop3:9092");
        properties.setProperty("group.id", "fffffffffff");
        // Three constructor arguments: 1) topic name, 2) deserialization schema
        // used to convert data between Kafka bytes and Flink records,
        // 3) Kafka connection properties.
        FlinkKafkaConsumer<String> kafkaSource = new FlinkKafkaConsumer<>("flink0923",
                new SimpleStringSchema(),
                properties);

        env.addSource(kafkaSource).print();

        // Submit the job. Any failure propagates out of main rather than being
        // caught and printed, which previously masked submission errors.
        env.execute();
    }
}
