package com.galeno.day02;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

/**
 * Reads string records from a Kafka topic with Flink and prints them to stdout.
 *
 * @author galeno
 * @date 2021/10/27 20:36
 */
public class KafkaSource {

    /** Topic consumed when no CLI argument is supplied. */
    private static final String DEFAULT_TOPIC = "liu";

    /** Broker list used when no CLI argument is supplied. */
    private static final String DEFAULT_BOOTSTRAP_SERVERS =
            "192.168.77.101:9092,192.168.77.102:9092,192.168.77.103:9092";

    /**
     * Starts a local Flink job (web UI pinned to port 12345) that consumes string
     * records from a Kafka topic and prints each record to stdout.
     *
     * @param args optional overrides: {@code args[0]} = topic name,
     *             {@code args[1]} = comma-separated bootstrap servers;
     *             the original hard-coded values are kept as defaults
     * @throws Exception if the Flink job fails to start or terminates abnormally
     */
    public static void main(String[] args) throws Exception {
        String topic = args.length > 0 ? args[0] : DEFAULT_TOPIC;
        String bootstrapServers = args.length > 1 ? args[1] : DEFAULT_BOOTSTRAP_SERVERS;

        Configuration conf = new Configuration();
        // Fixed REST port so the local web UI address is predictable across runs.
        conf.setInteger("rest.port", 12345);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf);
        env.setParallelism(2);

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", bootstrapServers);
        properties.setProperty("group.id", "test1");
        // Start from the earliest offset when the group has no committed offset yet.
        properties.setProperty("auto.offset.reset", "earliest");

        FlinkKafkaConsumer<String> flinkKafkaConsumer =
                new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), properties);
        DataStreamSource<String> lines = env.addSource(flinkKafkaConsumer);
        lines.print();

        // Named job so it is identifiable in the Flink web UI / logs.
        env.execute("KafkaSource");
    }
}
