package com.hub.datasource;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

import java.util.Properties;

/**
 * Flink streaming job that consumes String records from the Kafka topic
 * {@code topic02} and prints them to stdout.
 *
 * <p>Kafka environment setup:
 * <pre>
 * # enter the kafka container
 * docker exec -it kafka-1 bash
 * # create the topic
 * $KAFKA_HOME/bin/kafka-topics.sh --create --topic topic02 --zookeeper 121.41.59.197:2181 --replication-factor 1 --partitions 1
 * # start a console producer
 * $KAFKA_HOME/bin/kafka-console-producer.sh --broker-list 121.41.59.197:9091 --topic topic02
 * </pre>
 */
public class DataSourceFromKafKa {

    public static void main(String[] args) throws Exception {

        // Set up the streaming execution environment.
        StreamExecutionEnvironment executionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment();

        // Build the Kafka source: topic02, plain UTF-8 strings.
        Properties propertiesFromKafka = getPropertiesFromKafka();
        DataStreamSource<String> dataStreamSource = executionEnvironment.addSource(new FlinkKafkaConsumer011<String>(
                "topic02",
                new SimpleStringSchema(),
                propertiesFromKafka));

        // Print each record prefixed with "from_kafka".
        dataStreamSource.print("from_kafka");

        // Trigger job execution; a job name makes it identifiable in the Flink UI.
        executionEnvironment.execute("DataSourceFromKafKa");

    }


    /**
     * Builds the Kafka consumer configuration.
     *
     * @return consumer {@link Properties} for the Flink Kafka source
     */
    private static Properties getPropertiesFromKafka(){
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "121.41.59.197:9091");
        // group.id is mandatory for the Flink Kafka consumer; without it the
        // job fails at startup with "No group.id found in consumer config".
        properties.setProperty("group.id", "flink-datasource-from-kafka");
        properties.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Start from the latest offset when no committed offset exists.
        properties.setProperty("auto.offset.reset", "latest");
        return properties;
    }

}
