package streaming.api.source;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import utils.PropertiesReader;

import java.util.Properties;

/**
 * Reads data from a Kafka source via {@code env.addSource(FlinkKafkaConsumer)}
 * and prints each record to stdout.
 *
 * <p>Broker addresses and the source topic are loaded from the project's
 * properties file through {@code PropertiesReader}.
 */
public class Source4_kafka {

    // Kafka bootstrap servers, e.g. "host1:9092,host2:9092" — resolved from config.
    private static final String KAFKA_SERVERS = PropertiesReader.get("default.kafka.servers");
    // Topic to consume from — resolved from config.
    private static final String TOPIC_FROM = PropertiesReader.get("default.kafka.topic.json.C");

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps the printed output ordered for this demo.
        env.setParallelism(1);

        Properties props = new Properties();
        props.setProperty("bootstrap.servers", KAFKA_SERVERS);
        props.setProperty("group.id", "flink-consumer-Source4_kafka");

        // Parameterize with <String>: SimpleStringSchema deserializes each record
        // to a String, so raw types are avoided and the pipeline is type-safe.
        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>(TOPIC_FROM, new SimpleStringSchema(), props);
        DataStream<String> dataStream = env.addSource(kafkaConsumer);

        dataStream.print("data:");

        // Give the job an explicit name so it is identifiable in the Flink UI.
        env.execute("Source4_kafka");
    }

}
