package com.fwmagic.flink.projectcase.queryactivitycase.utils;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

import java.util.Properties;

public class FlinkUtilsV1 {

    // Defaults preserved from the original hard-coded values; used when no
    // override is supplied through args.
    private static final String DEFAULT_TOPIC = "gps01";
    private static final String DEFAULT_BOOTSTRAP_SERVERS = "localhost:9092";
    private static final String DEFAULT_GROUP_ID = "gp1";

    /**
     * Creates a Kafka-backed {@link DataStream} source on the given execution environment.
     *
     * <p>Optional positional overrides may be supplied through {@code args}:
     * {@code args[0]} = topic, {@code args[1]} = bootstrap.servers, {@code args[2]} = group.id.
     * Missing, {@code null}, or blank entries fall back to the defaults above, so callers
     * that pass no arguments get the same behavior as before.
     *
     * @param args optional overrides: [topic, bootstrapServers, groupId]; may be null or empty
     * @param simpleStringSchema deserialization schema applied to each Kafka record
     * @param env the Flink streaming execution environment the source is attached to
     * @return a stream of raw String records consumed from Kafka
     */
    public static DataStream<String> createKafkaSource(String[] args, SimpleStringSchema simpleStringSchema, StreamExecutionEnvironment env) {
        String topic = argOrDefault(args, 0, DEFAULT_TOPIC);

        Properties prop = new Properties();
        prop.setProperty("bootstrap.servers", argOrDefault(args, 1, DEFAULT_BOOTSTRAP_SERVERS));
        prop.setProperty("group.id", argOrDefault(args, 2, DEFAULT_GROUP_ID));
        // Start consuming from the earliest available offset when no committed
        // offset exists for this group.
        prop.setProperty("auto.offset.reset", "earliest");
        // NOTE(review): auto offset commit is left at the Kafka default here;
        // with Flink checkpointing enabled, offsets are committed on checkpoints.

        FlinkKafkaConsumer011<String> kafkaSource = new FlinkKafkaConsumer011<>(topic,
                simpleStringSchema,
                prop);

        return env.addSource(kafkaSource);
    }

    /** Returns {@code args[index]} when present and non-blank, otherwise {@code fallback}. */
    private static String argOrDefault(String[] args, int index, String fallback) {
        if (args != null && args.length > index && args[index] != null && !args[index].trim().isEmpty()) {
            return args[index];
        }
        return fallback;
    }

}
