package zhao.exmaple;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.KafkaSourceBuilder;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Properties;

/**
 * Demonstrates two ways to consume the Kafka topic {@code bbb} from Flink:
 * <ul>
 *   <li>{@link #one()} — the legacy {@code FlinkKafkaConsumer} source function
 *       (deprecated in newer Flink releases, shown for comparison);</li>
 *   <li>{@link #two()} — the unified {@code KafkaSource} connector introduced
 *       in Flink 1.12, configured via its builder.</li>
 * </ul>
 * Both jobs simply print every record. Requires a reachable Kafka cluster at
 * the broker addresses hard-coded below.
 */
public class KafkaSourceDemo {

    public static void main(String[] args) throws Exception {
        // Run the legacy-consumer variant; switch to two() to try the new API.
        one();
    }

    /**
     * Consumes topic {@code bbb} with the new unified {@link KafkaSource} API
     * and prints each record. Blocks until the job is cancelled.
     *
     * @throws Exception if the Flink job fails or is interrupted
     */
    private static void two() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // The builder replaces the Properties-based configuration of the
        // legacy consumer; offsets/deserialization are first-class settings.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("node1:9092,node2:9092,node3:9092")
                .setTopics("bbb")
                .setGroupId("flink-group")
                .setStartingOffsets(OffsetsInitializer.latest())
                .setDeserializer(KafkaRecordDeserializer.valueOnly(StringDeserializer.class))
                .build();

        // No event-time processing here, so no watermarks are needed.
        env.fromSource(source, WatermarkStrategy.noWatermarks(), "kafkaconsumer")
                .print();
        env.execute();
    }

    /**
     * Consumes topic {@code bbb} with the legacy {@link FlinkKafkaConsumer}
     * source function and prints each record. Blocks until the job is
     * cancelled.
     *
     * @throws Exception if the Flink job fails or is interrupted
     */
    public static void one() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "node1:9092,node2:9092,node3:9092");
        properties.setProperty("group.id", "flink-group");
        // NOTE(review): FlinkKafkaConsumer defaults to setStartFromGroupOffsets(),
        // so "earliest" only applies when the group has no committed offsets yet.
        properties.setProperty("auto.offset.reset", "earliest");
        // Without checkpointing enabled, offsets are committed back to Kafka
        // via the client's auto-commit mechanism configured here.
        properties.setProperty("enable.auto.commit", "true");
        properties.setProperty("auto.commit.interval.ms", "1000");
        properties.setProperty("session.timeout.ms", "30000");
        properties.setProperty("max.poll.records", "100");

        // SimpleStringSchema handles value deserialization; no key/value
        // deserializer properties are needed.
        FlinkKafkaConsumer<String> flinkKafkaConsumer =
                new FlinkKafkaConsumer<>("bbb", new SimpleStringSchema(), properties);

        DataStreamSource<String> kafkaDataSource = env.addSource(flinkKafkaConsumer);
        kafkaDataSource.print();
        env.execute();
    }
}
