package cn.tannn;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.Properties;

/**
 * @author tn
 * @date 2022-02-09 09:58
 */
public class Kafka {

    /**
     * Reads string records from a local Kafka topic with the Flink {@code KafkaSource}
     * connector and prints them to stdout.
     *
     * <p>Requires a Kafka broker on {@code localhost:9092} with the topic
     * {@code quickstart-events} (the topic from the Kafka quickstart guide).
     *
     * @param args unused
     * @throws Exception if the Flink job fails to execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Extra consumer properties passed through to the underlying Kafka client.
        Properties properties = new Properties();
        properties.put("enable.auto.commit", "true");
        properties.put("auto.commit.interval.ms", "1000");

        // Starting-offset options, pick exactly one — calling setStartingOffsets twice
        // would silently overwrite the earlier call:
        // https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/connectors/datastream/kafka/#starting-offset
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("localhost:9092")
                .setTopics("quickstart-events") // topic from the official Kafka quickstart example
                .setGroupId("test-consumer-group") // see kafka_2.13-3.1.0/config/consumer.properties
                .setStartingOffsets(OffsetsInitializer.latest()) // skip existing data (start from the latest offset)
//                .setStartingOffsets(OffsetsInitializer.earliest()) // read all data (start from the earliest offset)
//                .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.EARLIEST)) // start from committed offsets; fall back to earliest if none exist
//                .setStartingOffsets(OffsetsInitializer.committedOffsets()) // start from committed offsets
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .setProperties(properties)
                .build();

        DataStreamSource<String> kafkaSource = env
                .fromSource(source, WatermarkStrategy.noWatermarks(), "Kafka Source");

        // Identity flatMap kept as a demo of attaching an operator to the source stream;
        // .returns(...) is needed because the lambda's type is erased.
        kafkaSource.flatMap((FlatMapFunction<String, String>) (s, collector) -> collector.collect(s)).returns(String.class);
        kafkaSource.print();

        env.execute("Kafka read");
    }
}
