package org.example.kafka24;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**
 * Minimal Kafka consumer demo: joins group {@code first_group}, subscribes to
 * topic {@code test}, and logs every record it receives. Runs until killed or
 * until an exception escapes the poll loop.
 */
public class ConsumerDemo {

    private static final Logger logger = LoggerFactory.getLogger(ConsumerDemo.class);

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // BUGFIX: the broker list previously contained a full-width comma (，)
        // between the second and third addresses, so the third broker was never
        // parsed as a separate host. All separators are now ASCII commas.
        props.put("bootstrap.servers", "192.168.110.97:9092,192.168.110.97:9093,192.168.110.97:9094");
        props.put("group.id", "first_group");
        // Where to start when the group has no committed offset: earliest | latest | none.
        props.put("auto.offset.reset", "earliest");
        // Offsets are committed automatically; no manual commitSync/commitAsync needed.
        props.put("enable.auto.commit", "true");

        // try-with-resources closes the consumer (leaving the group cleanly)
        // if anything inside the poll loop throws.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Subscribe ONCE, before the loop — the original re-subscribed on
            // every iteration, causing needless group-membership churn.
            consumer.subscribe(Arrays.asList("test"));
            while (true) {
                // Block for up to 5 seconds waiting for records; an empty batch
                // is returned if nothing arrives in that window.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                for (ConsumerRecord<String, String> record : records) {
                    logger.info("{},{},{},{}",
                            record.topic(), record.partition(), record.offset(), record.value());
                }
            }
        }
    }
}
