package com.example.kafkalearn.consumer;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.Properties;

/**
 * Demonstrates manual, synchronous offset committing ({@code commitSync()}):
 * auto-commit is disabled, and after each non-empty poll the consumer commits,
 * then prints the last consumed offset, the committed offset, and the next
 * fetch position for partition 0 of the topic.
 *
 * @author 何昌杰 (He Changjie)
 */
public class Consumer2Start {

    /** Topic to consume; the offset bookkeeping below looks at its partition 0 only. */
    private static final String TOPIC = "harris";

    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.51.34.61:9092");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "group.demo");
        // Offsets are committed manually via commitSync() below, so disable auto-commit.
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);

        // try-with-resources ensures the consumer is always closed (the original leaked it).
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
            consumer.subscribe(Collections.singletonList(TOPIC));
            TopicPartition tp = new TopicPartition(TOPIC, 0);
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                if (records.isEmpty()) {
                    break; // demo exit: stop once a poll returns nothing
                }
                // Synchronously commit the offsets of everything returned by this poll.
                consumer.commitSync();

                List<ConsumerRecord<String, String>> partitionRecords = records.records(tp);
                if (partitionRecords.isEmpty()) {
                    // The poll may have returned records only for other partitions; guard
                    // against the IndexOutOfBoundsException the original hit on get(size - 1).
                    continue;
                }
                long lastConsumedOffset = partitionRecords.get(partitionRecords.size() - 1).offset();
                System.out.println("offset " + lastConsumedOffset);

                // committed() returns null when no offset has ever been committed for tp.
                OffsetAndMetadata offsetAndMetadata = consumer.committed(tp);
                if (offsetAndMetadata != null) {
                    System.out.println("offset " + offsetAndMetadata.offset());
                }
                long position = consumer.position(tp);
                System.out.println("offset " + position);
            }
        }
    }
}
