package com;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.Test;

import java.time.Duration;
import java.util.*;

public class CustomConsumer3 {

    /**
     * Demonstrates time-based rewind consumption: resolves, for every partition of the
     * topic, the earliest offset at or after a timestamp one hour in the past
     * (via {@code offsetsForTimes}), seeks there, then polls and prints records forever.
     * NOTE: this test never terminates (intentional demo loop) and requires a reachable
     * Kafka cluster at node01/node02/node03.
     */
    @Test
    public void test1() {
        String TOPIC_NAME = "order";
        String groupId = "group_demo3";

        // Consumer configuration.
        Properties props = new Properties();

        // Kafka cluster bootstrap addresses.
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "node01:9092,node02:9092,node03:9092");

        // Offsets are committed manually; the commitSync() call below is commented out,
        // so nothing is ever committed and the data is re-read on every run.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);

        // Where to start when no committed offset exists for this group.
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");

        // Consumer group name.
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);

        // Key/value deserializers.
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        // KafkaConsumer is Closeable; try-with-resources releases its sockets/buffers
        // even if an exception escapes the poll loop (the original never closed it).
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {

            // All partitions of the topic.
            List<PartitionInfo> topicPartitions = consumer.partitionsFor(TOPIC_NAME);

            // Target timestamp: one hour ago.
            // System.currentTimeMillis() replaces the legacy new Date().getTime().
            long fetchDataTime = System.currentTimeMillis() - 1000L * 60 * 60;

            // partition -> timestamp to rewind to, for every partition of the topic.
            Map<TopicPartition, Long> map = new HashMap<>();
            for (PartitionInfo par : topicPartitions) {
                map.put(new TopicPartition(TOPIC_NAME, par.partition()), fetchDataTime);
            }

            // Resolve the earliest offset whose record timestamp is >= fetchDataTime,
            // per partition. A partition maps to null when no such record exists.
            Map<TopicPartition, OffsetAndTimestamp> parMap = consumer.offsetsForTimes(map);

            // Manual partition assignment (assign, not subscribe: no group rebalancing).
            Set<TopicPartition> topicPartitionSet = parMap.keySet();
            consumer.assign(topicPartitionSet);

            // Seek every partition to its resolved offset.
            for (Map.Entry<TopicPartition, OffsetAndTimestamp> entry : parMap.entrySet()) {
                TopicPartition topicPartition = entry.getKey();
                OffsetAndTimestamp value = entry.getValue();

                // The null that matters is the OffsetAndTimestamp value (see above).
                if (topicPartition == null || value == null) {
                    continue;
                }
                // OffsetAndTimestamp.offset() returns a primitive long; the original's
                // boxed `Long offset` + `offset != null` guard was always true — removed.
                long offset = value.offset();

                System.out.println("partition-" + topicPartition.partition() + "|offset-" + offset);
                // Position the consumer at the timestamp-resolved offset.
                consumer.seek(topicPartition, offset);
                System.out.println("topicPartition:" + topicPartition + ",offset:" + offset);
            }

            // Poll forever; each poll may return records from several partitions.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMinutes(5));
                // Parameterized ConsumerRecord<String, String> instead of the raw type.
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("topic:" + record.topic() + "---partition:" + record.partition()
                            + "---offset:" + record.offset() + "---key:" + record.key()
                            + "---value:" + record.value());

                }
                // Manual commit is deliberately disabled in this demo.
                // consumer.commitSync();
            }
        }
    }
}
