package com.gator.kafka;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.*;

/**
 * @Author PABLO
 * @Date 2022/6/1 17:30
 * @Desc 指定一定时间之前重新消费
 */
/**
 * Kafka consumer that rewinds to the offsets corresponding to a point in
 * time a fixed window in the past and re-consumes from there.
 *
 * <p>Scenario: the last 5 hours of data were processed incorrectly, so we
 * replay the topic starting from the position it was at 5 hours ago.
 *
 * @author PABLO
 * @since 2022/6/1
 */
public class PABLO_KafkaConsumer4 extends Thread {

    /** How far back in time to rewind before re-consuming. */
    private static final Duration REWIND_WINDOW = Duration.ofHours(5);

    KafkaConsumer<Integer, String> consumer;
    String topic;

    /**
     * Builds a consumer for the given topic against the local 3-broker cluster.
     *
     * @param topic the topic to subscribe to and replay
     */
    public PABLO_KafkaConsumer4(String topic) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092,localhost:9093,localhost:9094");
        properties.put(ConsumerConfig.CLIENT_ID_CONFIG, "pablo-consumer");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "pablo_group_1");
        properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // Fall back to the earliest offset when no committed offset exists.
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        consumer = new KafkaConsumer<Integer, String>(properties);
        this.topic = topic;
    }


    @Override
    public void run() {
        consumer.subscribe(Collections.singleton(this.topic));

        // Partition assignment is lazy: poll until the group coordinator has
        // actually handed this consumer its partitions. Any records returned
        // here are harmless because seek() below moves the position back anyway.
        Set<TopicPartition> assignment = consumer.assignment();
        while (assignment.isEmpty()) {
            consumer.poll(Duration.ofSeconds(1));
            assignment = consumer.assignment();
        }

        // Map every assigned partition to the same target timestamp (now - window).
        // Compute the timestamp once so all partitions rewind to the same instant.
        long targetTimestamp = System.currentTimeMillis() - REWIND_WINDOW.toMillis();
        Map<TopicPartition, Long> timestampsToSearch = new HashMap<>();
        for (TopicPartition topicPartition : assignment) {
            timestampsToSearch.put(topicPartition, targetTimestamp);
        }

        // Resolve each timestamp to the earliest offset whose record timestamp
        // is >= the target.
        Map<TopicPartition, OffsetAndTimestamp> offsetsByPartition =
                consumer.offsetsForTimes(timestampsToSearch);

        // Seek each partition to the resolved offset. offsetsForTimes() maps a
        // partition to null when it has no record at/after the timestamp; in
        // that case skip the seek and keep the current position instead of
        // throwing an NPE.
        assignment.forEach(topicPartition -> {
            OffsetAndTimestamp offsetAndTimestamp = offsetsByPartition.get(topicPartition);
            if (offsetAndTimestamp != null) {
                consumer.seek(topicPartition, offsetAndTimestamp.offset());
            }
        });


        while (true) {
            // NOTE(review): consider a volatile boolean / interrupt check here so
            // the thread can be stopped and the consumer closed cleanly.
            ConsumerRecords<Integer, String> consumerRecords = consumer.poll(Duration.ofSeconds(1));
            consumerRecords.forEach(record -> {
                System.out.println(record);

            });

        }
    }

    public static void main(String[] args) {
        new PABLO_KafkaConsumer4("test_partitions").start();
    }
}
