package cn.enjoy.kafka.controller.origin;

import cn.enjoy.kafka.constant.KafkaConstant;
import io.swagger.annotations.ApiOperation;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RestController;

import java.time.Duration;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * @author K
 * @date 2024-12-24 14:12
 */
@RestController("originConsumer")
public class Consumer {
    /** Broker address shared by all three demo endpoints. */
    private static final String BOOTSTRAP_SERVERS = "192.168.1.38:9092";

    /** The offset used by the seek demo endpoint. */
    private static final long SEEK_OFFSET = 1700L;

    /** Hosts the long-running poll loops so the HTTP requests can return immediately. */
    private final ExecutorService executorService = Executors.newCachedThreadPool();

    /**
     * Creates a {@link KafkaConsumer} with String key/value deserialization, joins the
     * demo consumer group, and subscribes it to the demo topic.
     *
     * @return a consumer ready to poll (partition assignment happens on first poll)
     */
    private KafkaConsumer<String, String> newSubscribedConsumer() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        // Key/value deserializers are mandatory consumer configuration.
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // A group id is mandatory when using subscribe().
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, KafkaConstant.ORIGIN_GROUP_ID);
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        kafkaConsumer.subscribe(Collections.singletonList(KafkaConstant.TOPIC_ORIGIN_TEST));
        return kafkaConsumer;
    }

    /**
     * Polls until the group coordinator has assigned partitions to this consumer.
     * {@code seek()} and {@code offsetsForTimes()} need a concrete assignment first.
     *
     * @return the non-empty set of assigned partitions
     */
    private Set<TopicPartition> awaitAssignment(KafkaConsumer<String, String> kafkaConsumer) {
        Set<TopicPartition> assignment = kafkaConsumer.assignment();
        while (assignment.isEmpty()) {
            kafkaConsumer.poll(Duration.ofSeconds(1));
            assignment = kafkaConsumer.assignment();
        }
        return assignment;
    }

    /**
     * Polls forever in 1-second batches, printing every record. Closes the consumer if
     * the loop exits abnormally (e.g. a WakeupException), releasing sockets and letting
     * the group rebalance immediately instead of waiting for session timeout.
     */
    private void pollAndPrintForever(KafkaConsumer<String, String> kafkaConsumer) {
        try {
            while (true) {
                ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));
                for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                    System.out.println(consumerRecord);
                }
            }
        } finally {
            kafkaConsumer.close();
        }
    }

    /** Starts a background consumer that prints records from the group's committed offsets. */
    @ApiOperation("消费")
    @PostMapping("/consume")
    public void consume() {
        executorService.execute(() -> pollAndPrintForever(newSubscribedConsumer()));
    }

    /** Starts a background consumer that begins reading each assigned partition at offset 1700. */
    @ApiOperation("消费-offset")
    @PostMapping("/consumeOffset")
    public void consumeOffset() {
        executorService.execute(() -> {
            KafkaConsumer<String, String> kafkaConsumer = newSubscribedConsumer();
            // seek() only works once partitions are actually assigned.
            for (TopicPartition topicPartition : awaitAssignment(kafkaConsumer)) {
                kafkaConsumer.seek(topicPartition, SEEK_OFFSET);
            }
            pollAndPrintForever(kafkaConsumer);
        });
    }

    /** Starts a background consumer that begins reading from the offsets of one day ago. */
    @ApiOperation("消费-时间")
    @PostMapping("/consumeTime")
    public void consumeTime() {
        executorService.execute(() -> {
            KafkaConsumer<String, String> kafkaConsumer = newSubscribedConsumer();
            Set<TopicPartition> assignment = awaitAssignment(kafkaConsumer);
            // Ask the broker for the earliest offset at or after "one day ago" per partition.
            long oneDayAgo = System.currentTimeMillis() - Duration.ofDays(1).toMillis();
            Map<TopicPartition, Long> timestampToSearch = new HashMap<>();
            for (TopicPartition topicPartition : assignment) {
                timestampToSearch.put(topicPartition, oneDayAgo);
            }
            Map<TopicPartition, OffsetAndTimestamp> offsets = kafkaConsumer.offsetsForTimes(timestampToSearch);
            for (TopicPartition topicPartition : assignment) {
                OffsetAndTimestamp offsetAndTimestamp = offsets.get(topicPartition);
                // A null entry means no record at or after that timestamp; keep the default position.
                if (offsetAndTimestamp != null) {
                    kafkaConsumer.seek(topicPartition, offsetAndTimestamp.offset());
                }
            }
            pollAndPrintForever(kafkaConsumer);
        });
    }
}
