package com.whale.springboot.kafka.consumer;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.util.*;

@Component
@Slf4j
public class ConsumerListener {

    /**
     * Consumes a single message. {@code topics} may list several topics,
     * e.g. {@code topics = {"topic1", "topic2"}}.
     *
     * @param message the message payload
     */
    @KafkaListener(id = "consumerSingle", topics = {"hello-kafka-test-topic"})
    public void consumerSingle(String message) {
        log.info("consumerSingle ====> message: {}", message);
    }

    /**
     * Consumes messages in batch. Requires a batch-enabled listener container
     * factory to be configured elsewhere in the application.
     *
     * @param messages the records delivered by one poll
     */
    @KafkaListener(id = "consumerBatch", topics = "hello-batch")
    public void consumerBatch(List<ConsumerRecord<String, String>> messages) {
        log.info("consumerBatch =====> messageSize: {}", messages.size());
        // Parameterized logging instead of eager toString() concatenation.
        log.info("{}", messages);
    }

    /**
     * Demonstrates routing listener exceptions to a named error handler bean
     * ({@code consumerAwareListenerErrorHandler}); always throws.
     *
     * @param message the message payload (unused — the method fails on purpose)
     */
    @KafkaListener(id = "consumerException", topics = "hello-kafka-test-topic-exception", errorHandler = "consumerAwareListenerErrorHandler")
    public void consumerException(String message) {
        throw new RuntimeException("consumer exception");
    }

    /**
     * Verifies that a configured {@code ConsumerInterceptor} is applied before
     * the record reaches this listener.
     *
     * @param message the message payload
     */
    @KafkaListener(id = "interceptor", topics = "consumer-interceptor")
    public void consumerInterceptor(String message) {
        log.info("consumerInterceptor ====> message: {}", message);
    }


    /**
     * Consumes a batch and commits offsets per partition, so progress on one
     * partition is persisted independently of the others.
     *
     * @param messages the records delivered by one poll
     * @param consumer the underlying consumer, used for manual offset commits
     */
    @KafkaListener(topics = "jkhtest6", groupId = "consumerBatchTest18")
    public void consumerBatchTest(List<ConsumerRecord<String, String>> messages,
                                  Consumer<String, String> consumer) {
        log.info("consumerBatch =====> messageSize: {}", messages.size());

        // Group records by partition, preserving delivery order within each partition.
        Map<Integer, List<ConsumerRecord<String, String>>> byPartition = new HashMap<>();
        for (ConsumerRecord<String, String> record : messages) {
            byPartition.computeIfAbsent(record.partition(), p -> new ArrayList<>()).add(record);
        }

        for (Map.Entry<Integer, List<ConsumerRecord<String, String>>> entry : byPartition.entrySet()) {
            List<ConsumerRecord<String, String>> records = entry.getValue();
            for (ConsumerRecord<String, String> record : records) {
                log.info("{}", record);
            }

            // Kafka expects the NEXT offset to consume, hence last offset + 1.
            ConsumerRecord<String, String> last = records.get(records.size() - 1);
            long nextOffset = last.offset() + 1;

            // Take the topic from the record itself rather than re-hard-coding
            // "jkhtest6", so the commit target cannot drift from the topic this
            // listener is actually subscribed to.
            TopicPartition topicPartition = new TopicPartition(last.topic(), entry.getKey());

            consumer.commitSync(Collections.singletonMap(topicPartition, new OffsetAndMetadata(nextOffset)));
            log.info("分区{} 已提交，offset={}", entry.getKey(), nextOffset);
        }
    }

}