package com.study.mq.consumer;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.util.List;

/**
 * Kafka batch consumer.
 *
 * <p>Per the container-factory configuration (defined elsewhere — confirm against
 * the config class): the topic has 2 partitions and the default
 * {@code kafkaListenerContainerFactory} concurrency is 16 threads, so 2 threads
 * each consume one partition. With {@code max.poll.records=500} and manual ack
 * mode, each thread polls up to 500 records from its partition and acknowledges
 * the whole partition batch at once.
 */
@Component
@Slf4j
public class KafkaConsumer {

    /**
     * Consumes one polled batch of records and manually acknowledges the entire batch.
     *
     * <p>Note: Spring Kafka also supports receiving {@code List<Message<?>>} payloads
     * (with {@code MessageHeaders} access), but the {@code ConsumerRecord} API is more
     * convenient here, so that variant is not used.
     *
     * @param consumerRecords the records returned by a single poll (up to 500 per the
     *                        consumer configuration — see class Javadoc)
     * @param ack             manual acknowledgment handle for the whole batch
     */
    @KafkaListener(topics = "${kafka.service.topic}", containerFactory = "kafkaListenerContainerFactory", batch = "true")
    public void consumeMessage(List<ConsumerRecord<?, ?>> consumerRecords, Acknowledgment ack) {
        // Use the SLF4J logger (parameterized) rather than System.out.println so batch
        // output goes through the configured logging backend like everything else.
        log.info("开始消费消息:{}", consumerRecords.size());
        // Parameterized logging handles toString() lazily; no explicit call needed.
        consumerRecords.forEach(consumerRecord -> log.info("消费消息:{}", consumerRecord));
        // Acknowledge only after the whole batch is processed; if this method throws
        // before this point, the un-acked batch will be redelivered.
        ack.acknowledge();
    }
}
