package com.example.dddsample.application.event;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.PartitionOffset;
import org.springframework.kafka.annotation.TopicPartition;
import org.springframework.kafka.listener.AcknowledgingMessageListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;

import java.util.List;

/**
 * Kafka consumer for customer operation events.
 *
 * <p>Listens on topic {@code customerOperTopic} as consumer group {@code bebm},
 * consuming records in batches via the {@code batchContainerFactory} listener
 * container factory (configured elsewhere in the application context).
 *
 * @author bebm
 * @date 2022/4/1 15:12
 */
@Slf4j
@Component
public class CustomerKafkaConsumerEvent {

    /**
     * Batch consumer for {@code customerOperTopic}.
     *
     * <p>Each poll delivers a batch of records; every record value is logged and is
     * intended to be converted and written to the persistence layer (conversion and
     * persistence are not yet implemented — see TODO below).
     *
     * <p>To pin this listener to specific partitions or start from a fixed offset,
     * add a {@code topicPartitions} attribute to the annotation using
     * {@link TopicPartition} / {@link PartitionOffset} entries
     * (e.g. {@code @PartitionOffset(partition = "2", initialOffset = "100")}).
     *
     * @param records the batch of records fetched in a single poll; may be empty
     */
    @KafkaListener(topics = {"customerOperTopic"}, groupId = "bebm", containerFactory = "batchContainerFactory")
    public void customerOperBatch(List<ConsumerRecord<?, ?>> records) {
        if (CollectionUtils.isEmpty(records)) {
            return;
        }
        for (ConsumerRecord<?, ?> record : records) {
            // String.valueOf guards against tombstone records: record.value() is null
            // for deletes, and calling toString() on it directly would throw an NPE.
            String json = String.valueOf(record.value());
            // TODO: deserialize into the domain type.
            log.info("日志输出:{}", json);
            // TODO: write to the persistence layer.
        }
    }
}
