package hn.cch.kafka.listener;


import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.util.List;

@Component
public class TopicListener {

    /*
     * NOTE(review): every @KafkaListener in this class is currently commented
     * out, so this component registers no Kafka consumers at all. The disabled
     * variants below are kept as ready-to-enable examples:
     *   1. listen      - auto-ack, single record
     *   2. listenAck   - manual ack, single record
     *   3. listenBatch - manual ack, batch of records (requires a batch-enabled
     *                    "containerFactory" bean)
     * Topics come from the comma-separated property "custom.kafka.topics";
     * startup is gated by "custom.kafka.auto-startup".
     */

    // Idiomatic modifier order: "static final" (was "final static").
    private static final Logger logger = LoggerFactory.getLogger(TopicListener.class);


    // Variant 1: simplest form — container auto-commits offsets; logs each
    // record's coordinates (topic/key/partition/offset) and payload.
    // @KafkaListener(
    //     autoStartup = "true",
    //     topics = "#{'${custom.kafka.topics}'.split(',')}")
    // public void listen(ConsumerRecord<String, String> consumerRecord) {
    //     logger.info("{}:{}:{}:{}:{}", consumerRecord.topic(), consumerRecord.key(),
    //         consumerRecord.partition(), consumerRecord.offset(), consumerRecord.value());
    // }
    //
    // Variant 2: manual acknowledgment — the container must be configured with
    // AckMode.MANUAL (or MANUAL_IMMEDIATE) for the Acknowledgment argument to
    // be injected.
    // @KafkaListener(
    //     // containerFactory = "containerFactory",
    //     autoStartup = "${custom.kafka.auto-startup}",
    //     topics = "#{'${custom.kafka.topics}'.split(',')}")
    // public void listenAck(ConsumerRecord<String, String> consumerRecord, Acknowledgment acknowledgment) {
    //     logger.info("{}:{}:{}:{}:{}", consumerRecord.topic(), consumerRecord.key(),
    //         consumerRecord.partition(), consumerRecord.offset(), consumerRecord.value());
    //     // manual offset commit
    //     acknowledgment.acknowledge();
    // }

    // Variant 3: batch consumption — receives all records of one poll; note
    // the acknowledge() call is itself commented out, so enabling this as-is
    // would never commit offsets and records would be redelivered on restart.
    // @KafkaListener(
    //     containerFactory = "containerFactory",
    //     autoStartup = "${custom.kafka.auto-startup}",
    //     topics = "#{'${custom.kafka.topics}'.split(',')}")
    // public void listenBatch(List<ConsumerRecord<String, String>> list, Acknowledgment acknowledgment) {
    //     logger.info("batch size:{}", list.size());
    //     // acknowledgment.acknowledge();
    // }
}
