package com.china.produce.listener;

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Map;

/**
 * @ClassName
 * @Author niujie
 * @Version
 * @Description
 * @CreateTime 2023/4/1
 */
@Component
public class BigDataTopicListener {

    private static final Logger log = LoggerFactory.getLogger(BigDataTopicListener.class);

    /**
     * Consumes a single record from the big-data topic and acknowledges it
     * after processing completes.
     *
     * <p>Currently disabled: the {@code @KafkaListener} registration is
     * commented out, so this method is never invoked by the container.
     *
     * @param consumerRecord the record delivered by the Kafka container
     * @param ack            manual-acknowledgment handle; invoked only after
     *                       the record has been processed so that a failure
     *                       does not silently commit (and thus lose) the message
     */
    //@KafkaListener(topics = {"test_topic"})
    public void batchConsume(ConsumerRecord<?, ?> consumerRecord, Acknowledgment ack) {
        // Use the SLF4J logger (parameterized) instead of System.out so output
        // flows through the configured appenders and can be filtered by level.
        log.info("Received record value: {}", consumerRecord.value());

        // Acknowledge AFTER processing: acknowledging first would mark the
        // offset committed even if processing threw, breaking at-least-once
        // delivery semantics.
        ack.acknowledge();
    }

    /**
     * Consumes a batch of records from the big-data topic and acknowledges the
     * whole batch after processing completes.
     *
     * <p>Currently disabled: the {@code @KafkaListener} registration is
     * commented out, so this method is never invoked by the container.
     *
     * @param consumerRecords the batch of records delivered by the Kafka container
     * @param ack             manual-acknowledgment handle for the entire batch;
     *                        invoked only after all records have been processed
     */
    // @KafkaListener(topics = {"test_topic"})
    public void batchConsume2(List<ConsumerRecord<?, ?>> consumerRecords, Acknowledgment ack) {
        log.info("Received batch of {} record(s)", consumerRecords.size());
        for (ConsumerRecord<?, ?> record : consumerRecords) {
            log.debug("record: {}", record);
        }

        // Acknowledge only once the whole batch has been processed (see
        // batchConsume for the at-least-once rationale).
        ack.acknowledge();
    }

}
