package com.margo.project.consumer.kafka.client.controller;

import java.time.Duration;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/kafka/consumer")
public class MargoKafkaConsumerController {

	/** Class-bound SLF4J logger; static final so one instance is shared. */
	private static final Logger logger = LoggerFactory.getLogger(MargoKafkaConsumerController.class);

	/** Topic this endpoint consumes from. */
	private static final String TOPIC = "margo-kafka-topic";

	@Autowired
	private KafkaConsumer<String, String> kafkaConsumer;

	/**
	 * Polls one batch of records from {@code margo-kafka-topic}, logs each record key,
	 * and synchronously commits the consumed offsets.
	 *
	 * <p>The method is {@code synchronized} because {@link KafkaConsumer} is NOT
	 * thread-safe, while this singleton controller can be invoked by concurrent
	 * HTTP requests.
	 *
	 * @return always {@code 1} (kept for backward compatibility with existing callers)
	 */
	@RequestMapping("/consumer")
	public synchronized int consumer() {
		// Re-subscribing with the same topic set on every request is harmless,
		// but ideally the subscription would be established once at startup.
		kafkaConsumer.subscribe(Collections.singletonList(TOPIC), new ConsumerRebalanceListener() {

			@Override
			public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
				logger.info("partitions revoked: {}", partitions);
			}

			@Override
			public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
				logger.info("partitions assigned: {}", partitions);
			}
		});
		ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(500));
		records.forEach(record -> logger.info("key: {}", record.key()));
		// Commit offsets synchronously, but only when this poll actually consumed something.
		if (!records.isEmpty()) {
			kafkaConsumer.commitSync();
		}
		return 1;
	}
}
