package com.youxin.logprocess.kafka;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;

/**
 * kafka消费消息队列
 * @author yingde.cao
 *
 */
/**
 * Kafka consumer task: subscribes to one or more topics and logs every
 * record received. Also exposes a helper to list the topics visible to
 * this consumer's configuration.
 *
 * @author yingde.cao
 */
public class KafkaConsumerTask {
	private static final Logger logger = LoggerFactory.getLogger(KafkaConsumerTask.class);
	// Consumer configuration (bootstrap servers, group id, deserializers, ...).
	private final Properties properties;
	// Topic names this task subscribes to in start().
	private final String[] topics;

	/**
	 * Creates a consumer task.
	 *
	 * @param properties Kafka consumer configuration; must not be null
	 * @param topic      one or more topic names to subscribe to; must not be null
	 * @throws NullPointerException if {@code properties} or {@code topic} is null
	 */
	public KafkaConsumerTask(Properties properties, String... topic) {
		this.properties = Objects.requireNonNull(properties, "properties cannot be null");
		this.topics = Objects.requireNonNull(topic, "topic cannot be null");
	}

	/**
	 * Polls the subscribed topics forever, logging each record's offset, key
	 * and value. Blocks the calling thread indefinitely; if the loop exits
	 * exceptionally (e.g. a WakeupException triggered from another thread),
	 * the consumer is closed so sockets are released and the group is left
	 * cleanly.
	 */
	public void start() {
		KafkaConsumer<String, String> consumer = new KafkaConsumer<>(this.properties);
		try {
			consumer.subscribe(Arrays.asList(topics));
			while (true) {
				ConsumerRecords<String, String> records = consumer.poll(1000);
				for (ConsumerRecord<String, String> record : records) {
					// Parameterized SLF4J logging instead of System.out.printf so
					// output goes through the configured logging backend.
					logger.info("offset={}, key={}, value={}, date={}",
							record.offset(), record.key(), record.value(), new Date());
				}
			}
		} finally {
			// Original code leaked the consumer on any exception; always close.
			consumer.close();
		}
	}

	/**
	 * Lists all topics (and their partition metadata) visible to a consumer
	 * built from this task's configuration.
	 *
	 * @return map of topic name to its partition info
	 */
	public Map<String, List<PartitionInfo>> listTopics() {
		// try-with-resources guarantees the short-lived consumer is closed
		// even if listTopics() throws (the original manual close() did not).
		try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(this.properties)) {
			return consumer.listTopics();
		}
	}

}
