package com.sdemo.utils.mq.kafka;

import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.log4j.Logger;

import com.alibaba.fastjson.JSONObject;
import com.google.common.eventbus.AsyncEventBus;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.consumer.TopicFilter;
import kafka.consumer.Whitelist;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;

public class KfakaMsgConsumer  {
	// BUG FIX: logger was created with KafkaMsgProducer.class, which made every
	// log line from this consumer appear under the producer's logger name.
	private static final Logger log = Logger.getLogger(KfakaMsgConsumer.class);
	/** Shared high-level consumer connector; created by {@link #init()}. */
	private static ConsumerConnector consumer;
	// NOTE(review): both settings are null until wired to real configuration —
	// init() now fails fast with a clear message instead of an anonymous NPE.
	private static String zookeeper = null;//ConfigUtil.getString("kafka.zookeeper.list", "113.98.254.180:2181");
	private static String consumerGroup = null;//ConfigUtil.getString("kafka.consumer.group","test");

	/**
	 * Worker pool used both for the polling loops started by
	 * {@link #addListener(String, Object, Class)} and as the executor backing
	 * {@link #msgBus}.
	 */
	private static ExecutorService executorService = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2);
	/**
	 * Asynchronous message bus. Intended as the delivery channel for consumed
	 * messages; posting is not implemented yet (see {@link #addListener}).
	 */
	public static AsyncEventBus msgBus = new AsyncEventBus(executorService);

	/**
	 * Creates the shared {@link ConsumerConnector}. Must be called once before
	 * {@link #addListener(String, Object, Class)}.
	 *
	 * @throws IllegalStateException if the ZooKeeper address or consumer group
	 *                               has not been configured
	 */
	public static void init() {
		// Fail fast with a descriptive message; previously Properties.put(null)
		// raised a bare NullPointerException with no hint about what was missing.
		if (zookeeper == null || consumerGroup == null) {
			throw new IllegalStateException(
					"KfakaMsgConsumer not configured: zookeeper=" + zookeeper + ", consumerGroup=" + consumerGroup);
		}
		Properties props = new Properties();
		// ZooKeeper connection string used by the legacy high-level consumer.
		props.put("zookeeper.connect", zookeeper);
		// Consumer group id.
		props.put("group.id", consumerGroup);
		// Offsets live in ZooKeeper and are committed on this interval, so a
		// crash may re-deliver up to ~1s of messages (at-least-once semantics).
		props.put("auto.commit.interval.ms", "1000");
		props.put("zookeeper.session.timeout.ms", "10000");
		ConsumerConfig config = new ConsumerConfig(props);
		consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
	}

	/**
	 * Starts a background polling loop for the given topic on the shared pool.
	 * <p>
	 * NOTE(review): {@code listener} and {@code clazz} are currently unused —
	 * deserialization and posting to {@link #msgBus} were never implemented
	 * (the original Kryo code was commented out). The loop only logs each
	 * raw message payload.
	 *
	 * @param topic    topic name (used as a whitelist filter)
	 * @param listener intended msgBus subscriber (not yet registered)
	 * @param clazz    intended message type for deserialization (not yet used)
	 */
	public static <T> void addListener(final String topic, Object listener, final Class<T> clazz) {
//		msgBus.register(listener);
		executorService.submit(new Runnable() {

			@Override
			public void run() {
				// BUG FIX: without this guard, a missing init() caused an NPE
				// that the executor swallowed, silently killing the listener.
				if (consumer == null) {
					log.error("KfakaMsgConsumer.init() must be called before addListener(" + topic + ")");
					return;
				}
				TopicFilter topicFilter = new Whitelist(topic);
				List<KafkaStream<byte[], byte[]>> partitions = consumer.createMessageStreamsByFilter(topicFilter, 1);
				for (KafkaStream<byte[], byte[]> kafkaStream : partitions) {
					ConsumerIterator<byte[], byte[]> ite = kafkaStream.iterator();
					// hasNext() blocks until a message arrives; this loop runs
					// for the lifetime of the connector.
					while (ite.hasNext()) {
						MessageAndMetadata<byte[], byte[]> meta = ite.next();
						byte[] msg = meta.message();
						try {
							// Was System.out.println — route through the logger.
							log.debug(JSONObject.toJSON(msg));
							// TODO: deserialize msg into clazz and post to msgBus.
						} catch (Exception e) {
							// BUG FIX: previously logged with an empty message,
							// making failures impossible to attribute to a topic.
							log.error("failed to handle message from topic " + topic, e);
						}
					}
				}
			}
		});
	}

	/**
	 * Shuts down the consumer connector and the worker pool. Safe to call
	 * even if {@link #init()} was never invoked.
	 */
	public void close() {
		if (null != consumer) {
			consumer.shutdown();
		}
		if (null != executorService) {
			executorService.shutdown();
		}
	}
}
