package org.ccay.async;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import javax.inject.Inject;

import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;

import org.ccay.core.exception.ApplicationException;
import org.ccay.core.log.CcayLoggerFactory;
import org.ccay.core.log.ILogger;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.util.SerializationUtils;

/**
 * Kafka message listener: subscribes to a configured topic ("conduit") with a
 * fixed number of consumer streams and dispatches each deserialized
 * {@code AsyncMessage} to the async framework's matching processor.
 * 
 * @since 2015-12-25
 */
public class KafkaMessageListener implements DisposableBean, InitializingBean {

	private final ILogger logger = CcayLoggerFactory.getLogger(KafkaMessageListener.class);

	@Inject
	private IAsyncFramework asyncFramework;

	/** Connector to the Kafka cluster; created from {@link #consumerFactory} on startup. */
	private ConsumerConnector consumer;

	/** Pool running one {@link Consumer} task per Kafka stream. */
	private ExecutorService executor;

	/** Name of the topic ("conduit") to listen on. */
	private String conduit;

	/** Factory that builds the {@link ConsumerConnector}; injected by Spring config. */
	private KafkaConsumerFactory consumerFactory;

	/** Number of streams (and worker threads) used to consume the topic. */
	private int threads;

	public void setConduit(String conduit) {
		this.conduit = conduit;
	}

	public void setThreads(int threads) {
		this.threads = threads;
	}

	public void setConsumerFactory(KafkaConsumerFactory consumerFactory) {
		this.consumerFactory = consumerFactory;
	}

	/** Spring lifecycle hook: stops the Kafka consumer and the worker pool. */
	public void destroy() throws Exception {
		shutdown();
	}

	/**
	 * Stops consuming: shuts down the Kafka connector first (so the streams stop
	 * delivering messages), then the executor, giving in-flight message handlers
	 * a short grace period before forcing termination.
	 */
	public void shutdown() {
		if (consumer != null) {
			consumer.shutdown();
		}
		if (executor != null) {
			executor.shutdown();
			try {
				// Wait briefly for running Consumer tasks to drain; previously
				// shutdown() returned immediately and could cut handlers off.
				if (!executor.awaitTermination(5, TimeUnit.SECONDS)) {
					executor.shutdownNow();
				}
			} catch (InterruptedException e) {
				executor.shutdownNow();
				Thread.currentThread().interrupt(); // preserve interrupt status
			}
		}
	}

	/**
	 * Spring lifecycle hook: creates the consumer connector and starts listening
	 * on the configured conduit. Failures are logged but deliberately not
	 * rethrown, so a broken Kafka connection does not abort application startup.
	 */
	public void afterPropertiesSet() throws Exception {
		try {
			consumer = consumerFactory.createConsumer();
			start(conduit);
		} catch (Exception e) {
			logger.warn("kafka conduit listener fail,please check.");
			logger.error(e);
		}
	}

	/**
	 * Starts listening: requests {@link #threads} streams for the topic and
	 * submits one {@link Consumer} task per stream to a fixed thread pool.
	 *
	 * @param topic the conduit (topic) name to subscribe to
	 */
	public void start(String topic) {
		Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
		// Integer.valueOf uses the cache; new Integer(...) is deprecated.
		topicCountMap.put(topic, Integer.valueOf(threads));
		Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
		List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);

		executor = Executors.newFixedThreadPool(threads);

		int threadNumber = 0;
		for (final KafkaStream<byte[], byte[]> stream : streams) {
			executor.submit(new Consumer(stream, threadNumber));
			threadNumber++;
		}
	}

	/**
	 * Worker that drains one Kafka stream, deserializes each message payload
	 * into an {@code AsyncMessage} and routes it to the matching processor.
	 */
	class Consumer implements Runnable {
		private final KafkaStream<byte[], byte[]> stream;
		private final int threadNumber;

		public Consumer(KafkaStream<byte[], byte[]> stream, int threadNumber) {
			this.threadNumber = threadNumber;
			this.stream = stream;
		}

		public void run() {
			ConsumerIterator<byte[], byte[]> it = stream.iterator();
			while (it.hasNext()) {
				MessageAndMetadata<byte[], byte[]> meta = it.next();
				try {
					// SECURITY NOTE(review): native Java deserialization of the
					// raw payload — safe only if all producers are trusted.
					// Consider JSON or an ObjectInputFilter otherwise.
					AsyncMessage message = (AsyncMessage) SerializationUtils.deserialize(meta.message());
					process(message);
				} catch (Exception e) {
					// One bad message must not kill the consumer thread.
					logger.error(e);
				}
			}
		}

		/**
		 * Dispatches one message to its processor. Application and runtime
		 * exceptions are logged so the consuming loop keeps running; an
		 * {@link Error} is logged as fatal and rethrown (terminating this
		 * worker thread).
		 *
		 * @param message the deserialized async message
		 */
		public void process(AsyncMessage message) {
			logger.debug("kafkaListener Thread " + threadNumber + " Received async message:" + message.getType());
			IMessageProcessor processor = asyncFramework.getMessageProcessor(message);
			if (processor != null) {
				try {
					processor.process(message);
				} catch (ApplicationException ex) {
					logger.error(ex);
				} catch (RuntimeException e) {
					logger.error(e);
				} catch (Error e) {
					logger.fatal(e);
					throw e;
				}
			}
		}
	}
}
