package com.linkstec.mot.service;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.linkstec.mot.bean.KafkaConfig;
import com.linkstec.mot.util.ConfigUtil;
import com.linkstec.mot.util.ExceptionTracker;
import com.linkstec.mot.util.StringUtil;

/**
 * Kafka message consumer service (legacy 0.8.x high-level consumer).
 * <p>
 * Connects to Kafka through ZooKeeper, starts one {@code MsgParserService}
 * thread per consumer stream and a fixed pool of {@code DataWorkerService}
 * threads, then blocks to keep the consumer threads alive.
 *
 * @author PENGYAO
 */
@Service("kafkaConsumerService")
public class KafkaConsumerService {

	private static final Logger logger = LoggerFactory
			.getLogger(KafkaConsumerService.class);

	@Autowired
	private KafkaConfig kafkaConfig;

	// Consumer threads per topic ("kafka.threads", default 1).
	private final int kafkaCustomerThreads = Integer.parseInt(ConfigUtil
			.getProperty("kafka.threads", "1"));

	// Data-processing worker threads ("data.worker.threads", default 4).
	private final int dataWorkerThreads = Integer.parseInt(ConfigUtil
			.getProperty("data.worker.threads", "4"));

	/**
	 * Starts consuming: builds the consumer configuration, creates the
	 * message streams, submits one parser task per stream plus the data
	 * worker pool, and then parks the calling thread indefinitely so the
	 * daemon keeps running. Returns only if the calling thread is
	 * interrupted.
	 *
	 * @throws RuntimeException if "kafka.topic" is not configured
	 */
	public void start() {
		Properties props = new Properties();
		props.put("zookeeper.connect", kafkaConfig.getZkList());
		props.put("zookeeper.session.timeout.ms", "30000");
		props.put("zookeeper.sync.time.ms", "2000");

		props.put("group.id", kafkaConfig.getGroupId());
		props.put("auto.offset.reset", kafkaConfig.getOffsetReset());
		props.put("auto.commit.interval.ms", kafkaConfig.getCommitInterval());
		// FIX: the key previously contained a trailing space
		// ("fetch.message.max.bytes ") so Kafka silently ignored the
		// configured maximum message size.
		props.put("fetch.message.max.bytes", kafkaConfig.getMessageMaxBytes());

		kafka.consumer.ConsumerConfig config = new kafka.consumer.ConsumerConfig(
				props);
		ConsumerConnector consumer = kafka.consumer.Consumer
				.createJavaConsumerConnector(config);

		String[] topicArr = getTopicArr();

		Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer
				.createMessageStreams(getTopicMap(topicArr));

		// One thread per stream: streams-per-topic * topic count.
		ExecutorService kafkaCustomerPool = Executors
				.newFixedThreadPool(topicArr.length * kafkaCustomerThreads);

		for (String topicItem : topicArr) {
			List<KafkaStream<byte[], byte[]>> streamList = consumerMap
					.get(topicItem);
			int streamListSize = streamList.size();
			logger.info("start [{}] kafka customer threads for topic: {}",
					streamListSize, topicItem);

			for (int i = 0; i < streamListSize; i++) {
				KafkaStream<byte[], byte[]> kafkaStream = streamList.get(i);
				logger.info("start kafka customer threads[{}] topic : {}", i,
						topicItem);
				kafkaCustomerPool.execute(new MsgParserService(kafkaStream));
			}
		}

		// No further tasks will be submitted; running tasks keep going.
		kafkaCustomerPool.shutdown();

		// Data-processing thread pool.
		ExecutorService dataWorkerPool = Executors
				.newFixedThreadPool(dataWorkerThreads);
		logger.info("dataWorker thread size:{}...", dataWorkerThreads);
		for (int i = 0; i < dataWorkerThreads; i++) {
			dataWorkerPool.execute(new DataWorkerService());
			logger.info("start [{}] dataWorker thread.", i);
		}
		dataWorkerPool.shutdown();

		// Keep the caller parked so the worker threads stay alive.
		while (true) {
			try {
				Thread.sleep(1000 * 3600);
			} catch (InterruptedException e) {
				// FIX: restore the interrupt status and stop blocking
				// instead of swallowing the interrupt and looping forever,
				// so the service thread can actually be shut down.
				Thread.currentThread().interrupt();
				logger.error(ExceptionTracker.trace(e));
				return;
			}
		}
	}

	/**
	 * Builds the topic-to-stream-count map required by
	 * {@code createMessageStreams}: every topic gets
	 * {@code kafkaCustomerThreads} streams.
	 *
	 * @param topicArr topic names to subscribe to
	 * @return map of topic name to desired stream count
	 */
	private Map<String, Integer> getTopicMap(String[] topicArr) {
		Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
		for (String topic : topicArr) {
			topicCountMap.put(topic, kafkaCustomerThreads);
		}
		return topicCountMap;
	}

	/**
	 * Reads the comma-separated "kafka.topic" property and splits it into
	 * individual topic names.
	 *
	 * @return configured topic names
	 * @throws RuntimeException if the property is missing or empty
	 */
	private String[] getTopicArr() {
		String messageTopic = ConfigUtil.getProperty("kafka.topic");
		if (StringUtil.isNullOrEmpty(messageTopic)) {
			throw new RuntimeException("kafka.topic未配置，服务退出运行");
		}
		String[] topicArr = messageTopic.split(",");
		return topicArr;
	}
}
