package com.lvmama.rhino.service.impl;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import javax.annotation.PostConstruct;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;

import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.lvmama.rhino.mapper.ManGodCountMapper;
import com.lvmama.rhino.pojo.ManGodCount;
import com.lvmama.rhino.service.StreamService;
import com.lvmama.rhino.utils.CommonUtils;
import com.lvmama.rhino.utils.ConfigHelper;
import com.lvmama.rhino.utils.JedisTemplate;
import com.lvmama.rhino.utils.RhinoConstant;

@Service("streamService")
public class StreamServiceImpl implements StreamService {

	/** Maximum number of real-time records returned per call and retained in Redis. */
	private static final int REAL_TIME_LIMIT = 60;

	private static final Logger logger = Logger.getLogger(StreamServiceImpl.class);

	/** Shared Jackson mapper — thread-safe and expensive to build, so cached statically. */
	private static ObjectMapper mapper = new ObjectMapper();

	/** Redis handle used for writes (lpush / ltrim). */
	private JedisTemplate write = JedisTemplate.getWriterInstance();

	/** Redis handle used for reads (llen / lrange). */
	private JedisTemplate read = JedisTemplate.getReaderInstance();

	@Autowired
	private ManGodCountMapper countMapper;

	/**
	 * Starts one daemon-style consumer thread per configured Kafka stream.
	 * Each thread reads man-god-count JSON messages from the configured topic
	 * and pushes the raw JSON onto two Redis lists: the full history list and
	 * the bounded real-time list consumed by {@link #getManGodCountRealTime()}.
	 *
	 * @throws IOException if {@code consumer.properties} cannot be read
	 */
	@PostConstruct
	public void init() throws IOException {
		Properties prop = ConfigHelper.getProperties("consumer.properties");
		ConsumerConfig config = new ConsumerConfig(prop);
		ConsumerConnector consumer = Consumer
				.createJavaConsumerConnector(config);
		RhinoConstant rhinoConstant = RhinoConstant.getInstance();
		String topic = rhinoConstant.getValue("topicForManGodCount");
		Integer threadNum = Integer.valueOf(rhinoConstant.getValue("threadNumForManGodCount"));
		Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
		topicCountMap.put(topic, threadNum);

		Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer
				.createMessageStreams(topicCountMap);
		List<KafkaStream<byte[], byte[]>> streams = consumerMap.get(topic);

		for (final KafkaStream<byte[], byte[]> kafkaStream : streams) {
			new Thread(new Runnable() {
				@Override
				public void run() {
					for (MessageAndMetadata<byte[], byte[]> mm : kafkaStream) {
						// NOTE(review): new String(byte[]) uses the platform default
						// charset; the producer's encoding should be confirmed and
						// made explicit (e.g. UTF-8) if they can differ.
						String json = new String(mm.message());
						logger.info("get manGodCount message :" + json);
						// No null check needed: new String(byte[]) never returns null.
						try {
							write.lpush(CommonUtils.KEY_FOR_MAN_GOD, json);
							write.lpush(CommonUtils.KEY_FOR_MAN_GOD_REAL_TIME, json);
						} catch (Exception e) {
							// Log the full stack trace through the logger instead of
							// printStackTrace(), so failures reach the application log.
							logger.error("save manGodCount message failed ", e);
						}
					}
				}

			}).start();

		}
	}

	/**
	 * Returns up to {@value #REAL_TIME_LIMIT} of the most recent ManGodCount
	 * records from the real-time Redis list. When more entries exist, the list
	 * is trimmed back to the newest {@value #REAL_TIME_LIMIT} (lpush inserts
	 * at the head, so index 0 is the most recent message).
	 *
	 * @return the parsed records, or {@code null} when the list is empty or
	 *         the stored JSON cannot be parsed (behavior preserved for
	 *         existing callers)
	 */
	@Override
	public List<ManGodCount> getManGodCountRealTime() {
		Long length = read.llen(CommonUtils.KEY_FOR_MAN_GOD_REAL_TIME);
		// Guard against a null Long before unboxing; 0L (uppercase) avoids the
		// easily-misread lowercase 'l' literal.
		if (length == null || length == 0L) {
			return null;
		}

		List<String> jsons;
		if (length > REAL_TIME_LIMIT) {
			jsons = read.lrange(CommonUtils.KEY_FOR_MAN_GOD_REAL_TIME, 0, REAL_TIME_LIMIT - 1);
			// Keep only the newest entries so the list stays bounded.
			write.ltrim(CommonUtils.KEY_FOR_MAN_GOD_REAL_TIME, 0, REAL_TIME_LIMIT - 1);
		} else {
			jsons = read.lrange(CommonUtils.KEY_FOR_MAN_GOD_REAL_TIME, 0, -1);
		}

		// Build an explicit JSON array. The previous code relied on
		// List.toString(), which only yields valid JSON by accident when every
		// element happens to be a JSON object.
		StringBuilder jsonArray = new StringBuilder("[");
		for (int i = 0; i < jsons.size(); i++) {
			if (i > 0) {
				jsonArray.append(',');
			}
			jsonArray.append(jsons.get(i));
		}
		jsonArray.append(']');

		try {
			return mapper.readValue(jsonArray.toString(),
					mapper.getTypeFactory().constructCollectionType(List.class, ManGodCount.class));
		} catch (Exception e) {
			// Preserve the null-on-failure contract, but log the cause properly.
			logger.error("parse manGodCount real-time json failed", e);
			return null;
		}
	}
}
