package com.sijibao.gather.consumer.kafka;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.InterruptException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.alibaba.fastjson.JSON;
import com.sijibao.gather.consumer.ConsumerFileProcessor;
import com.sijibao.gather.consumer.ConsumerHandler;
import com.sijibao.gather.event.Event;
import com.sijibao.gather.event.serialization.Deserializer;
import com.sijibao.gather.event.serialization.Serializer;

public class ConsumerHandlerImpl
		implements ConsumerHandler
{
	private static final Logger logger = LoggerFactory
			.getLogger(ConsumerHandlerImpl.class);

	/** Effectively "block forever" poll timeout: ~10 years, in milliseconds. */
	private static final long POLL_TIMEOUT_MS = 10 * 365 * 24 * 3600 * 1000L;

	/** Pause between successful poll/write/commit cycles, in milliseconds. */
	private static final long POLL_PERIOD_MS = 60 * 1000L;

	// NOTE(review): this flag is static, so ALL ConsumerHandlerImpl instances
	// share it — calling destory() on one instance stops every instance.
	// Kept as-is for backward compatibility; confirm this is intended.
	private static volatile boolean running = true;

	/**
	 * Kafka adapter supplying the underlying consumer.
	 */
	private KafkaConsumerAdapter kafkaConsumerAdapter;
	/**
	 * File processor that persists serialized event lines.
	 */
	private ConsumerFileProcessor consumerFileProcessor;
	/**
	 * Serializers keyed by fully-qualified event class name.
	 */
	private Map<String, Serializer<Event>> serializerMap;
	/**
	 * Deserializers keyed by fully-qualified event class name.
	 */
	private Map<String, Deserializer<Event>> deserializerMap;

	//----------------------------------------------------------------

	public KafkaConsumerAdapter getKafkaConsumerAdapter()
	{
		return kafkaConsumerAdapter;
	}

	public void setKafkaConsumerAdapter(KafkaConsumerAdapter kafkaConsumerAdapter)
	{
		this.kafkaConsumerAdapter = kafkaConsumerAdapter;
	}

	public ConsumerFileProcessor getConsumerFileProcessor()
	{
		return consumerFileProcessor;
	}

	public void setConsumerFileProcessor(
			ConsumerFileProcessor consumerFileProcessor)
	{
		this.consumerFileProcessor = consumerFileProcessor;
	}

	public Map<String, Serializer<Event>> getSerializerMap()
	{
		return serializerMap;
	}

	public void setSerializerMap(Map<String, Serializer<Event>> serializerMap)
	{
		this.serializerMap = serializerMap;
	}

	public Map<String, Deserializer<Event>> getDeserializerMap()
	{
		return deserializerMap;
	}

	public void setDeserializerMap(
			Map<String, Deserializer<Event>> deserializerMap)
	{
		this.deserializerMap = deserializerMap;
	}

	/**
	 * Main consume loop. Polls Kafka, resolves each record's concrete
	 * {@link Event} subclass from the {@code className} carried in the
	 * payload, re-serializes it with the matching business serializer,
	 * groups the resulting lines by class name, writes each group to its
	 * own file, then commits offsets synchronously.
	 *
	 * Runs until {@link #destory()} flips {@code running} to {@code false},
	 * after which the Kafka adapter is destroyed.
	 */
	public void dispose()
	{
		while (running)
		{
			try
			{
				KafkaConsumer<String, String> consumer = kafkaConsumerAdapter.getConsumer();

				ConsumerRecords<String, String> records = consumer
						.poll(POLL_TIMEOUT_MS);

				if (records == null || records.isEmpty())
				{
					continue;
				}

				// Serialized lines grouped by event class name, flushed
				// to one file per business type below.
				Map<String, List<String>> linesByClassName = new HashMap<String, List<String>>();

				for (ConsumerRecord<String, String> record : records)
				{
					logger.info("Received message:offset={},key={},value={}",
							record.offset(), record.key(), record.value());

					// First parse as the base Event only to discover the
					// concrete subclass name embedded in the payload.
					Event baseEvent = JSON.parseObject(record.value(),
							Event.class);
					String className = baseEvent.getClassName();

					Class<?> clazz;
					try
					{
						clazz = this.getClass().getClassLoader()
								.loadClass(className);
					} catch (ClassNotFoundException e)
					{
						// FIX: pass the exception itself so the stack trace
						// is not lost (was logger.error(e.getMessage())).
						logger.error("Event class not found: " + className, e);
						continue;
					}

					// Re-parse the same payload as the concrete subclass.
					Object concreteEvent = JSON.parseObject(record.value(), clazz);

					// FIX: was misleadingly named "deserializer" although it
					// comes from the serializer map and is used to serialize.
					Serializer<Event> serializer = getSerializerMap()
							.get(className);

					if (serializer == null)
					{
						logger.error("className={},not found serializer.",
								className);
						continue;
					}

					String serializedLine = serializer
							.serialize((Event) concreteEvent);

					// FIX: computeIfAbsent replaces the manual
					// get / null-check / put sequence.
					linesByClassName
							.computeIfAbsent(className, k -> new LinkedList<String>())
							.add(serializedLine);
				}

				// Write each business type to its own file.
				// FIX: iterate entrySet() instead of keySet() + get()
				// (avoids a second lookup per key).
				for (Map.Entry<String, List<String>> entry : linesByClassName.entrySet())
				{
					getConsumerFileProcessor().writeLines(entry.getValue(), entry.getKey());
				}

				consumer.commitSync();
				Thread.sleep(POLL_PERIOD_MS);
			} catch (InterruptedException e)
			{
				// FIX: restore the interrupt flag instead of swallowing it,
				// so code further up the stack can observe the interruption.
				// The loop still exits only when destory() clears `running`,
				// preserving the original shutdown protocol.
				Thread.currentThread().interrupt();
			} catch (InterruptException e)
			{
				// Kafka's unchecked interruption wrapper: keep looping and
				// let the `running` flag decide whether to exit (original
				// behavior).
				continue;
			}
			catch (Exception e)
			{
				logger.error(e.getMessage(), e);
			}
		}

		if (kafkaConsumerAdapter != null)
		{
			kafkaConsumerAdapter.destroy();
		}
	}

	/**
	 * Stops the consume loop and releases held resources.
	 *
	 * Method name keeps the historical "destory" spelling for backward
	 * compatibility with existing callers / the ConsumerHandler contract.
	 */
	public void destory()
	{
		logger.info("Consumer handler Begin Destroy...");
		running = false;

		if (serializerMap != null)
		{
			serializerMap.clear();
			serializerMap = null;
		}

		if (deserializerMap != null)
		{
			deserializerMap.clear();
			deserializerMap = null;
		}

		if (consumerFileProcessor != null)
		{
			consumerFileProcessor.destory();
		}

		logger.info("Consumer handler Destroy Success");
	}

}
