package org.dragonnova.iot.business.cloud.statistics;

import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;
import java.util.Stack;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import org.dragonnova.iot.business.cloud.conf.Constants;
import org.dragonnova.iot.business.cloud.mq.message.NetFlowStatMessage;
import org.dragonnova.iot.business.cloud.mq.message.TagMessage;
import org.dragonnova.iot.business.cloud.mq.service.KafkaService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.util.ErrorHandler;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.SerializerFeature;

/**
 * Aggregates flow samples and threshold alarms and periodically publishes
 * them to Kafka via two background worker tasks:
 * <ul>
 *   <li>{@link ThresholdTask} — drains {@code thresholdStore} and sends one
 *       alarm message per stored {@link TagMessage};</li>
 *   <li>{@link StatTask} — averages the buffered up/down flow samples and
 *       sends a single {@link NetFlowStatMessage}.</li>
 * </ul>
 * Thread-safety: producers call {@link #putUpFlow}, {@link #putDownFlow} and
 * {@link #putThreshold} from arbitrary threads; the worker tasks are the only
 * consumers. {@code Stack} (a {@code Vector}) synchronizes each operation on
 * itself, so compound snapshot-and-drain sequences lock the stack explicitly.
 */
public class MessageProcessor implements InitializingBean, DisposableBean {

	@Autowired
	private KafkaService kafkaService;

	private final static Logger LOGGER = LoggerFactory
			.getLogger(MessageProcessor.class);
	private static final int TASK_POOL_SIZE = 2;
	private static final int TASK_THRESHOLD_PERIOD = 2000;
	private static final int TASK_STAT_PERIOD = 1000;
	/** Pause between polls inside each worker loop, in milliseconds. */
	private static final long POLL_INTERVAL_MS = 100L;

	// Generic stat cache exposed through getStatCache().
	private final ConcurrentMap<String, Object> statStore;
	// Pending TagMessage alarms, keyed by caller-chosen id; drained by ThresholdTask.
	private final ConcurrentMap<String, Object> thresholdStore;
	// Buffered flow samples; drained atomically by StatTask.
	private final Stack<Double> upFlowStack;
	private final Stack<Double> downFlowStack;

	private final ThreadPoolTaskScheduler taskScheduler;
	// Volatile stop flag observed by both worker loops.
	private volatile boolean running = false;

	public MessageProcessor() {
		this.statStore = new ConcurrentHashMap<String, Object>(5);
		this.thresholdStore = new ConcurrentHashMap<String, Object>(1024);
		this.taskScheduler = new ThreadPoolTaskScheduler();
		this.taskScheduler.initialize();
		this.upFlowStack = new Stack<>();
		this.downFlowStack = new Stack<>();
		init();
	}

	/**
	 * Configures the scheduler and starts both worker tasks. Invoked from the
	 * constructor; calling it again would schedule duplicate workers.
	 */
	public void init() {
		this.taskScheduler.setPoolSize(TASK_POOL_SIZE);
		this.taskScheduler.setThreadNamePrefix(MessageProcessor.class.getName()
				+ "-T");
		this.taskScheduler.setDaemon(true);
		this.taskScheduler.setErrorHandler(new MessageErrorHandler());
		startThread();
	}

	/** Returns the cached stat value for {@code key}, or {@code null}. */
	public Object getStatCache(Object key) {
		return this.statStore.get(key);
	}

	/** Buffers one upstream flow sample for the next stat aggregation. */
	public void putUpFlow(Double upflow) {
		this.upFlowStack.add(upflow);
	}

	/**
	 * Buffers one downstream flow sample for the next stat aggregation.
	 * Companion to {@link #putUpFlow}; without it the down-flow buffer is
	 * never populated and every stat message reports zero down-flow.
	 */
	public void putDownFlow(Double downflow) {
		this.downFlowStack.add(downflow);
	}

	/** Stores a pending threshold alarm; overwrites any previous value for {@code key}. */
	public void putThreshold(String key, Object value) {
		this.thresholdStore.put(key, value);
	}

	public boolean isRunning() {
		return this.running;
	}

	private void startThread() {
		this.running = true;

		// NOTE(review): each task body contains its own while/sleep loop, so a
		// single fixed-rate execution never returns and permanently occupies
		// one pool thread; the configured periods below are effectively
		// unused. Kept as-is to preserve the existing timing behavior.
		this.taskScheduler.scheduleAtFixedRate(new ThresholdTask(),
				TASK_THRESHOLD_PERIOD);
		this.taskScheduler
				.scheduleAtFixedRate(new StatTask(), TASK_STAT_PERIOD);
	}

	/** Polls {@code thresholdStore} and publishes each pending alarm to Kafka. */
	private class ThresholdTask implements Runnable {

		@Override
		public void run() {

			while (isRunning()) {
				try {
					try {
						Thread.sleep(POLL_INTERVAL_MS);
					} catch (InterruptedException e) {
						// Restore the interrupt status and exit: swallowing it
						// would make the next sleep() throw immediately and
						// busy-spin until running flips to false.
						Thread.currentThread().interrupt();
						return;
					}

					if (!thresholdStore.isEmpty()) {
						handlerMessage();
					}
				} catch (Exception e) {
					LOGGER.error("send threshold message error.", e);
				}
			}
		}

		// Drains the store with Iterator.remove so entries added concurrently
		// are picked up on the next pass rather than lost.
		private void handlerMessage() {
			for (Iterator<Entry<String, Object>> iter = thresholdStore
					.entrySet().iterator(); iter.hasNext();) {
				TagMessage tagMessage = (TagMessage) iter.next().getValue();
				String jsonMessage = JSON.toJSONString(tagMessage,
						SerializerFeature.IgnoreNonFieldGetter);
				kafkaService.sendMessage(kafkaService.createProducerRecord(
						Constants.TOPIC_ALARM,
						Long.toHexString(tagMessage.getPhone()), jsonMessage));
				iter.remove();
			}
		}
	}

	/** Periodically averages the buffered flow samples into one stat message. */
	private class StatTask implements Runnable {

		@Override
		public void run() {

			while (isRunning()) {
				try {
					try {
						Thread.sleep(POLL_INTERVAL_MS);
					} catch (InterruptedException e) {
						// Restore the interrupt status and exit (see ThresholdTask).
						Thread.currentThread().interrupt();
						return;
					}

					if (!upFlowStack.isEmpty()) {
						handlerMessage();
					}

				} catch (Exception e) {
					LOGGER.error("send stat message error.", e);
				}
			}
		}

		private void handlerMessage() {
			NetFlowStatMessage statMessage = createStatMessage();
			String jsonMessage = JSON.toJSONString(statMessage,
					SerializerFeature.IgnoreNonFieldGetter);
			// Send the aggregated statistics to the netflow topic.
			kafkaService.sendMessage(kafkaService.createProducerRecord(
					Constants.TOPIC_NETFLOW, statMessage.getId(), jsonMessage));
		}

		/**
		 * Builds a stat message from the currently buffered samples.
		 * Each buffer is snapshot-and-drained under its own monitor so samples
		 * pushed while we aggregate are kept for the next cycle instead of
		 * being wiped by a trailing clear(); this also prevents
		 * ConcurrentModificationException from streaming a live Vector.
		 */
		private NetFlowStatMessage createStatMessage() {
			List<Double> upSamples;
			synchronized (upFlowStack) {
				upSamples = new ArrayList<>(upFlowStack);
				upFlowStack.clear();
			}
			List<Double> downSamples;
			synchronized (downFlowStack) {
				downSamples = new ArrayList<>(downFlowStack);
				downFlowStack.clear();
			}

			// NaN samples are skipped entirely; the old reduce reset the whole
			// accumulated sum to 0 whenever it met a NaN element.
			double upSum = upSamples.stream()
					.filter(d -> d != null && !d.isNaN())
					.mapToDouble(Double::doubleValue).sum();
			double downSum = downSamples.stream()
					.filter(d -> d != null && !d.isNaN())
					.mapToDouble(Double::doubleValue).sum();

			// Divide by 1000.0 (not integer 1000): a period under one second
			// would otherwise truncate to 0 and yield Infinity.
			double periodSeconds = TASK_STAT_PERIOD / 1000.0;
			double avgUpFlow = upSamples.isEmpty() ? 0.0
					: upSum / upSamples.size() / periodSeconds;
			double avgDownFlow = downSamples.isEmpty() ? 0.0
					: downSum / downSamples.size() / periodSeconds;

			NetFlowStatMessage statMessage = new NetFlowStatMessage();
			statMessage.setCount(upSamples.size() + downSamples.size());
			statMessage.setCreateTime(new Date());
			statMessage.setUpflow(avgUpFlow);
			statMessage.setDownflow(avgDownFlow);
			statMessage.setId(Constants.idGenerator.generateId().toString());

			return statMessage;
		}
	}

	@Override
	public void destroy() throws Exception {
		this.running = false;
		this.taskScheduler.shutdown();
		this.statStore.clear();
		this.thresholdStore.clear();
		this.upFlowStack.clear();
		// Was missing: keep teardown symmetric with the up-flow buffer.
		this.downFlowStack.clear();
	}

	@Override
	public void afterPropertiesSet() throws Exception {

	}

	/** Routes uncaught task errors into the class logger. */
	private class MessageErrorHandler implements ErrorHandler {

		@Override
		public void handleError(Throwable t) {
			LOGGER.error("statcache handler error.", t);
		}
	}

}
