package com.seari.client.kafka;

import java.io.IOException;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;

import javax.annotation.PostConstruct;

import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.seari.bean.ResultTrain;
import com.seari.bean.TrainInfo;
import com.seari.bean.WindowInfo;
import com.seari.controller.ATSWebsocketController;
import com.seari.pojo.ATSMessageBean;
import com.seari.service.TrainServiceImpl;
/*
 * Concurrent version, kept temporarily for reference.
 * Not active: @Component and @KafkaListener are commented out, so Spring never registers it.
 */
//@Component
/**
 * Kafka consumer that ingests ATS train-position messages, converts them into
 * {@link ResultTrain} snapshots cached in {@code TrainServiceImpl.trainMap}
 * (outer key: lineId, inner key: trainGroupId), and broadcasts update/remove
 * events to websocket clients via {@link ATSWebsocketController}.
 *
 * <p>Currently disabled: both {@code @Component} and {@code @KafkaListener}
 * are commented out, so Spring never instantiates this class.
 */
//@Component
public class KafkaReceiverOld
{
	protected static final Logger logger = LoggerFactory.getLogger(KafkaReceiverOld.class);
	// NOTE(review): pattern separates hour/minute/second with '-' ("HH-mm-ss"), not the
	// conventional ':' — confirm this really matches the producer's sendTime format.
	private static final DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH-mm-ss");
	// Maximum acceptable message age: 2 minutes. Older messages are dropped as stale.
	private static final long MAX_TIME_GAP = 2 * 60 * 1000;
	// Shared, thread-safe mapper; unknown JSON properties are ignored on deserialization.
	public final static ObjectMapper MAPPER = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
	// Guards first-time creation of a per-line inner map (double-checked locking below).
	private final Object LOCK = new Object();

	/**
	 * Entry point for Kafka records. Parses the JSON payload, drops stale
	 * messages, then dispatches on {@code msgHead.msgId}.
	 *
	 * <p>Any parse/processing failure is logged and swallowed so a single bad
	 * record cannot kill the listener thread.
	 *
	 * @param record raw Kafka record; its value is expected to be a JSON document
	 *               with {@code msgHead} (sendTime, lineId, msgId) and {@code msgBody}
	 */
	//@KafkaListener(topics = "#{'${spring.kafka.topics}'.split(',')}")
	public void onMessage(ConsumerRecord<?, ?> record)
	{
		Optional<?> kafkaMessage = Optional.ofNullable(record.value());
		if (kafkaMessage.isPresent())
		{
			Object message = kafkaMessage.get();
			try
			{
				if (StringUtils.isNotBlank(message.toString()))
				{
					long currentTime = System.currentTimeMillis();
					String messageStr = message.toString();
					JsonNode jsonNode = MAPPER.readTree(messageStr);
					// sendTime is produced in GMT+8; convert to epoch millis for the lag check.
					long sendTime = LocalDateTime.parse(jsonNode.get("msgHead").get("sendTime").asText(), dtf)
							.toInstant(ZoneOffset.ofHours(8)).toEpochMilli();
					if (currentTime - sendTime > MAX_TIME_GAP)
					{
						// Drop data lagging more than 2 minutes behind wall-clock time.
						logger.warn("message lag too long:{}", messageStr);
						return;
					}
					String lineId = jsonNode.get("msgHead").get("lineId").asText();
					int msgId = jsonNode.get("msgHead").get("msgId").asInt(0);
					// Dispatch on message type.
					if (msgId == KafkaMessageHelper.MESSAGE_ALL)
					{
						// Full snapshot — currently only logged, not processed.
						logger.info("receive kafka all message:{}", messageStr);
					} else if (msgId == KafkaMessageHelper.MESSAGE_UPDATE) {
						TrainInfo trainInfo = MAPPER.treeToValue(jsonNode.get("msgBody"), TrainInfo.class);
						trainInfo.setUpdateTime(sendTime);
						trainInfo.setLineId(lineId);
						convertTrainInfo(trainInfo);
					} else if (msgId == KafkaMessageHelper.MESSAGE_REMOVE) {
						logger.info("receive kafka remove message:{}", messageStr);
						TrainInfo trainInfo = MAPPER.treeToValue(jsonNode.get("msgBody"), TrainInfo.class);
						trainInfo.setUpdateTime(sendTime);
						trainInfo.setLineId(lineId);
						removeTrain(trainInfo);
					} else if (msgId == KafkaMessageHelper.MESSAGE_ALIVE_STATUS) {
						// Heartbeat — currently only logged.
						logger.info("receive kafka alive message:{}", messageStr);
					} else {
						// Unknown msgId — deliberately ignored.
					}
				}
			} catch (Exception e)
			{
				// Log with full stack trace instead of printStackTrace(); keep consuming.
				logger.error("failed to process kafka message:{}", message, e);
			}
		}
	}

	/**
	 * Converts an incoming {@link TrainInfo} into the cached {@link ResultTrain}
	 * for its line, creating the per-line map on first sight of a line.
	 *
	 * <p>Fix: the original always returned {@code null} because the result of
	 * {@link #process} was never assigned; the converted train is now returned.
	 *
	 * @param originInfo update from ATS; lineId and trainGroupId identify the train
	 * @return the created or updated {@link ResultTrain}, never {@code null}
	 */
	public ResultTrain convertTrainInfo(TrainInfo originInfo)
	{
		String lineId = originInfo.getLineId();
		if (TrainServiceImpl.trainMap.get(lineId) == null) // first message for this line
		{
			synchronized (LOCK)
			{
				// Double-checked locking: the two-level ConcurrentHashMap alone cannot
				// make "check outer + create inner" atomic.
				if (TrainServiceImpl.trainMap.get(lineId) == null)
				{
					TrainServiceImpl.trainMap.put(lineId, new ConcurrentHashMap<>());
				}
			}
		}
		ConcurrentHashMap<String, ResultTrain> trainRefMap = TrainServiceImpl.trainMap.get(lineId);
		ResultTrain existing = trainRefMap.get(originInfo.getTrainGroupId());
		ResultTrain resultTrain;
		if (existing == null)
		{
			// Unknown train on this line: convert and cache it.
			resultTrain = process(originInfo, null);
			trainRefMap.put(originInfo.getTrainGroupId(), resultTrain);
		}
		else
		{
			// Known train: update the cached instance in place.
			resultTrain = process(originInfo, existing);
		}
		return resultTrain;
	}

	/**
	 * Applies {@code originInfo} onto {@code originTrain} (creating one when
	 * {@code null}), resolving the ATS window handle to its CELL/PLATFORM
	 * reference and broadcasting an "update" event when the position changed.
	 * Data older than the cached update time is ignored.
	 *
	 * <p>Fixes: the original dereferenced {@code windowRefMap} without a null
	 * check (NPE when a line has no window table) and called
	 * {@code windowInfo.getWindowRef()} in the logging branch that is reached
	 * precisely when {@code windowInfo} is {@code null}.
	 *
	 * @param originInfo  incoming ATS train data
	 * @param originTrain cached train to update, or {@code null} to create a fresh one
	 * @return the updated (or newly created) train, never {@code null}
	 */
	private ResultTrain process(TrainInfo originInfo, ResultTrain originTrain)
	{
		if (originTrain == null)
		{
			originTrain = new ResultTrain();
		}
		// Only apply strictly newer data.
		if (originInfo.getUpdateTime() > originTrain.getUpdateTime())
		{
			String lineId = originInfo.getLineId();
			if (lineId.length() == 1) // single-digit line ids are zero-padded ("3" -> "03")
			{
				lineId = "0" + lineId;
			}
			// Window table for this line: maps window handle -> CELL/PLATFORM reference.
			HashMap<String, WindowInfo> windowRefMap = TrainServiceImpl.windowInfoMap.get(lineId);
			if (lineId.equals(KafkaMessageHelper.LINE_03))
			{
				// Lines 3 and 4 share a track section; a train group id starting
				// with "4" is attributed to line 4.
				if (originInfo.getTrainGroupId().startsWith("4"))
				{
					lineId = KafkaMessageHelper.LINE_04;
					windowRefMap = TrainServiceImpl.windowInfoMap.get(lineId);
				}
			}
			if (windowRefMap == null)
			{
				// No window table configured for this line — nothing to resolve.
				logger.warn("no window map for line:{}", lineId);
				return originTrain;
			}
			WindowInfo windowInfo = windowRefMap.get(originInfo.getWinHandle());
			if (windowInfo == null)
			{
				// Unknown window handle; the original logging branch NPE'd here.
				logger.info("unknown window handle:{},line:{}", originInfo.getWinHandle(), lineId);
				return originTrain;
			}
			if (!windowInfo.getWindowRef().equals(originTrain.getPosition()))
			{
				originTrain.setLineId(lineId);
				originTrain.setPosType(windowInfo.getWindowType());
				if (StringUtils.isBlank(originInfo.getTrainGroupId()))
				{
					// No group id supplied: synthesize one from the train index.
					originTrain.setTrainGroupId(KafkaMessageHelper.TRAIN_GROUP_PREFIX + originInfo.getTrainIndex());
				} else {
					originTrain.setTrainGroupId(originInfo.getTrainGroupId());
				}
				originTrain.setTrainIndex(originInfo.getTrainIndex());
				originTrain.setUpdateTime(originInfo.getUpdateTime());
				originTrain.setPosition(windowInfo.getWindowRef()); // CELL or PLATFORM reference string
				originTrain.setPosition2(windowInfo.getWindowRef2());
				logger.debug("train update:{} pos:{} group:{}", originTrain, originTrain.getPosition(), originTrain.getTrainGroupId());
				try
				{
					// Push the position change to all websocket clients.
					ATSMessageBean atsMessageBean = new ATSMessageBean();
					atsMessageBean.setType("update");
					atsMessageBean.setData(originTrain);
					ATSWebsocketController.broadCastInfo(MAPPER.writeValueAsString(atsMessageBean));
				} catch (IOException e) // covers JsonProcessingException too
				{
					logger.error("broadcast train update failed", e);
				}
			} else {
				logger.info("ref:{},trainpos:{}", windowInfo.getWindowRef(), originTrain.getPosition());
			}
		}
		return originTrain;
	}

	/**
	 * Removes a train from the cache and broadcasts a "remove" event.
	 *
	 * <p>Fix: the cache is keyed by train group id (see {@link #convertTrainInfo}),
	 * but the original looked the train up by train index, so removals never
	 * matched; both lookups now use the group id. The missing-line NPE that the
	 * broad catch used to hide is now an explicit null check.
	 *
	 * @param originInfo identifies the train to remove (lineId + trainGroupId)
	 */
	private void removeTrain(TrainInfo originInfo)
	{
		try
		{
			ConcurrentHashMap<String, ResultTrain> trainRefMap = TrainServiceImpl.trainMap.get(originInfo.getLineId());
			if (trainRefMap == null)
			{
				return; // line never seen — nothing to remove
			}
			ResultTrain resultTrain = trainRefMap.get(originInfo.getTrainGroupId());
			if (resultTrain != null)
			{
				ATSMessageBean atsMessageBean = new ATSMessageBean();
				atsMessageBean.setType("remove");
				atsMessageBean.setData(resultTrain);
				ATSWebsocketController.broadCastInfo(MAPPER.writeValueAsString(atsMessageBean));
				// Drop the train only after the broadcast succeeded.
				trainRefMap.remove(originInfo.getTrainGroupId());
			}
		} catch (Exception e)
		{
			// Best-effort removal: log, never propagate to the listener thread.
			logger.error("failed to remove train", e);
		}
	}

	/** Logs which thread initializes the receiver (diagnostic only). */
	@PostConstruct
	public void init()
	{
		logger.info("kafka receiver init:{}", Thread.currentThread().getName());
	}

}
