package com.ibm.cps.storm;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.apache.log4j.Logger;

import com.google.common.base.Throwables;
import com.google.gson.Gson;
import com.ibm.cps.message.ParsedDataMessage;
import com.ibm.cps.newmessage.AbstractMetadata;
import com.ibm.cps.processors.AbstractProcessor;
import com.ibm.cps.processors.ProcessorFactory;
import com.ibm.util.ErrorCode;
import com.ibm.util.TopologyMessageFieldIds;
import com.ibm.util.TopologyStreamIds;
import com.ibm.util.exception.CPSException;

import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

@SuppressWarnings("rawtypes")
public class ProcessorBolt extends BaseRichBolt {
	private static final long serialVersionUID = 1L;

	/** Maps processor id -> processor instance; populated from the metadata stream. */
	private HashMap<String, AbstractProcessor> id2Processor;
	private OutputCollector collector;
	private ProcessorFactory factory;
	private static final Logger logger = Logger.getLogger(ProcessorBolt.class);

	@Override
	public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
		initialize(collector);
	}

	/**
	 * Alternative initialization hook for callers outside the Storm lifecycle
	 * (e.g. tests or the Spark-streaming path).
	 *
	 * @param taskId    currently unused; kept for interface compatibility
	 * @param collector collector used to emit downstream tuples
	 */
	public void manuallyPrepare(int taskId, OutputCollector collector) {
		initialize(collector);
	}

	/** Shared setup for prepare/manuallyPrepare: reset the processor map and acquire the factory. */
	private void initialize(OutputCollector collector) {
		id2Processor = new HashMap<String, AbstractProcessor>();
		this.collector = collector;
		try {
			factory = ProcessorFactory.getInstance();
		} catch (CPSException e) {
			// Log with the throwable so log4j records the full stack trace. The factory
			// stays null on failure; subsequent metadata tuples will then fail and be
			// surfaced by the catch-all in execute().
			logger.error("Failed to obtain ProcessorFactory instance: " + e.getMessage(), e);
		}
	}

	/**
	 * Dispatches an incoming tuple by its source stream: processor creation
	 * (metadata), processor deletion, data processing, or Spark batch shutdown.
	 * Tuples from unrecognized streams are silently ignored.
	 */
	@Override
	public void execute(Tuple input) {
		try {
			String streamId = input.getSourceStreamId();
			logger.info("Stream id in processor bolt is " + streamId);

			if (isMetadataStream(streamId)) {
				handleMetadata(input);
			} else if (isMetadataDeleteStream(streamId)) {
				handleMetadataDelete(input);
			} else if (isDataStream(streamId)) {
				handleData(input);
			} else if (isSparkBatchEnding(streamId)) {
				shutdownForSparkStreaming();
			}
		} catch (Throwable e) {
			// Catch-all keeps the bolt alive on any processing failure.
			// NOTE(review): the tuple is neither acked nor failed here — presumably
			// anchoring/acking is handled elsewhere in the topology; confirm.
			logger.error("Failed to process tuple: " + e.getMessage(), e);
		}
	}

	/** Creates a processor from an incoming metadata tuple and registers it in the map. */
	private void handleMetadata(Tuple input) throws Exception {
		AbstractMetadata metadata = (AbstractMetadata) input.getValue(0);

		AbstractProcessor processor = factory.createProcessor(metadata, id2Processor);
		if (processor != null) {
			factory.updateProcessorMap(metadata.getProcessorid(), processor, id2Processor, metadata);
			logger.info("Add processor " + metadata.getProcessorid()
					+ " in processor bolt, current Processors: " + new Gson().toJson(id2Processor.keySet()));
		}
	}

	/** Removes the (source, destination) link for an existing processor, if present. */
	private void handleMetadataDelete(Tuple input) throws Exception {
		String source = (String) input.getValue(0);
		String destination = (String) input.getValue(1);

		AbstractProcessor processor = id2Processor.get(destination);
		if (processor != null) {
			factory.deleteProcessorMap(id2Processor, processor, source);
		} else {
			logger.warn("Processor " + destination + " doesn't exist.");
		}
	}

	/**
	 * Runs a data message through the processor registered for its destination and
	 * emits the results grouped by timestamp key.
	 *
	 * Tuple layout (as read here): 0 = destination id, 1 = ParsedDataMessage,
	 * 3 = buffering flag (field 2 is not read by this bolt).
	 *
	 * @throws CPSException if no processor is registered for the destination
	 */
	private void handleData(Tuple input) throws Exception {
		ParsedDataMessage dataMessage = (ParsedDataMessage) input.getValue(1);
		String destination = input.getString(0);
		logger.info("destination is " + destination);

		// Single lookup instead of repeated map gets.
		AbstractProcessor processor = id2Processor.get(destination);
		if (processor == null) {
			throw new CPSException(ErrorCode.NULL, "No processor for " + destination);
		}

		boolean isBuffer = (boolean) input.getValue(3);
		if (isBuffer) {
			processor.startBatch();
		} else {
			processor.end();
		}

		Collection<ParsedDataMessage> returnData = processor.execute(dataMessage);
		groupMessagesByTskey(returnData, destination);
	}

	/**
	 * Groups non-null result messages by their timestamp key and emits one tuple
	 * per group on the dispatch stream. Messages with null object values are dropped.
	 */
	private void groupMessagesByTskey(Collection<ParsedDataMessage> returnData, String processorid) {
		if (returnData == null || returnData.isEmpty()) {
			return;
		}
		Map<Comparable, Collection<ParsedDataMessage>> tskey2Msgs = new HashMap<>();
		for (ParsedDataMessage retMsg : returnData) {
			if (retMsg != null && retMsg.getObjectValues() != null) {
				Comparable tskey = retMsg.getTsKey();
				Collection<ParsedDataMessage> group = tskey2Msgs.get(tskey);
				if (group == null) {
					group = new ArrayList<ParsedDataMessage>();
					tskey2Msgs.put(tskey, group);
				}
				group.add(retMsg);
			}
		}
		for (Map.Entry<Comparable, Collection<ParsedDataMessage>> entry : tskey2Msgs.entrySet()) {
			// NOTE(review): STREAM_TO_DISPATCH is declared with fields
			// (SOURCE, MESSAGE, TS_KEY) but values are emitted as
			// (messages, tskey, processorid) — verify this ordering against consumers.
			collector.emit(TopologyStreamIds.STREAM_TO_DISPATCH,
					new Values(entry.getValue(), entry.getKey(), processorid));
		}
	}

	private boolean isMetadataDeleteStream(String streamId) {
		return matchesStream(streamId, TopologyStreamIds.METADATA_DELETE_STREAM);
	}

	private boolean isDataStream(String streamId) {
		return matchesStream(streamId, TopologyStreamIds.DATAMESSAGE_STREAM);
	}

	private boolean isMetadataStream(String streamId) {
		return matchesStream(streamId, TopologyStreamIds.METADATA_STREAM);
	}

	private boolean isSparkBatchEnding(String streamId) {
		return matchesStream(streamId, TopologyStreamIds.SPARK_BATCH_ENDING);
	}

	/** Null-safe stream-id comparison shared by all stream predicates. */
	private static boolean matchesStream(String streamId, String expected) {
		return streamId != null && streamId.equals(expected);
	}

	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
		declarer.declareStream(TopologyStreamIds.STREAM_TO_SEND, new Fields(TopologyMessageFieldIds.SOURCE,
				TopologyMessageFieldIds.DESTINATION, TopologyMessageFieldIds.TYPE, TopologyMessageFieldIds.MESSAGE));
		declarer.declareStream(TopologyStreamIds.STREAM_TO_DISPATCH, new Fields(TopologyMessageFieldIds.SOURCE,
				TopologyMessageFieldIds.MESSAGE, TopologyMessageFieldIds.TS_KEY));
	}

	/** Disposes every registered processor when a Spark streaming batch ends. */
	public void shutdownForSparkStreaming() {
		logger.info("===== Shutting down topology during spark batch end");
		for (AbstractProcessor processor : id2Processor.values()) {
			processor.dispose();
		}
	}
}