package com.ibm.cps.spark.streaming.adapter;

import java.util.Collection;

import backtype.storm.task.IOutputCollector;
import backtype.storm.tuple.Values;

import com.ibm.cps.message.AbstractMessage;
import com.ibm.cps.message.MessageFactory;
import com.ibm.cps.newmessage.AbstractMetadata;
import com.ibm.cps.newmessage.BasicMetadata;
import com.ibm.cps.newmessage.MetadataFactory;
import com.ibm.cps.newmessage.OptimizedMetadata;
import com.ibm.util.TopologyStreamIds;
import com.ibm.util.exception.CPSException;

/**
 * Created by telekinesis on 5/4/15.
 */
public class EmbeddedProcessorSpout extends AbstractSparkEmbeddedSpout {
	/** Tenant whose previously persisted messages are replayed by {@link #init()}. */
	private final String tenantid;

	/**
	 * Creates a spout that emits onto the processor spout stream.
	 *
	 * @param collector output collector used for all emits
	 * @param tenantid  tenant identifier used to load persisted messages
	 */
	public EmbeddedProcessorSpout(IOutputCollector collector, String tenantid) {
		super(collector);
		this.tenantid = tenantid;
	}

	/**
	 * Replays all previously persisted messages for this tenant onto
	 * {@link TopologyStreamIds#PROCESSOR_SPOUT_STREAM}. A {@code null}
	 * result from {@link MessageFactory#loadExistedMessage} means there is
	 * nothing to replay and is treated as a no-op.
	 *
	 * @throws CPSException if loading or parsing stored metadata fails
	 */
	public void init() throws CPSException {
		Collection<String> messages = MessageFactory
				.loadExistedMessage(tenantid);
		if (messages == null) {
			return;
		}
		for (String json : messages) {
			AbstractMetadata metadata = MetadataFactory
					.parseJsonMetadata(json);
			// The original branched on (BasicMetadata || OptimizedMetadata)
			// vs. everything else, but both branches emitted identically, so
			// the redundant type check is collapsed here.
			// TODO(review): JointMetadata may eventually need a dedicated
			// emit path — confirm before diverging from this single emit.
			collector.emit(TopologyStreamIds.PROCESSOR_SPOUT_STREAM, null,
					new Values(metadata));
		}
	}

	/**
	 * Emits a newly produced message onto the processor spout stream.
	 *
	 * @param streamName requested stream name; NOTE(review): currently
	 *                   ignored — all data goes to PROCESSOR_SPOUT_STREAM.
	 *                   Confirm this is intentional with callers.
	 * @param data       message to emit
	 */
	@Override
	public void emitNewData(String streamName, AbstractMessage data) {
		collector.emit(TopologyStreamIds.PROCESSOR_SPOUT_STREAM, null,
				new Values(data));
	}

}