package com.ibm.cps.spark.streaming.adapter;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

import backtype.storm.spout.ISpoutOutputCollector;
import backtype.storm.task.OutputCollector;
import backtype.storm.topology.IRichBolt;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

import com.ibm.cps.spark.streaming.TestTuple;

/**
 * Output collector that short-circuits Storm's networking layer: tuples
 * emitted on a stream are delivered synchronously, on the emitting thread,
 * to the single {@link IRichBolt} registered for that stream id.
 *
 * <p>Acks, fails and error reports are no-ops in this embedded mode;
 * message ids and direct task ids are ignored.
 */
public class SparkEmbeddedCollector extends OutputCollector implements
		ISpoutOutputCollector {

	/** One bolt per stream id; concurrent so bolts can be registered while tuples are emitted. */
	private final ConcurrentHashMap<String, IRichBolt> bolts = new ConcurrentHashMap<>();

	public SparkEmbeddedCollector() {
		// No delegate collector is needed: every emit path is overridden below.
		super(null);
	}

	/**
	 * Registers the bolt that receives all tuples emitted on the given stream.
	 * A later registration for the same stream id replaces the earlier one.
	 *
	 * @param streamId stream the bolt consumes
	 * @param bolt     target bolt, executed synchronously on the emitting thread
	 */
	public void addBolt(String streamId, IRichBolt bolt) {
		bolts.put(streamId, bolt);
	}

	/**
	 * Convenience entry point: forwards the values to the bolt registered for
	 * the stream. {@link Values} is-a {@code List<Object>}, so it is passed
	 * straight through; there are no anchor tuples in embedded mode.
	 *
	 * @param streamId target stream
	 * @param values   tuple payload to deliver
	 */
	public void sendDataToBolt(String streamId, Values values) {
		this.emit(streamId, (Collection<Tuple>) null, values);
	}

	@Override
	public List<Integer> emit(String streamId, Collection<Tuple> anchor,
			List<Object> tuples) {
		dispatch(streamId, tuples);
		// No real task assignment exists in embedded mode; an empty list is
		// safer for callers than the previous null return.
		return Collections.emptyList();
	}

	@Override
	public void emitDirect(int i, String streamId, Collection<Tuple> anchor,
			List<Object> tuples) {
		// The direct task id is meaningless here — delivery always goes to
		// the single bolt registered for the stream.
		dispatch(streamId, tuples);
	}

	/** Looks up the bolt for the stream and executes it, failing fast when none is registered. */
	private void dispatch(String streamId, List<Object> tuples) {
		IRichBolt bolt = bolts.get(streamId);
		if (bolt == null) {
			// Fail fast with context instead of a bare NPE from bolt.execute.
			throw new IllegalStateException(
					"No bolt registered for stream: " + streamId);
		}
		bolt.execute(new TestTuple(streamId, tuples));
	}

	/** Acking is a no-op: delivery is synchronous and in-process. */
	@Override
	public void ack(Tuple tuple) {
	}

	/** Failure handling is a no-op in embedded mode. */
	@Override
	public void fail(Tuple tuple) {
	}

	/** Error reporting is a no-op in embedded mode. */
	@Override
	public void reportError(Throwable throwable) {
	}

	/** Spout-side emit: the message id is ignored; delegates to the bolt-side emit. */
	@Override
	public List<Integer> emit(String streamId, List<Object> tuple,
			Object messageId) {
		return emit(streamId, (Collection<Tuple>) null, tuple);
	}

	/** Spout-side direct emit: the message id is ignored. */
	@Override
	public void emitDirect(int i, String streamId, List<Object> tuple,
			Object messageId) {
		emitDirect(i, streamId, (Collection<Tuple>) null, tuple);
	}
}
