package com.ibm.cps.spark.streaming.adapter;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.ibm.cps.message.AbstractMessage;
import com.ibm.util.TopologyStreamIds;
import com.ibm.util.exception.CPSException;

import backtype.storm.task.IOutputCollector;
import backtype.storm.tuple.Values;

/**
 * Spark embedded spout that pushes datasource messages onto the
 * datasource spout stream ({@code TopologyStreamIds.DATASOURCE_SPOUT_STREAM})
 * for a single tenant.
 *
 * Created by telekinesis on 9/8/15.
 */
public class EmbeddedDatasourceSpout extends AbstractSparkEmbeddedSpout {

    private static final Logger logger = LoggerFactory.getLogger(EmbeddedDatasourceSpout.class);

    /** Tenant this spout serves; fixed at construction time. */
    private final String tenantid;

    /**
     * @param collector output collector passed to {@link AbstractSparkEmbeddedSpout};
     *                  receives every tuple emitted by this spout
     * @param tenantid  identifier of the tenant whose datasources are emitted
     */
    public EmbeddedDatasourceSpout(IOutputCollector collector, String tenantid) {
        super(collector);
        this.tenantid = tenantid;
    }

    /**
     * Intended to emit the initial datasource list for {@code tenantid}; currently a no-op.
     *
     * TODO(review): restore the persistence-backed implementation. The removed
     * (commented-out) version fetched the tenant's datasource topic list from
     * {@code MongoPersistencyV2} and emitted each {@code DataSourceMetadata} on
     * {@code TopologyStreamIds.DATASOURCE_SPOUT_STREAM}, logging any
     * {@code CPSException} instead of propagating it.
     *
     * @throws CPSException reserved for the future persistence lookup
     */
    public void init() throws CPSException {
        // Intentionally empty until the persistence lookup is reinstated.
    }

    /**
     * Emits {@code data} as a single-field tuple on the datasource spout stream.
     *
     * @param streamName ignored — tuples are always routed to
     *                   {@code TopologyStreamIds.DATASOURCE_SPOUT_STREAM}
     * @param data       message to wrap in a {@link Values} tuple and emit
     */
    @Override
    public void emitNewData(String streamName, AbstractMessage data) {
        collector.emit(TopologyStreamIds.DATASOURCE_SPOUT_STREAM, null, new Values(data));
    }
}
