package com.icbc.dccsh.storm.mongodb.bolt;

import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.Validate;
import org.apache.commons.lang3.StringUtils;
import org.bson.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.icbc.dccsh.storm.mongodb.mapper.MongoMapper;
import com.icbc.dccsh.storm.util.TupleUtils;

import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Tuple;

/**
 * Storm bolt that buffers incoming tuples and bulk-inserts them into MongoDB.
 *
 * <p>Tuples are mapped to {@link Document}s via the configured {@link MongoMapper}
 * and accumulated until either the batch size is reached or a tick tuple arrives
 * (tick frequency = {@code _flushIntervalSecs}), at which point the whole batch is
 * written in one insert call and every buffered tuple is acked. On any failure the
 * whole pending batch is failed so Storm can replay it.
 */
public class MongoInsertBolt extends AbstractMongoBolt {
	private static final Logger LOG = LoggerFactory.getLogger(MongoInsertBolt.class);

	/** Default flush interval (seconds) used when no explicit interval is configured. */
	private static final int DEFAULT_FLUSH_INTERVAL_SECS = 1;

	// Configuration keys read from the topology config map passed to the constructor.
	private static final String CONF_BATCH_SIZE = "mongo.batchsize";
	private static final String CONF_ORDERED = "mongo.ordered";
	private static final String CONF_FLUSH_INTERVAL = "mongo.flushinterval";

	/** Maps each tuple to the BSON document that will be inserted. */
	private MongoMapper _mapper;

	// NOTE(review): currently unused by the active insert call (see flushBatch);
	// retained because the commented-out ordered-insert path relied on it.
	private boolean _ordered = true; // default is ordered.

	/** Number of buffered tuples that triggers a flush. */
	private int _batchSize = 10000;

	/** Tuples awaiting the next flush; acked only after a successful insert. */
	private List<Tuple> _tupleBatch;

	private int _flushIntervalSecs = DEFAULT_FLUSH_INTERVAL_SECS;

	/** Prefix of the target collection name; a per-batch suffix is appended at flush time. */
	private String _collectionName = "default_";

	/**
	 * Creates the bolt.
	 *
	 * @param url            MongoDB connection URL, handed to {@link AbstractMongoBolt}.
	 * @param collectionName collection-name prefix; the actual collection is this prefix
	 *                       plus a suffix taken from the batch's documents (see flushBatch).
	 * @param configuration  topology configuration; optional keys {@code mongo.batchsize}
	 *                       (Integer), {@code mongo.ordered} (Boolean) and
	 *                       {@code mongo.flushinterval} (Integer). Missing keys keep the
	 *                       field defaults instead of failing with an unboxing NPE.
	 * @param mapper         tuple-to-document mapper, must not be null.
	 */
	public MongoInsertBolt(String url, String collectionName, Map configuration, MongoMapper mapper) {
		super(url);

		Validate.notNull(mapper, "MongoMapper can not be null");

		this._collectionName = collectionName;

		// Read config values defensively: the previous direct casts threw an opaque
		// NullPointerException (auto-unboxing) whenever a key was absent from the config.
		Object batchSize = configuration.get(CONF_BATCH_SIZE);
		if (batchSize != null) {
			this._batchSize = (Integer) batchSize;
		}
		Object ordered = configuration.get(CONF_ORDERED);
		if (ordered != null) {
			this._ordered = (Boolean) ordered;
		}
		Object flushInterval = configuration.get(CONF_FLUSH_INTERVAL);
		if (flushInterval != null) {
			this._flushIntervalSecs = (Integer) flushInterval;
		}

		this._mapper = mapper;

		this._tupleBatch = new LinkedList<Tuple>();
	}

	/**
	 * Buffers the tuple (or, for tick tuples, forces a flush) and flushes the batch
	 * when full. All tuples of a batch are acked together after a successful insert;
	 * on failure the entire batch is failed for replay.
	 */
	@Override
	public void execute(Tuple tuple) {
		boolean forceFlush = false;
		try {
			if (TupleUtils.isTick(tuple)) {
				LOG.debug("TICK received! current batch status [{}/{}]", _tupleBatch.size(), _batchSize);
				// NOTE(review): the tick is acked even if the flush below fails; only the
				// buffered data tuples are replayed. Confirm this is the intended contract.
				collector.ack(tuple);
				forceFlush = true;
			} else {
				_tupleBatch.add(tuple);
				if (_tupleBatch.size() >= _batchSize) {
					forceFlush = true;
				}
			}

			if (forceFlush && !_tupleBatch.isEmpty()) {
				flushBatch();
			}
		} catch (Exception e) {
			this.collector.reportError(e);
			// Fail every buffered tuple so Storm replays the whole batch.
			for (Tuple t : _tupleBatch) {
				collector.fail(t);
			}
			_tupleBatch.clear();
		}
	}

	/**
	 * Converts the buffered tuples to documents, inserts them in one call and acks them.
	 *
	 * <p>The target collection is {@code _collectionName} + the LAST non-empty
	 * {@code "indexname"} value seen in the batch (falling back to {@code "default"}).
	 * NOTE(review): if a batch mixes documents with different {@code indexname} values,
	 * they are ALL written to the collection named by the last one — verify this is
	 * intentional rather than grouping documents per suffix.
	 */
	private void flushBatch() {
		String lastCollectionNameSuffix = "default";
		List<Document> docs = new LinkedList<Document>();
		for (Tuple t : _tupleBatch) {
			Document doc = _mapper.toDocument(t);
			docs.add(doc);
			String cn = doc.getString("indexname");
			if (!StringUtils.isEmpty(cn)) {
				lastCollectionNameSuffix = cn;
			}
		}

		mongoClient.insert(_collectionName + lastCollectionNameSuffix, docs, true);

		for (Tuple t : _tupleBatch) {
			collector.ack(t);
		}
		_tupleBatch.clear();
	}

	/** Sets the batch size (number of tuples per bulk insert). Fluent. */
	public MongoInsertBolt withBatchSize(int batchSize) {
		this._batchSize = batchSize;
		return this;
	}

	/** Sets the ordered-insert flag (currently only stored, not applied). Fluent. */
	public MongoInsertBolt withOrdered(boolean ordered) {
		this._ordered = ordered;
		return this;
	}

	/** Sets the tick-driven flush interval in seconds. Fluent. */
	public MongoInsertBolt withFlushIntervalSecs(int flushIntervalSecs) {
		this._flushIntervalSecs = flushIntervalSecs;
		return this;
	}

	/** Registers the tick frequency so Storm delivers tick tuples for periodic flushes. */
	@Override
	public Map<String, Object> getComponentConfiguration() {
		return TupleUtils.putTickFrequencyIntoComponentConfig(super.getComponentConfiguration(), _flushIntervalSecs);
	}

	/** This bolt emits nothing downstream. */
	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {

	}

}