/**
 * 
 */
package org.metaverse.h2;

import gnu.trove.TObjectLongHashMap;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;

import javax.sql.DataSource;

import org.metaverse.h2.cache.ValueCache;
import org.metaverse.h2.sql.SQL;
import org.metaverse.warehouse.Senary;
import org.openrdf.model.Value;
import org.singularity.io.CloseablePipe;
import org.singularity.io.DME;
import org.singularity.io.DME.Code;

/**
 * Buffers {@link Senary} items and writes them to the database in batches,
 * either inserting or deleting depending on the configured operation code.
 * 
 * @author vjache
 * 
 */
public class SenaryMainPipe implements CloseablePipe<Senary> {

	private final DataSource _connPool;
	private final PreparedStatement _senaryInsert;
	private final PreparedStatement _senaryDelete;
	private final Connection _conn;
	
	// Senaries buffered since the last flush; flushed once it reaches _bunchSize.
	private final HashSet<Senary> _bunch;
	private final int _bunchSize;
	private final ValueCache _cache;
	// Code.Add selects the insert statement; any other code selects delete.
	private final Code _opCode;

	/**
	 * Creates a pipe holding a dedicated connection with prepared insert and
	 * delete statements for senaries.
	 * 
	 * @param aDataSource pool from which the dedicated connection is obtained
	 * @param aBunchSize number of buffered senaries that triggers an automatic flush
	 * @param aSql supplier of the senary insert/delete SQL text
	 * @param aValueCache cache used to resolve {@link Value}s to numeric ids
	 * @param aOpCode {@link Code#Add} to insert pushed senaries, otherwise delete
	 * @throws SQLException if the connection cannot be obtained or a statement
	 *             cannot be prepared
	 */
	public SenaryMainPipe(
			DataSource aDataSource, 
			int aBunchSize, 
			SQL aSql, 
			ValueCache aValueCache, 
			DME.Code aOpCode) throws SQLException {
		_connPool = aDataSource;
		_conn = _connPool.getConnection();
		_senaryInsert = _conn.prepareStatement(aSql.getSenaryInsert());
		_senaryDelete = _conn.prepareStatement(aSql.getSenaryDelete());
		_bunchSize = aBunchSize;
		_cache = aValueCache;
		_bunch = new HashSet<Senary>(_bunchSize);
		_opCode = aOpCode;
	}

	/**
	 * Flushes any buffered senaries, then releases both prepared statements
	 * and the connection.
	 * <p>
	 * Every resource is close-attempted even if an earlier step fails; the
	 * first failure is rethrown. (The original implementation leaked
	 * {@code _senaryDelete} entirely, and leaked the connection whenever
	 * {@code _senaryInsert.close()} threw.)
	 * 
	 * @throws IOException the first failure encountered while flushing or closing
	 */
	@Override
	public void close() throws IOException {
		IOException failure = null;
		try {
			flush();
		} catch (IOException e) {
			failure = e;
		}
		try {
			if (_senaryInsert != null)
				_senaryInsert.close();
		} catch (SQLException e) {
			if (failure == null)
				failure = new IOException(e);
		}
		try {
			if (_senaryDelete != null)
				_senaryDelete.close();
		} catch (SQLException e) {
			if (failure == null)
				failure = new IOException(e);
		}
		try {
			if (_conn != null)
				_conn.close();
		} catch (SQLException e) {
			if (failure == null)
				failure = new IOException(e);
		}
		if (failure != null)
			throw failure;
	}

	/**
	 * Writes all buffered senaries as one batch and clears the buffer.
	 * On failure the buffer is intentionally left intact so the caller may
	 * retry without losing items.
	 * 
	 * @throws IOException wrapping any {@link SQLException} from the batch write
	 */
	@Override
	public void flush() throws IOException {
		try {
			write(_bunch);
			_bunch.clear();
		} catch (SQLException e) {
			throw new IOException(e);
		}
	}

	/**
	 * Buffers a senary, flushing automatically once the buffer reaches the
	 * configured bunch size. Duplicates within one bunch collapse (HashSet).
	 * 
	 * @param aItem senary to insert or delete, per the configured op code
	 * @throws IOException if an automatic flush fails
	 */
	@Override
	public void push(Senary aItem) throws IOException {
		_bunch.add(aItem);
		if(_bunch.size()>=_bunchSize)
			flush();
	}
	
	/**
	 * Resolves all values of the bunch to ids through the cache, then executes
	 * the bunch as a single JDBC batch on the insert or delete statement.
	 * 
	 * @param aBunch senaries to write; may be empty (executes an empty batch)
	 * @throws SQLException on id resolution or batch execution failure
	 */
	private void write(Collection<Senary> aBunch) throws SQLException {
		
		final boolean isAdd = _opCode == Code.Add;
		final PreparedStatement operation = isAdd? _senaryInsert : _senaryDelete;
		
		// 1. Collect the distinct values of every slot (S, P, O, C, U) so ids
		// can be resolved in one cache round trip.
		final HashSet<Value> vals = new HashSet<Value>(aBunch.size() * 3);
		for (Senary sen : aBunch) {
			vals.addAll(sen.getSlots(Senary.SPOCU));
		}

		final TObjectLongHashMap<Value> val2idMap = readIdsThroughCache(vals);
		try
		{
			// 2. Bind each senary's slot ids and queue it on the batch.
			for (final Senary sen : aBunch) {
				int idx = 1;
				operation.setLong(idx++, val2idMap.get(sen.getSubject()));
				operation.setLong(idx++, val2idMap.get(sen.getPredicate()));
				operation.setLong(idx++, val2idMap.get(sen.getObject()));
				operation.setLong(idx++, val2idMap.get(sen.getContext()));
				operation.setLong(idx++, val2idMap.get(sen.getUser()));
				// Only the insert SQL carries the timestamp column.
				if(isAdd)
					operation.setTimestamp(idx++, toTimestamp(sen.getLastAssertTime()));
				
				operation.addBatch();
			}
			
			operation.executeBatch();
		}
		finally
		{
			// Drop any queued-but-unexecuted parameters so a failed batch does
			// not pollute the next write on this shared statement.
			operation.clearBatch();
		}
	}

	/**
	 * Converts an assert time to a SQL timestamp, defaulting to "now" when
	 * the senary carries no assert time.
	 */
	private Timestamp toTimestamp(Date lastAssertTime) {
		if(lastAssertTime == null)
			return new Timestamp(System.currentTimeMillis());
		return new Timestamp(lastAssertTime.getTime());
	}

	/**
	 * Resolves values to ids via the cache; the {@code true} flag is passed
	 * through to the cache (presumably "create ids for unknown values" —
	 * TODO confirm against ValueCache.getIdsForValues).
	 */
	private TObjectLongHashMap<Value> readIdsThroughCache(Set<Value> vals) throws SQLException {
		return _cache.getIdsForValues(vals, true);
	}

}
