package com.kamikaze.docidset.impl;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;

import org.apache.lucene.search.DocIdSet;

import com.kamikaze.docidset.api.DocSet;
import com.kamikaze.docidset.bitset.MyOpenBitSet;


/**
 * Abstract base class for compressed document-id sets.
 *
 * <p>Doc ids are appended in increasing order via {@link #addDoc(int)}. Ids are
 * buffered in {@link #current} as deltas from the previously added id (except
 * {@code current[0]}, which holds the absolute base id of the batch). When a
 * batch of {@link #BATCH_SIZE} entries fills up, a bit width {@code b} is chosen
 * from the delta bit-length histogram {@link #bVal}, the batch is handed to the
 * subclass-supplied {@link #compress()}, and the resulting blob is appended to
 * {@link #blob}. This b-selection / exception-count scheme appears to be a
 * PForDelta-style encoding — NOTE(review): confirm against the concrete
 * subclass implementations, which are not visible here.
 *
 * <p>Not thread-safe: all state is mutated without synchronization.
 */
public abstract class AbstractDocSet extends DocIdSet implements DocSet, Serializable
{
	
  
    private static final long serialVersionUID = 1L;
  
	/** Sentinel value for an invalid/absent doc id. */
	public static final int INVALID = -1;
	
	/** Cached ln(2); base-2 logs are computed as {@code Math.log(x)/logBase2}. */
	private static final double logBase2 = Math.log(2);
	
	/** Transient int for checking for Comodification with the iterator.
	 * 
	 */
	 protected transient int modCount = 0;
	
	/** Number of doc-id entries per compressed batch (size of {@link #current}).
	 * 
	 */
	public int BATCH_SIZE = 128;
	
	/** Threshold used when choosing the bit width {@code b}: the scan in
	 * {@link #addDoc(int)} stops once at least this many deltas would not fit.
	 */
	protected int BATCH_OVER = 12;
	
	
	/**	Absolute doc id at the start of the current (uncompressed) batch.
	 * 
	 */
	protected int current_base;
	 
	/** Last added doc id (absolute value); deltas are computed against this.
	 * 
	 */
	private int lastAdded = 0;
	
	/** List of compressed data blobs, one per completed batch.
	 * 
	 */
	protected List<MyOpenBitSet> blob = null;
	
	
	/** Current uncompressed batch: {@code current[0]} is the absolute base id,
	 * subsequent entries are deltas from the previously added id.
	 */
	protected int[] current = null;
	
	/** Number of entries filled in {@link #current}.
	 * 
	 */
	protected int current_size = 0;
	
	/** Number of "exception" deltas in the batch being compressed, i.e. deltas
	 * whose bit length exceeds the chosen width; read by {@link #compress()}.
	 */
	protected int current_ex_count = 0;
	
	/** Current bit width {@code b} used for compression; read by {@link #compress()}.
	 * 
	 */
	protected  int current_b = 1;
	
	/** Histogram of delta bit lengths for the current batch: {@code bVal[n]} is
	 * the number of deltas whose bit length is {@code n}. Indices 1..32 are
	 * used (zero deltas are skipped), hence length 33.
	 */
	protected  int [] bVal = null;
	
	/** Compresses the current batch using the state in {@link #current},
	 * {@link #current_size}, {@link #current_b}, {@link #current_ex_count} and
	 * {@link #current_base}.
	 *
	 * @return the compressed blob; {@link #addDoc(int)} casts the result to
	 *         {@link MyOpenBitSet}, so implementations must return that type
	 */
	 protected abstract Object compress();

	 
	 /** Creates an empty set with an empty blob list; the batch buffers are
	  * lazily allocated by the first {@link #addDoc(int)} call.
	  */
	 protected AbstractDocSet()
	 {
	   this.blob = new ArrayList<MyOpenBitSet>();
	 }
	 
 
	/** Internal decompression hook — not implemented: logs to stderr and
	 * returns {@code null}. (Unused within this class.)
	 *
	 * @return always {@code null}
	 */
	private int[] decompress(MyOpenBitSet packedSet)
	{
		System.err.println("Method not implemented");
		return null;
	}
	 
	/** Internal decompression hook — not implemented: logs to stderr and
	 * returns {@code null}. Subclasses are expected to override.
	 *
	 * @return always {@code null}
	 */
	protected int[] decompress(BitSet packedSet)
	{
		System.err.println("Method not implemented");
		return null;
	}
	

	/** Allocates the batch buffers and seeds {@code current_b} with
	 * ceil(log2(BATCH_OVER)). Called once, on the first added doc.
	 */
	private void initSet() 
	{
		this.current = new int[BATCH_SIZE];
		current_size = 0;
		current_b = (int) Math.ceil(Math.log(BATCH_OVER)/logBase2);
		// bVal[n] counts deltas of bit length n; 33 slots cover lengths 0..32.
		bVal = new int[33];
	}

	/** Total number of doc ids: all compressed batches (each holding exactly
	 * BATCH_SIZE entries) plus the entries in the current open batch.
	 */
	public int size() 
	{
		return blob.size()*BATCH_SIZE+current_size;
	}
	

	/** Adds a document to this set. Ids are assumed to arrive in increasing
	 * order (deltas are stored unsigned — TODO confirm callers guarantee this).
	 * Three cases: first doc ever (initialize), batch full (choose b, compress,
	 * roll a new batch seeded with this doc), or append a delta to the open batch.
	 */
	public void addDoc(int docid) 
	{
		if(size() == 0)
		{
			// First document: allocate buffers and store the absolute base id.
			initSet();
			current[current_size++]  = docid;
			current_base = docid;
			lastAdded = current_base;
		}
		
		else if(current_size == BATCH_SIZE)
		{
			
			int exceptionCount = 0;
			
			
			// Choose b: scan bit lengths from 31 downward, accumulating the
			// count of deltas too wide for each candidate width. Stop at the
			// first width k where at least BATCH_OVER deltas are that wide or
			// wider; deltas wider than k become "exceptions". The k>3 bound
			// plus the +1 below makes the minimum width 5 bits.
			// NOTE(review): if fewer than BATCH_OVER deltas have bit length > 3,
			// the loop never breaks and current_b keeps its previous value,
			// which is then incremented again below — looks like a latent bug;
			// confirm against the encoder's expectations.
			for(int k=31;k>3;k--)
			{
				exceptionCount+=bVal[k];
				if(exceptionCount>=BATCH_OVER)
				{
					current_b = k;
					// Deltas of exactly k bits fit in width k; only wider ones
					// remain exceptions.
					exceptionCount-=bVal[k];
					break;
				}
			}
			
			
			//Compensate for extra bit
			current_b+=1;
			
			// Expose the exception count to the subclass compressor.
			current_ex_count = exceptionCount;
			
			
			// compress() reads the protected batch state set up above.
			blob.add((MyOpenBitSet) compress());
			
			// Roll the batch: this doc becomes the absolute base of a new batch.
			current_size = 1;
			current_base = docid;
			lastAdded = current_base;
			current[0] = current_base;
			current_ex_count = 0;
			
			bVal = new int[33];
			
		 }// end batch boundary
		
		else{
				try
				{
				    
					// Store the delta and record its bit length
					// (floor(log2(delta)) + 1) in the histogram; zero deltas
					// (duplicate ids) are not counted.
					current[current_size] = docid - lastAdded;
					lastAdded = docid;
					if(current[current_size]!=0)
						bVal[(int)(Math.log(current[current_size])/logBase2)+1]++;
					
					current_size++;
				}
				catch(ArrayIndexOutOfBoundsException w)
				{
					// NOTE(review): best-effort guard — should be unreachable
					// given the current_size == BATCH_SIZE check above. If it
					// does fire, the docid is silently dropped even though
					// lastAdded was already advanced, corrupting later deltas.
					System.err.println("Error inserting DOC:"+docid);
					
				}
			
			} // end append to end of array
		
	}

}
