package org.apache.lucene.index;

import java.io.IOException;

import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMOutputStream;
import org.apache.lucene.util.OpenBitSet;

/**
 * Takes old block, updated docs, outputs new block
 * 
 */
/**
 * Takes an old posting block plus a set of updated docs (adds or deletes)
 * and produces the merged new block.
 *
 * <p>Usage: call {@link #merge} first, then {@link #write} to emit the
 * merged block. Not thread-safe.
 */
public class TagBlockMerger {
  /** Iterator over the docs already present in the block being merged. */
  private final TagBlockTermDocs tagBlockTermDocs;
  /** Total doc count of the segment; sizes the working bit set. */
  private final int numDocs;
  /** Skip-list parameters forwarded to the output {@link TagBlockWriter}. */
  private final int skipInterval;
  private final int maxSkipLevels;
  /** Result of the last {@link #merge} call; null until merge() has run. */
  TagBlockWriter tagBlockData;
  /** Number of docs in the merged block; set by {@link #merge}. */
  int docFreq;

  /**
   * @param tagBlockTermDocs source of the existing block's doc ids
   * @param numDocs          total doc count (capacity of the working bit set)
   * @param skipInterval     skip interval for the output writer
   * @param maxSkipLevels    maximum skip levels for the output writer
   */
  public TagBlockMerger(TagBlockTermDocs tagBlockTermDocs, int numDocs, int skipInterval, int maxSkipLevels) {
    this.tagBlockTermDocs = tagBlockTermDocs;
    this.numDocs = numDocs;
    this.skipInterval = skipInterval;
    this.maxSkipLevels = maxSkipLevels;
  }

  /**
   * Merges the existing block with {@code docs}, either adding them to or
   * deleting them from the block, and builds the new block data.
   *
   * @param add  true to add the given docs, false to delete them
   * @param docs the doc ids to add or delete
   * @return the document frequency (doc count) of the merged block
   * @throws IOException if reading the existing block or iterating docs fails
   */
  public int merge(boolean add, DocIdSet docs) throws IOException {
    OpenBitSet newSet = new OpenBitSet(numDocs);
    setExistingDocs(newSet);
    // Single pass over the update set: the only difference between add and
    // delete is whether the bit is set or cleared (previously two
    // near-identical loops).
    DocIdSetIterator updatesIterator = docs.iterator();
    while (updatesIterator.next()) {
      if (add) {
        newSet.fastSet(updatesIterator.doc());
      } else {
        newSet.fastClear(updatesIterator.doc());
      }
    }
    docFreq = (int) newSet.cardinality();
    tagBlockData = new TagBlockWriter(skipInterval, maxSkipLevels, numDocs);
    // Re-emit the surviving docs, in ascending doc-id order, into the new block.
    DocIdSetIterator newSetIterator = newSet.iterator();
    while (newSetIterator.next()) {
      tagBlockData.add(newSetIterator.doc());
    }
    return docFreq;
  }

  /**
   * Writes the merged block data to the given output.
   *
   * @throws IllegalStateException if {@link #merge} has not been called yet
   * @throws IOException on write failure
   */
  public void write(IndexOutput output) throws IOException {
    if (tagBlockData == null) {
      // Fail with a clear message instead of an opaque NullPointerException.
      throw new IllegalStateException("merge(...) must be called before write(...)");
    }
    tagBlockData.write(output);
  }

  /**
   * Loads the docs already present in the block into {@code newSet}.
   *
   * @param newSet bit set to populate with the existing doc ids
   * @throws IOException if reading the existing block fails
   */
  private void setExistingDocs(OpenBitSet newSet) throws IOException {
    while (tagBlockTermDocs.next()) {
      newSet.set(tagBlockTermDocs.doc());
    }
  }
}
