package org.apache.ocean.transaction;

import java.io.IOException;
import java.util.HashSet;

import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;
import org.apache.ocean.CDouble;
import org.apache.ocean.CLong;

/**
 * A write-once batch of index mutations — added documents and deleted
 * transaction IDs — buffered in an in-memory Lucene {@code RAMDirectory}
 * until {@link #commit()} hands the batch to its owning {@link Index}.
 *
 * <p>Not thread-safe. Once {@code commit()} has succeeded, every mutating
 * method throws {@link IllegalStateException}.
 */
public class Batch {
  public CLong id;
  public HashSet<CLong> deletedTransactionIDs = new HashSet<CLong>();
  public HashSet<CLong> addedTransactionIDs = new HashSet<CLong>();
  public IndexWriter indexWriter;
  public RAMDirectory directory;
  public Index index;
  // Flipped exactly once, at the end of commit(); guards all mutating methods.
  boolean isCommitted = false;

  /**
   * Creates a batch that writes into a fresh {@code RAMDirectory}.
   *
   * @param writerBufferSize optional RAM buffer size in MB for the writer;
   *        when {@code null} the Lucene default buffer size is kept
   * @param id identifier assigned to this batch
   * @param index owning index; supplies the analyzer via its schema and
   *        receives this batch when {@link #commit()} runs
   * @throws Exception if the underlying {@code IndexWriter} cannot be created
   */
  public Batch(CDouble writerBufferSize, CLong id, Index index) throws Exception {
    this.id = id;
    this.index = index;
    directory = new RAMDirectory();
    indexWriter = new IndexWriter(directory, false, index.indexSchema.getAnalyzer());
    if (writerBufferSize != null) indexWriter.setRAMBufferSizeMB(writerBufferSize.value);
  }

  /**
   * Optimizes and closes the writer, then registers this batch with the
   * owning index. May be called at most once.
   *
   * @throws IllegalStateException if the batch was already committed
   * @throws Exception on any writer or index failure
   */
  public void commit() throws Exception {
    ensureNotCommitted();
    indexWriter.optimize(true);
    indexWriter.close(true);
    // NOTE(review): if index.add(this) throws, the writer is already closed
    // but isCommitted stays false, so retrying commit() would fail inside
    // optimize(). Callers should treat a failed commit as fatal for this
    // batch — confirm this matches the intended recovery semantics.
    index.add(this);
    isCommitted = true;
  }

  /**
   * Records a transaction ID to be deleted when this batch is applied.
   *
   * @param transactionID the transaction to mark as deleted
   * @throws IllegalStateException if the batch was already committed
   */
  public void delete(CLong transactionID) throws Exception {
    ensureNotCommitted();
    deletedTransactionIDs.add(transactionID);
  }

  /**
   * Adds a document to the batch under a freshly allocated transaction ID
   * drawn from the index's transaction-ID sequence.
   *
   * @param document the document to index
   * @throws IllegalStateException if the batch was already committed
   * @throws Exception on writer failure
   */
  public void addDocument(Document document) throws Exception {
    ensureNotCommitted();
    CLong transactionID = new CLong(index.transactionIDSequence.incrementAndGet());
    addedTransactionIDs.add(transactionID);
    indexWriter.addDocument(document, index.indexSchema.getAnalyzer());
  }

  /**
   * Rejects mutation after commit. Uses {@code IllegalStateException}
   * (standard precondition type) instead of a raw {@code Exception};
   * the original "already committed" message is preserved.
   */
  private void ensureNotCommitted() {
    if (isCommitted) throw new IllegalStateException("already committed");
  }
}
