package com.pony.test;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

/**
 * An intermediate, in-memory form of an index that can be shipped between
 * Hadoop tasks. Documents are analyzed into a {@link RAMDirectory} via a
 * Lucene {@link IndexWriter}; the directory contents are (de)serialized
 * with {@code RAMDirectoryUtil}.
 *
 * <p>Not thread-safe: callers must serialize access externally.
 */
public class IntermediateForm implements Writable {

	/** In-memory index storage; replaced wholesale on deserialization. */
	private RAMDirectory dir;
	/** Lazily created writer; null until first {@link #process} and after {@link #closeWriter}. */
	private IndexWriter writer;

	/**
	 * Creates an empty form backed by a fresh RAM directory. The index
	 * writer is created lazily on first {@link #process} so that a form
	 * used only for deserialization never opens (and leaks) a writer.
	 *
	 * @throws IOException retained in the signature for backward compatibility
	 */
	public IntermediateForm() throws IOException {
		dir = new RAMDirectory();
	}

	/**
	 * Flushes and closes the index writer, if one is open. Safe to call
	 * repeatedly; subsequent {@link #process} calls re-create the writer.
	 *
	 * @throws IOException if closing the writer fails
	 */
	public void closeWriter() throws IOException {
		if (writer != null) {
			writer.close();
			writer = null;
		}
	}

	/**
	 * @return the backing directory; call {@link #closeWriter()} first if you
	 *         need all pending documents flushed into it
	 */
	public Directory getDirectory() {
		return dir;
	}

	/**
	 * Adds a document to the in-memory index, creating the writer on first use
	 * (or after {@link #closeWriter()} / {@link #readFields}).
	 *
	 * @param doc      the document to index
	 * @param analyzer the analyzer to use for this document
	 * @throws IOException if the write fails
	 */
	public void process(Document doc, Analyzer analyzer) throws IOException {
		if (writer == null) {
			// Lazily (re)create so the writer always targets the current dir.
			writer = createWriter();
		}
		writer.addDocument(doc, analyzer);
	}

	/**
	 * Creates an index writer over the current directory. The analyzer is
	 * null because one is supplied per document in {@link #process}.
	 * Compound files are disabled so individual index files can be
	 * serialized directly.
	 */
	private IndexWriter createWriter() throws IOException {
		IndexWriter newWriter =
				new IndexWriter(dir, null, true, IndexWriter.MaxFieldLength.UNLIMITED);
		newWriter.setUseCompoundFile(false);
		return newWriter;
	}

	/**
	 * Discards any existing index content in preparation for deserialization.
	 * Closes a live writer first: it would otherwise keep a reference to the
	 * directory being discarded and silently write into it.
	 */
	private void resetForm() throws IOException {
		closeWriter();
		if (dir.sizeInBytes() > 0) {
			// It's ok if we don't close a ram directory; an alternative is to
			// delete all the files and reuse it.
			dir.close();
			dir = new RAMDirectory();
		}
	}

	@Override
	public void readFields(DataInput in) throws IOException {
		resetForm();
		RAMDirectoryUtil.readRAMFiles(in, dir);
	}

	@Override
	public void write(DataOutput out) throws IOException {
		// NOTE(review): pending documents are only on disk after closeWriter();
		// callers are expected to close before serializing — confirm at call sites.
		String[] files = dir.listAll();
		RAMDirectoryUtil.writeRAMFiles(out, dir, files);
	}

}
