package org.newlucene.core.index;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Vector;

import org.newlucene.core.document.Document;
import org.newlucene.core.store.InputStream;
import org.newlucene.core.util.BitVector;

/**
 * An {@code IndexReader} over a single on-disk index segment.
 *
 * <p>On construction it opens the segment's stored-field files
 * ({@code .fdx}/{@code .fdt} via {@link FieldDataReader}), its term
 * dictionary ({@code .tis}/{@code .tii} via {@link SegmentTermInfos}),
 * its frequency postings ({@code .frq}) and, when present, the deletion
 * bit vector ({@code .del}).  The files are kept open for the lifetime
 * of the reader so a concurrent index update that removes them cannot
 * pull them out from under us.</p>
 *
 * <p>Methods that touch {@code deletedDocs} are {@code synchronized};
 * thread-safety of the remaining methods follows the superclass
 * contract.</p>
 */
final class SegmentReader extends IndexReader
{
	// When true, doClose() also closes the underlying directory.
	private boolean closeDirectory = false;
	// Name of the segment served by this reader; used as the file-name prefix.
	private String segment;
	FieldInfos fieldInfos;
	private FieldDataReader fieldsReader;

	SegmentTermInfos tis;

	// One bit per document; a set bit marks a deleted doc.
	// Null when the segment has no deletions.
	BitVector deletedDocs = null;
	// True once doDelete() has modified deletedDocs since the last flush in doClose().
	private boolean deletedDocsDirty = false;

	InputStream freqStream;

	/** Pairs a norm input stream with its (lazily read) byte values. */
	private static class Norm
	{
		public Norm(InputStream in) { this.in = in; }
		public InputStream in;
		public byte[] bytes;
	}
	// Field name -> Norm.  NOTE(review): nothing in this class populates or
	// reads this map yet; it appears reserved for norm support.
	private Map<String, Norm> norms = new HashMap<String, Norm>();

	/**
	 * Opens the segment described by {@code si}.
	 *
	 * @param si         the segment to open
	 * @param fieldInfos field metadata for the index
	 * @param closeDir   when true, {@link #doClose()} also closes the directory
	 * @throws IOException if any segment file cannot be opened
	 */
	SegmentReader(SegmentInfo si, FieldInfos fieldInfos, boolean closeDir) throws IOException
	{
		this(si, fieldInfos);
		closeDirectory = closeDir;
	}

	/**
	 * Opens the segment described by {@code si} without taking ownership
	 * of the directory.
	 *
	 * @param si         the segment to open
	 * @param fieldInfos field metadata for the index
	 * @throws IOException if any segment file cannot be opened
	 */
	SegmentReader(SegmentInfo si, FieldInfos fieldInfos) throws IOException
	{
		super(si.dir);
		segment = si.name;

		this.fieldInfos = fieldInfos;
		fieldsReader = new FieldDataReader(directory, segment, fieldInfos);

		tis = new SegmentTermInfos(directory, segment, fieldInfos);

		if (hasDeletions(si))
		{
			deletedDocs = new BitVector(directory, segment + ".del");
		}

		// make sure that all index files have been read or are kept open
		// so that if an index update removes them we'll still have them
		freqStream = directory.openFile(segment + ".frq");
	}

	/**
	 * Flushes pending deletions and closes every open segment file.
	 *
	 * <p>The closes are chained with nested try/finally so that a failure
	 * in an earlier close cannot leak the later resources; the first
	 * exception thrown is the one that propagates.</p>
	 *
	 * @throws IOException if flushing deletions or closing a file fails
	 */
	final synchronized void doClose() throws IOException
	{
		if (deletedDocsDirty)
		{
			// Write to a temp file and rename so a crash mid-write never
			// leaves a truncated .del file behind.
			deletedDocs.write(directory, segment + ".tmp");
			directory.renameFile(segment + ".tmp", segment + ".del");
			deletedDocsDirty = false;
		}

		try
		{
			fieldsReader.close();
		}
		finally
		{
			try
			{
				tis.close();
			}
			finally
			{
				try
				{
					if (freqStream != null)
					{
						freqStream.close();
					}
				}
				finally
				{
					if (closeDirectory) directory.close();
				}
			}
		}
	}

	/**
	 * Returns true if the segment described by {@code si} has a deletion
	 * file ({@code .del}) on disk.
	 */
	final static boolean hasDeletions(SegmentInfo si) throws IOException
	{
		return si.dir.fileExists(si.name + ".del");
	}

	/**
	 * Marks document {@code docNum} as deleted, lazily allocating the
	 * deletion bit vector on first use.  The change is flushed to disk
	 * by {@link #doClose()}.
	 */
	final synchronized void doDelete(int docNum) throws IOException
	{
		if (deletedDocs == null)
		{
			deletedDocs = new BitVector(maxDoc());
		}
		deletedDocsDirty = true;
		deletedDocs.set(docNum);
	}

	/**
	 * Returns the names of all files belonging to this segment; the
	 * {@code .del} file is included only when it exists on disk.
	 */
	final List<String> files() throws IOException
	{
		List<String> files = new ArrayList<String>(16);
		files.add(segment + ".fdx");
		files.add(segment + ".fdt");
		files.add(segment + ".tii");
		files.add(segment + ".tis");
		files.add(segment + ".frq");

		if (directory.fileExists(segment + ".del"))
		{
			files.add(segment + ".del");
		}

		return files;
	}

	/** Returns an enumeration of all the Terms and TermInfos in the set. */
	public final SegmentTermInfos termInfos() throws IOException
	{
		// A fresh instance so callers can enumerate independently of `tis`.
		return new SegmentTermInfos(directory, segment, fieldInfos);
	}

	/**
	 * Returns the stored fields of document {@code n}.
	 *
	 * @throws IllegalArgumentException if the document has been deleted
	 * @throws IOException              if reading the stored fields fails
	 */
	public final synchronized Document document(int n) throws IOException
	{
		if (isDeleted(n))
		{
			throw new IllegalArgumentException ("attempt to access a deleted document");
		}
		return fieldsReader.doc(n);
	}

	/** Returns true if document {@code n} has been marked deleted. */
	public final synchronized boolean isDeleted(int n)
	{
		return (deletedDocs != null && deletedDocs.get(n));
	}

	/** Returns a postings enumerator over this segment. */
	public final TermPostings termPostings() throws IOException
	{
		return new SegmentTermPostings(this);
	}

	/**
	 * Returns the number of documents containing term {@code t}, or 0 if
	 * the term does not occur in this segment.
	 */
	public final int docCount(Term t) throws IOException
	{
		TermInfo ti = tis.seek(t);
		return ti != null ? ti.docCount : 0;
	}

	/** Returns the number of live (non-deleted) documents in the segment. */
	public final int numDocs()
	{
		int n = maxDoc();
		if (deletedDocs != null)
		{
			n -= deletedDocs.count();
		}
		return n;
	}

	/** Returns one greater than the largest document number, deletions included. */
	public final int maxDoc()
	{
		return fieldsReader.size();
	}
}