package org.newlucene.core.index;

import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
import java.io.IOException;

import org.newlucene.core.store.Directory;
import org.newlucene.core.store.OutputStream;
import org.newlucene.core.util.BitVector;

/**
 * Merges several source segments into a single new segment: first the stored
 * field data, then the term dictionary and postings. Source readers are
 * closed when the merge finishes, whether it succeeds or fails.
 *
 * <p>Not thread-safe; intended for single-threaded use by the index writer.
 */
final class SegmentMerger
{
	/** Directory the merged segment is written to. */
	private final Directory directory;
	/** Name of the new merged segment. */
	private final String segment;
	/** Source segments to merge; all are closed by {@link #merge()}. */
	private final List<SegmentReader> readers = new ArrayList<SegmentReader>();
	private final FieldInfos fieldInfos;

	// Opened by mergeTerms() and closed in its finally block.
	private OutputStream freqStream = null;
	private TermInfosWriter termInfosWriter = null;
	private SegmentMergeQueue queue = null;

	/** Reused for every dictionary entry to minimize allocation (consing). */
	private final TermInfo termInfo = new TermInfo();

	/**
	 * @param dir        directory to write the merged segment into
	 * @param name       name of the merged segment
	 * @param fieldInfos field metadata shared by all source segments
	 */
	SegmentMerger(Directory dir, String name, FieldInfos fieldInfos)
	{
		directory = dir;
		segment = name;
		this.fieldInfos = fieldInfos;
	}

	/** Adds a source segment to be merged. */
	final void add(SegmentReader reader)
	{
		readers.add(reader);
	}

	/** Returns the i-th source segment reader. */
	final SegmentReader segmentReader(int i)
	{
		return readers.get(i); // list is generic; no cast needed
	}

	/**
	 * Runs the merge: field data first, then terms and postings.
	 * Every source reader is closed afterwards, even when the merge or an
	 * earlier close() fails; the first close failure is rethrown.
	 *
	 * @throws IOException on any read or write failure
	 */
	final void merge() throws IOException
	{
		try
		{
			mergeFieldData();
			mergeTerms();
		}
		finally
		{
			// Close ALL readers even if one close() throws; remember the
			// first failure and rethrow it once the rest are closed.
			IOException firstFailure = null;
			for (SegmentReader reader : readers)
			{
				try
				{
					reader.close();
				}
				catch (IOException e)
				{
					if (firstFailure == null) firstFailure = e;
				}
			}
			if (firstFailure != null) throw firstFailure;
		}
	}

	/**
	 * Copies the stored field values of every non-deleted document from each
	 * source segment into the new segment, in reader order.
	 */
	private final void mergeFieldData() throws IOException
	{
		FieldDataWriter fieldsWriter = new FieldDataWriter(directory, segment, fieldInfos);
		try
		{
			for (SegmentReader reader : readers)
			{
				BitVector deletedDocs = reader.deletedDocs;
				int maxDoc = reader.maxDoc();
				for (int j = 0; j < maxDoc; j++)
				{
					if (deletedDocs == null || !deletedDocs.get(j)) // skip deleted docs
					{
						fieldsWriter.addDocument(reader.document(j));
					}
				}
			}
		}
		finally
		{
			fieldsWriter.close();
		}
	}

	/**
	 * Opens the frequency output and the term-dictionary writer, merges the
	 * term infos, and closes everything. Nested try/finally guarantees each
	 * resource's close() runs even if an earlier close() throws.
	 */
	private final void mergeTerms() throws IOException
	{
		try
		{
			freqStream = directory.createFile(segment + ".frq");
			termInfosWriter = new TermInfosWriter(directory, segment, fieldInfos);
			mergeTermInfos();
		}
		finally
		{
			try
			{
				if (freqStream != null) freqStream.close();
			}
			finally
			{
				try
				{
					if (termInfosWriter != null) termInfosWriter.close();
				}
				finally
				{
					if (queue != null) queue.close();
				}
			}
		}
	}

	/**
	 * Merges the term dictionaries of all source segments using a priority
	 * queue ordered by term. Segments positioned on the same term are popped
	 * together, their postings appended, then each is advanced and re-queued
	 * (or closed once exhausted).
	 */
	private final void mergeTermInfos() throws IOException
	{
		queue = new SegmentMergeQueue(readers.size());
		int base = 0; // new-segment doc number where this reader's docs begin
		for (SegmentReader reader : readers)
		{
			SegmentTermInfos tis = (SegmentTermInfos) reader.termInfos();
			SegmentMergeInfo smi = new SegmentMergeInfo(base, tis, reader);
			base += reader.numDocs(); // numDocs() excludes deletions
			if (smi.next())
			{
				queue.put(smi); // initialize queue
			}
			else
			{
				smi.close(); // segment had no terms
			}
		}

		SegmentMergeInfo[] match = new SegmentMergeInfo[readers.size()];

		while (queue.size() > 0)
		{
			int matchSize = 0; // pop all segments positioned on the same term
			match[matchSize++] = (SegmentMergeInfo) queue.pop();
			Term term = match[0].term;
			SegmentMergeInfo top = (SegmentMergeInfo) queue.top();

			while (top != null && term.compareTo(top.term) == 0)
			{
				match[matchSize++] = (SegmentMergeInfo) queue.pop();
				top = (SegmentMergeInfo) queue.top();
			}

			mergeTermInfo(match, matchSize); // add new TermInfo

			while (matchSize > 0)
			{
				SegmentMergeInfo smi = match[--matchSize];
				if (smi.next())
				{
					queue.put(smi); // restore queue
				}
				else
				{
					smi.close(); // done with a segment
				}
			}
		}
	}

	/**
	 * Writes the merged postings for one term and, when at least one document
	 * remains, records a dictionary entry pointing at the freq data.
	 *
	 * @param smis segments positioned on the same term
	 * @param n    number of valid entries in {@code smis}
	 */
	private final void mergeTermInfo(SegmentMergeInfo[] smis, int n) throws IOException
	{
		long freqPointer = freqStream.getFilePointer();

		int df = appendPostings(smis, n); // append posting data

		if (df > 0)
		{
			// add an entry to the dictionary with pointers to freq files
			termInfo.set(df, freqPointer);
			termInfosWriter.add(smis[0].term, termInfo);
		}
	}

	/**
	 * Appends the postings of one term from each matching segment, re-mapping
	 * doc numbers around deletions and delta-encoding them. The low bit of
	 * each doc delta flags freq == 1 so the common case costs one VInt.
	 *
	 * @return the number of documents containing the term (its new df)
	 * @throws IllegalStateException if doc numbers come out of order
	 */
	private final int appendPostings(SegmentMergeInfo[] smis, int n) throws IOException
	{
		int lastDoc = 0;
		int docCount = 0; // number of docs w/ term
		for (int i = 0; i < n; i++)
		{
			SegmentMergeInfo smi = smis[i];
			SegmentTermPostings postings = smi.postings;
			int base = smi.base;
			int[] docMap = smi.docMap;
			postings.seek(smi.term);
			boolean isPos = fieldInfos.getFieldInfo(smi.term.field).isPos();
			while (postings.nextDoc())
			{
				int doc;
				if (docMap == null)
				{
					doc = base + postings.doc; // no deletions
				}
				else
				{
					doc = base + docMap[postings.doc]; // re-map around deletions
				}

				if (doc < lastDoc)
				{
					throw new IllegalStateException("docs out of order");
				}

				int docCode = (doc - lastDoc) << 1; // use low bit to flag freq=1
				lastDoc = doc;

				int freq = postings.freq();
				if (freq == 1)
				{
					freqStream.writeVInt(docCode | 1); // write doc & freq=1
				}
				else
				{
					freqStream.writeVInt(docCode); // write doc
					freqStream.writeVInt(freq); // write frequency in doc
				}

				if (isPos)
				{
					int lastPosition = 0; // write position deltas
					for (int j = 0; j < freq; j++)
					{
						int position = postings.nextPosition();
						freqStream.writeVInt(position - lastPosition);
						lastPosition = position;
					}
				}

				docCount++;
			}
		}
		return docCount;
	}
}
