package org.apache.lucene.index;

import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.index.TagIndexSnapshot.Log.TransactionLogTermEnum;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.LimitedIndexInput;
import org.apache.lucene.store.TagRAMOutputStream;
import org.apache.lucene.util.BitVector;
import org.apache.lucene.util.OpenBitSet;
import org.apache.lucene.util.PriorityQueue;
import org.apache.lucene.util.cache.SimpleLRUCache;

/**
 * Snapshot of the tag index at a given version, combining the on-disk index
 * with a transaction log of subsequent changes.
 */
//TODO: add lrucache for blocks, tagblockinfos, not sure what to do for different snapshots
//TODO: create system to create clean index
public class TagIndexSnapshot implements Cloneable {
  private Log log;
  private Index index;
  BitVector deletedDocs;
  TagIndex tagIndex;
  private long version;
  private String segment;
  private ReentrantLock newTagIndexSnapshotLock = new ReentrantLock();
  private int maxSizeBlockCache;
  private int maxSizeBlockInfosCache;

  /**
   * Creates a snapshot over the given transaction log and deletions.
   *
   * @param segment segment name this snapshot belongs to
   * @param version snapshot version number
   * @param log transaction log backing this snapshot
   * @param deletedDocs deleted-document bits, or null when nothing is deleted
   * @param maxSizeBlockCache capacity of the tag-block LRU cache
   * @param maxSizeBlockInfosCache capacity of the block-infos LRU cache
   */
  public TagIndexSnapshot(String segment, long version, Log log, BitVector deletedDocs, int maxSizeBlockCache, int maxSizeBlockInfosCache) throws IOException {
    this.log = log;
    this.segment = segment;
    this.version = version;
    this.deletedDocs = deletedDocs;
    this.maxSizeBlockInfosCache = maxSizeBlockInfosCache;
    this.maxSizeBlockCache = maxSizeBlockCache;
  }
  
  /** Returns whether this snapshot carries any deleted-document bits. */
  public boolean hasDeletions() {
    return null != deletedDocs;
  }
  
  /** Releases the log and index resources held by this snapshot. */
  public void close() throws IOException {
    if (log != null) {
      log.close();
    }
    if (index != null) {
      index.close();
    }
  }
  
  /**
   * Returns the number of live (non-deleted) documents in this snapshot.
   * <p>
   * Uses {@code BitVector.count()} — the number of set (deleted) bits — rather
   * than {@code size()}, which is the bit capacity and would subtract the whole
   * document count. Also tolerates a snapshot with no deletions, where
   * {@code deletedDocs} is null (see {@link #hasDeletions()}).
   */
  public int numDocs() {
    int deleted = deletedDocs == null ? 0 : deletedDocs.count();
    return tagIndex.maxDoc - deleted;
  }

  /**
   * Resolves a term number to its Term.
   *
   * @param num the term number to resolve
   * @param tlog true to resolve against the transaction log, false against the
   *        on-disk index
   */
  public Term getTerm(int num, boolean tlog) throws IOException {
    return tlog ? log.getTermByNum(num) : index.getTermByNum(num);
  }

  /**
   * Materializes document n as stored fields, honoring the field selector.
   * Each tag field is rebuilt from its term and added as a stored,
   * un-tokenized field.
   *
   * @param n document number
   * @param fieldSelector selector deciding which fields to load; null loads all
   */
  public Document getDocument(int n, FieldSelector fieldSelector) throws IOException {
    TagFieldInfos fieldInfos = tagIndex.getTagFieldInfos();
    TagFieldData fieldData = tagIndex.fieldDatabase.get(n, version);
    Document doc = new Document();
    int numFields = fieldData.fields.length;
    for (int i = 0; i < numFields; i++) {
      String fieldName = fieldInfos.fieldName(fieldData.fields[i].fieldNum);
      if (fieldSelector == null || fieldSelector.accept(fieldName).equals(FieldSelectorResult.LOAD)) {
        Term term = getTerm(fieldData.fields[i].termNum, fieldData.fields[i].tlog);
        doc.add(new Field(fieldName, term.text(), Field.Store.YES, Field.Index.UN_TOKENIZED));
      }
    }
    return doc;
  }

  /** Looks up the term's TagTermInfo and loads its block infos. */
  public TagBlockInfos getTagBlockInfos(Term term) throws IOException {
    return getTagBlockInfos(getTagTermInfo(term));
  }

  /**
   * Loads block infos for a term info, dispatching to the transaction log for
   * log-resident terms (TermDiskInfo) and to the on-disk index otherwise.
   */
  public TagBlockInfos getTagBlockInfos(TagTermInfo tagTermInfo) throws IOException {
    if (!(tagTermInfo instanceof TermDiskInfo)) {
      return index.getTagBlockInfos(tagTermInfo);
    }
    return log.getTagBlockInfos((TermDiskInfo) tagTermInfo);
  }

  /**
   * Finds the term info for a term, preferring the transaction log (the most
   * recent state) and falling back to the on-disk index.
   */
  public TagTermInfo getTagTermInfo(Term term) throws IOException {
    TagTermInfo fromLog = log.getTagTermInfo(term);
    return fromLog != null ? fromLog : index.getTagTermInfo(term);
  }

  /** Creates a positions-capable TermDocs spanning both index and log data. */
  public TermPositions termDocs() {
    int skipInterval = index.termInfosReader.getSkipInterval();
    int maxSkipLevels = index.termInfosReader.getMaxSkipLevels();
    return new TagTermDocs(this, tagIndex.starts, deletedDocs, index.getInput(), log.input, skipInterval, maxSkipLevels);
  }

  /** Enumerates all terms, merging the on-disk index with the transaction log. */
  public TagMultiTermEnum terms() throws IOException {
    TagSegmentTermEnum fromIndex = index.terms();
    TransactionLogTermEnum fromLog = log.terms();
    return new TagMultiTermEnum(fromIndex, fromLog, null, log.deletedIndexTerms);
  }

  /** Enumerates merged index/log terms starting at {@code term}. */
  public TagMultiTermEnum terms(Term term) throws IOException {
    TagSegmentTermEnum fromIndex = index.terms(term);
    TransactionLogTermEnum fromLog = log.terms(term);
    return new TagMultiTermEnum(fromIndex, fromLog, term, log.deletedIndexTerms);
  }

  /**
   * Creates a copy of this snapshot with the version incremented by one.
   *
   * @param segment segment name for the new snapshot
   * @param deleted deleted-document bits for the new snapshot; when null the
   *        current snapshot's deletions are carried over
   * @return a new snapshot backed by a copy of this snapshot's log
   */
  public TagIndexSnapshot newTagIndexSnapshot(String segment, BitVector deleted) throws IOException {
    newTagIndexSnapshotLock.lock();
    try {
      long newVersion = version + 1;
      if (deleted == null) {
        deleted = deletedDocs; // keep current deletions when caller passes none
      }
      Log newLog = (Log) log.copy();
      // NOTE(review): the returned snapshot's index/tagIndex fields are not set
      // here — presumably the caller wires them up afterwards; confirm,
      // otherwise most methods on the new snapshot will NPE.
      return new TagIndexSnapshot(segment, newVersion, newLog, deleted, maxSizeBlockCache, maxSizeBlockInfosCache);
    } finally {
      newTagIndexSnapshotLock.unlock();
    }
  }

  /** Returns this snapshot's version number. */
  public long getVersion() {
    return version;
  }

  // TODO: store term as bytes with sorted field num as first byte
  /**
   * Term info for a term that lives in the transaction log. Unlike plain
   * TagTermInfo it carries the Term itself, making the on-disk log record
   * self-describing.
   */
  public static class TermDiskInfo extends TagTermInfo implements TagWriteable {
    public Term term;

    public TermDiskInfo() {
    }

    /** Deserializes a record previously produced by {@link #write}. */
    public TermDiskInfo(IndexInput input) throws IOException {
      docFreq = input.readVInt();
      termNum = input.readVInt();
      blockInfosPointer = input.readVLong();
      // Arguments evaluate left to right: field first, then text.
      term = new Term(input.readString(), input.readString());
    }

    /** Serializes this info; field order must mirror the reading constructor. */
    public void write(IndexOutput output) throws IOException {
      output.writeVInt(docFreq);
      output.writeVInt(termNum);
      output.writeVLong(blockInfosPointer);
      output.writeString(term.field());
      output.writeString(term.text());
    }
  }

  /** A single log record: its type tag, payload length, and payload stream. */
  public static class Record {
    public int type;
    public IndexInput input;
    public int length;

    public Record(int type, int length, IndexInput input) {
      this.type = type;
      this.input = input;
      this.length = length;
    }

    /** Advances the stream past this record's payload without reading it. */
    public void skip() throws IOException {
      long end = input.getFilePointer() + length;
      input.seek(end);
    }
  }

  /**
   * Write record methods return pointer to the actual data
   * 
   */
  // TODO: add lrucache of tagblockinfos and tagblocks
  // TODO: create logrecordreader
  public class Log {
    public static final long CURRENT_FORMAT = 1;
    public static final int TERM_DISK_INFO = 4;
    public static final int BLOCK_DATA = 10;
    public static final int BLOCK_INFOS = 7;
    private ArrayList<TermDiskInfo> termDiskInfos = new ArrayList<TermDiskInfo>(); // sorted
    // by
    // term
    private ArrayList<TermDiskInfo> numTermDiskInfos = new ArrayList<TermDiskInfo>(); // sorted
    // by
    // termnum.
    // for
    // log
    // termnum is an id
    private HashSet<Integer> deletedIndexTerms; // terms from index that are not
    private IndexInput input;
    private IndexOutput output;
    int skipInterval;
    int maxSkipLevels;
    int termIdSequence = 0;
    private TermNumDiskComparator termNumDiskComparator = new TermNumDiskComparator();
    private TermDiskComparator termDiskComparator = new TermDiskComparator();
    private ReentrantLock addTermDiskInfoLock = new ReentrantLock();
    private String fileName;
    private TagBlockLRUCache tagBlockLRUCache;
    private SimpleLRUCache tagBlockInfosCache;
    
    // No-file constructor used only by copy(); leaves input/output unopened.
    private Log() {
    }
    
    /**
     * Opens (creating if necessary) the transaction log file for this
     * snapshot's segment and version, and replays it when non-empty.
     *
     * @param skipInterval skip-list interval used by block readers/writers
     * @param maxSkipLevels maximum skip-list depth
     */
    public Log(int skipInterval, int maxSkipLevels) throws IOException {
      this.skipInterval = skipInterval;
      this.maxSkipLevels = maxSkipLevels;
      tagBlockLRUCache = new TagBlockLRUCache(maxSizeBlockCache);
      tagBlockInfosCache = new SimpleLRUCache(maxSizeBlockInfosCache);
      fileName = segment + "." + version + ".tlg";
      // NOTE(review): createOutput typically creates/truncates the file, so
      // output.length() is likely 0 here and the load() branch may be
      // unreachable for a pre-existing log — confirm (an existence check
      // before createOutput may be intended).
      // NOTE(review): deletedIndexTerms is not initialized here and is only
      // assigned by copy() — a freshly constructed Log can NPE where it is
      // dereferenced; confirm.
      output = tagIndex.directory.createOutput(fileName);
      input = tagIndex.directory.openInput(fileName);
      if (output.length() > 0) {
        load();
      } else {
        // TODO: write out headers
        output.writeLong(CURRENT_FORMAT);
      }
    }

    /**
     * Loads (or fetches from cache) the block infos for a log-resident term.
     *
     * @param tdi term info pointing at the block-infos record in the log
     */
    public TagBlockInfos getTagBlockInfos(TermDiskInfo tdi) throws IOException {
      TagBlockInfos tagBlockInfos = (TagBlockInfos) tagBlockInfosCache.get(tdi.termNum);
      if (tagBlockInfos != null) {
        return tagBlockInfos;
      }
      // Read through a private clone so concurrent readers do not disturb the
      // shared input's file pointer.
      IndexInput i = (IndexInput) input.clone();
      try {
        tagBlockInfos = new TagBlockInfos(tdi.termNum, true, tdi.blockInfosPointer, i, 0);
        // The original never populated the cache on a miss, so only entries
        // inserted by update() could ever hit; remember the loaded value.
        tagBlockInfosCache.put(tdi.termNum, tagBlockInfos);
        return tagBlockInfos;
      } finally {
        i.close();
      }
    }

    /**
     * Looks up the term info for an exact term in the log.
     * <p>
     * Returns null when the log does not contain the term, so the caller
     * (TagIndexSnapshot.getTagTermInfo) can fall back to the on-disk index.
     * The original returned the nearest entry at the binary-search insertion
     * point — i.e. a *different* term's info — and threw
     * IndexOutOfBoundsException on an empty log.
     */
    public TagTermInfo getTagTermInfo(Term term) {
      TermDiskInfo key = new TermDiskInfo();
      key.term = term;
      int pos = Collections.binarySearch(termDiskInfos, key, termDiskComparator);
      if (pos < 0)
        return null; // exact term not present in the log
      return termDiskInfos.get(pos);
    }

    /**
     * Creates an in-memory copy of this log for a new snapshot version.
     * The term tables are shallow-copied (entries are shared) and the caches
     * are shared between the copies.
     * <p>
     * NOTE(review): the copy has no open input/output — presumably a later
     * step re-opens the log file for the new snapshot; confirm.
     */
    public Log copy() {
      Log log = new Log();
      log.termDiskInfos = (ArrayList) termDiskInfos.clone();
      log.numTermDiskInfos = (ArrayList) numTermDiskInfos.clone();
      // A log that was never loaded may not have a deleted-terms set yet.
      log.deletedIndexTerms = deletedIndexTerms == null
          ? new HashSet<Integer>() : (HashSet) deletedIndexTerms.clone();
      log.tagBlockLRUCache = tagBlockLRUCache;
      log.tagBlockInfosCache = tagBlockInfosCache; // share, like the block cache
      log.skipInterval = skipInterval;
      log.maxSkipLevels = maxSkipLevels;
      log.fileName = fileName;
      // Carry the id sequence forward so terms added to the copy do not reuse
      // termNums already present in the copied tables (original reset it to 0).
      log.termIdSequence = termIdSequence;
      return log;
    }

    /**
     * Registers a new term in the transaction log's in-memory tables and
     * persists its {@link TermDiskInfo} record.
     *
     * @param term the term being added
     * @param blockInfosPointer file pointer of the term's block-infos record
     * @param docFreq document frequency of the term
     * @return the newly created, already-written TermDiskInfo
     */
    public synchronized TermDiskInfo addTermDiskInfo(Term term, long blockInfosPointer, int docFreq) throws IOException {
      TermDiskInfo termDiskInfo = new TermDiskInfo();
      termDiskInfo.blockInfosPointer = blockInfosPointer;
      termDiskInfo.docFreq = docFreq;
      termDiskInfo.term = term;
      termDiskInfo.termNum = termIdSequence;
      // termDiskInfos is sorted by term, so search with the term comparator.
      // (The original searched with the termNum comparator, which yields an
      // arbitrary insertion point in a term-sorted list.)
      int index = Collections.binarySearch(termDiskInfos, termDiskInfo, termDiskComparator);
      if (index < 0)
        index = -1 - index;
      termDiskInfos.add(index, termDiskInfo);
      // numTermDiskInfos is sorted by termNum; search with the fully
      // initialized info so its termNum (not a default of 0) drives the search.
      int numIndex = Collections.binarySearch(numTermDiskInfos, termDiskInfo, termNumDiskComparator);
      if (numIndex < 0)
        numIndex = -1 - numIndex;
      numTermDiskInfos.add(numIndex, termDiskInfo);
      termIdSequence++;
      writeRecord(termDiskInfo);
      return termDiskInfo;
    }

    /**
     * Closes the log's file handles. Both the writer and the reader opened in
     * the constructor are released (the original leaked the reader).
     */
    public void close() throws IOException {
      try {
        if (output != null)
          output.close();
      } finally {
        if (input != null)
          input.close();
      }
    }

    /**
     * Serializes the writeable into a memory buffer and appends it to the log
     * as a record of the given type.
     *
     * @return file pointer to the record's length-prefixed payload
     */
    public long writeRecord(int type, TagWriteable writeable) throws IOException {
      TagRAMOutputStream buffer = new TagRAMOutputStream();
      writeable.write(buffer);
      return writeRecord(type, buffer);
    }

    /**
     * Appends a buffered record to the log as: type vint, payload-length vint,
     * payload bytes — the layout consumed by {@link #readRecord()} and also
     * produced by the byte[] overload.
     * <p>
     * The original wrote the current *file* length instead of the payload
     * length, and appended it to the buffer (after the payload) rather than
     * writing it to the log, producing records readRecord() cannot parse.
     *
     * @return file pointer just after the type vint, i.e. at the length prefix
     */
    public long writeRecord(int type, TagRAMOutputStream byteOutput) throws IOException {
      output.writeVInt(type);
      long pointer = output.getFilePointer();
      output.writeVInt((int) byteOutput.length());
      byteOutput.writeTo(output);
      return pointer;
    }

    /**
     * Appends a raw record (type vint, length vint, payload bytes) to the log.
     *
     * @return file pointer just after the type vint, i.e. at the length prefix
     */
    public long writeRecord(byte type, byte[] bytes) throws IOException {
      output.writeVInt(type);
      long recordStart = output.getFilePointer();
      int payloadLength = bytes.length;
      output.writeVInt(payloadLength);
      output.writeBytes(bytes, payloadLength);
      return recordStart;
    }

    /**
     * Reads the next record header from the log and returns a Record whose
     * payload stream is limited to the declared length.
     */
    public Record readRecord() throws IOException {
      int recordType = input.readVInt();
      int payloadLength = input.readVInt();
      return new Record(recordType, payloadLength, new LimitedIndexInput(payloadLength, input));
    }

    /** Appends a term-info record; returns the pointer to its payload. */
    public long writeRecord(TermDiskInfo termDiskInfo) throws IOException {
      return writeRecord(TERM_DISK_INFO, termDiskInfo);
    }

    /** Appends a block-infos record; returns the pointer to its payload. */
    public long writeRecord(TagBlockInfos blockInfos) throws IOException {
      return writeRecord(BLOCK_INFOS, blockInfos);
    }

    /**
     * public long writeRecord(TagBlockInfo tagBlockInfo) throws IOException {
     * return writeRecord(BLOCK_INFO, tagBlockInfo); }
     * 
     * public long writeRecord(TagBlockInfo[] blockInfoArray) throws IOException {
     * ByteArrayIndexOutput2 byteOutput = new ByteArrayIndexOutput2();
     * byteOutput.writeVInt(blockInfoArray.length); for (int x=0; x <
     * blockInfoArray.length; x++) { blockInfoArray[x].write(byteOutput); }
     * return writeRecord(BLOCK_INFOS_ARRAY, byteOutput); }
     */
    /** Appends a block-data record; returns the pointer to its payload. */
    public long writeRecord(TagBlockWriter tagBlockWriter) throws IOException {
      return writeRecord(BLOCK_DATA, tagBlockWriter);
    }

    /**
     * Rebuilds the in-memory term tables by replaying an existing log file.
     * Later records for the same termNum supersede earlier ones (last write
     * wins via the map). After the replay, id allocation resumes past the
     * highest termNum seen so new terms do not collide with replayed ones
     * (the original left termIdSequence at 0).
     */
    public void load() throws IOException {
      Map<Integer,TermDiskInfo> termInfoMap = new HashMap<Integer,TermDiskInfo>(2000);
      long formatVersion = input.readLong(); // file header; currently unvalidated
      int maxTermNum = -1;
      while (true) {
        try {
          Record record = readRecord();
          if (record.type == TERM_DISK_INFO) {
            TermDiskInfo tdi = new TermDiskInfo(input);
            termInfoMap.put(tdi.termNum, tdi);
            if (tdi.termNum > maxTermNum)
              maxTermNum = tdi.termNum;
          } else {
            record.skip();
          }
        } catch (EOFException eofException) {
          break; // end of log reached
        }
      }
      termIdSequence = maxTermNum + 1;
      if (deletedIndexTerms == null) {
        // Field is otherwise only assigned by copy(); avoid NPEs downstream.
        deletedIndexTerms = new HashSet<Integer>();
      }
      termDiskInfos.ensureCapacity(termInfoMap.size());
      numTermDiskInfos.ensureCapacity(termInfoMap.size());
      for (TermDiskInfo termDiskInfo : termInfoMap.values()) {
        termDiskInfos.add(termDiskInfo);
        numTermDiskInfos.add(termDiskInfo);
      }
      Collections.sort(termDiskInfos, termDiskComparator);
      Collections.sort(numTermDiskInfos, termNumDiskComparator);
    }

    /**
     * Applies a set/clear of document bits for a term: merges each affected
     * block, appends the merged blocks, a new block-infos record, and a new
     * term-info record to the log.
     *
     * @param term term whose postings are being updated
     * @param updates global doc ids to add or remove for the term
     * @param add true to set the docs on the term, false to clear them
     */
    public synchronized void update(Term term, DocIdSet updates, boolean add) throws IOException {
      IndexInput logInput = (IndexInput) input.clone();
      // NOTE(review): indexInput is never assigned, so blocks still living in
      // the on-disk index (tbi.tlogs[x] == false) are merged with a null
      // input — confirm TagBlockTermDocs tolerates that, or wire up an index
      // input clone here.
      IndexInput indexInput = null;
      int numBlocks = tagIndex.numBlocks;
      try {
        int[] starts = tagIndex.starts;
        // Per-block bit sets of the docs touched by this update; null entries
        // mean the block is unaffected.
        OpenBitSet[] updateBitSets = getAffectedBlocks(updates, starts);
        TagBlockInfos tbi = TagIndexSnapshot.this.getTagBlockInfos(term);
        TagBlockInfos newTbi = new TagBlockInfos();
        newTbi.termNum = tbi.termNum; 
        newTbi.positions = tbi.positions; // TODO: need to copy arrays because they get altered
        newTbi.docFreqs = tbi.docFreqs;
        newTbi.tlog = true;
        newTbi.tlogs = tbi.tlogs;
        // NOTE(review): docFreq below sums only the re-merged blocks; the
        // docFreqs of untouched blocks are not included — confirm intended.
        int docFreq = 0;
        // Pass 1: merge each affected block in memory.
        TagBlockMerger[] tagBlockMergers = new TagBlockMerger[numBlocks];
        for (int x = 0; x < numBlocks; x++) {
          if (updateBitSets[x] != null) {
            IndexInput input = indexInput;
            if (tbi.tlogs[x])
              input = logInput;
            TagBlockTermDocs tagBlockTermDocs = new TagBlockTermDocs(x, tbi, deletedDocs, input, skipInterval, maxSkipLevels);
            tagBlockMergers[x] = new TagBlockMerger(tagBlockTermDocs, tbi.docFreqs[x], skipInterval, maxSkipLevels);
            int blockDocFreq = tagBlockMergers[x].merge(add, updateBitSets[x]);
            docFreq += blockDocFreq;
          }
        }
        // Pass 2: persist the merged blocks and point the new infos at them.
        for (int x = 0; x < numBlocks; x++) {
          if (tagBlockMergers[x] != null) {
            long position = writeRecord(tagBlockMergers[x].tagBlockData); // write
            tagBlockLRUCache.put(term, x, tagBlockMergers[x].tagBlockData);
            newTbi.positions[x] = position;
            newTbi.docFreqs[x] = tagBlockMergers[x].docFreq;
            newTbi.tlogs[x] = true;
          }
        }
        long blockInfosPointer = writeRecord(newTbi); // write tagblockinfos
        tagBlockInfosCache.put(newTbi.termNum, newTbi);
        TermDiskInfo termDiskInfo = addTermDiskInfo(term, blockInfosPointer, docFreq); // write
        // terminfo
      } finally {
        logInput.close();
      }
    }

    /**
     * Buckets the global doc ids in {@code updates} into per-block,
     * block-relative bit sets. Entries stay null for blocks with no updates.
     * Relies on the iterator delivering doc ids in ascending order.
     */
    public OpenBitSet[] getAffectedBlocks(DocIdSet updates, int[] starts) throws IOException {
      int numBlocks = starts.length - 1;
      OpenBitSet[] blocks = new OpenBitSet[numBlocks];
      DocIdSetIterator iterator = updates.iterator();
      int block = 0;
      while (iterator.next()) {
        int doc = iterator.doc();
        // Advance to the block containing doc (docs arrive in order, so we
        // never need to move backwards).
        while (block < numBlocks) {
          boolean inBlock = starts[block] <= doc && doc < starts[block + 1];
          if (!inBlock) {
            block++;
            continue;
          }
          if (blocks[block] == null) {
            blocks[block] = new OpenBitSet(tagIndex.getNumDocsForBlock(block));
          }
          blocks[block].set(doc - starts[block]);
          break;
        }
      }
      return blocks;
    }

    /**
     * Resolves a log termNum to its Term via binary search on the
     * termNum-sorted table; returns null when the termNum is unknown.
     */
    public Term getTermByNum(int termNum) {
      TermDiskInfo probe = new TermDiskInfo();
      probe.termNum = termNum;
      int found = Collections.binarySearch(numTermDiskInfos, probe, termNumDiskComparator);
      return found < 0 ? null : ((TermDiskInfo) numTermDiskInfos.get(found)).term;
    }

    /**
     * Returns true when the given on-disk-index termNum has been superseded
     * or removed by the transaction log. Null-safe: deletedIndexTerms is only
     * assigned by copy(), so a freshly constructed Log has none.
     */
    public boolean isIndexTermDeleted(int num) {
      return deletedIndexTerms != null && deletedIndexTerms.contains(num);
    }

    /** Creates a TermDocs over the transaction log's blocks. */
    public TransactionLogTermDocs termDocs() {
      return new TransactionLogTermDocs(deletedDocs, skipInterval, maxSkipLevels);
    }

    /**
     * private TagBlockInfo[] loadBlockInfos(long blockInfosPointer) throws
     * IOException { TagBlockInfos tagBlockInfos = new TagBlockInfos(true,
     * blockInfosPointer, input); TagBlockInfo[] blockInfos =
     * TagBlockInfos.load(true, tagBlockInfos, input); return blockInfos; }
     */
    /** TermDocs implementation reading postings blocks from the log. */
    public class TransactionLogTermDocs extends TagBaseBlocksTermDocs {
      // NOTE(review): never assigned, so the seeks below run with a null
      // input — presumably a clone of the log's input should be installed
      // here; confirm against TagBaseBlocksTermDocs.
      IndexInput input;

      public TransactionLogTermDocs(BitVector deletedDocs, int skipInterval, int maxSkipLevels) {
        super(TagIndexSnapshot.this, deletedDocs, skipInterval, maxSkipLevels);
      }

      /** Positions on the given term by delegating to the log's term enum. */
      public void seek(Term term) throws IOException {
        TransactionLogTermEnum termEnum = terms(term);
        seek(termEnum);
      }

      /** Positions on the enum's current term, loading its block infos. */
      public void seek(TermEnum termEnum) throws IOException {
        TransactionLogTermEnum transactionLogTermEnum = (TransactionLogTermEnum) termEnum;
        TermDiskInfo termDiskInfo = transactionLogTermEnum.termDiskInfo();
        TagBlockInfos tagBlockInfos = new TagBlockInfos(termDiskInfo.termNum, true, termDiskInfo.blockInfosPointer, input, 0);
        // `term` resolves to a field inherited from the base class —
        // presumably the current seek target; verify it is set before use.
        seek(tagBlockInfos, input, term);
      }
    }
    
    /** Enumerates all log terms in term order. */
    public TransactionLogTermEnum terms() {
      return new TransactionLogTermEnum(null);
    }

    /** Enumerates log terms positioned at the first term >= {@code term}. */
    public TransactionLogTermEnum terms(Term term) {
      return new TransactionLogTermEnum(term);
    }

    /**
     * Term enumeration over the log's in-memory, term-sorted table.
     * <p>
     * Follows the Lucene TermEnum protocol: constructed with a start term it
     * is positioned at the first entry >= that term; constructed without one
     * it is positioned before the first entry and next() must be called first.
     */
    public class TransactionLogTermEnum extends TagTermEnum {
      int pos;

      private TransactionLogTermEnum(Term fromTerm) {
        if (fromTerm == null) {
          // Before-first: the first next() moves to element 0. (The original
          // started at 0, making the first next() skip the first log term.)
          pos = -1;
        } else {
          TermDiskInfo from = new TermDiskInfo();
          from.term = fromTerm;
          pos = Collections.binarySearch(termDiskInfos, from, new TermDiskComparator());
          if (pos < 0) {
            pos = -1 - pos; // insertion point: first entry greater than fromTerm
          }
        }
      }

      public boolean next() throws IOException {
        if (pos < termDiskInfos.size() - 1) {
          pos++;
          return true;
        }
        return false;
      }

      public int termNum() {
        return termDiskInfo().termNum;
      }

      public TagTermInfo termInfo() {
        return termDiskInfo();
      }

      /** Current entry, or null when the enum is not positioned on one. */
      public TermDiskInfo termDiskInfo() {
        // Original threw IndexOutOfBoundsException when unpositioned or when
        // a seek landed past the last entry; callers test term() != null.
        if (pos < 0 || pos >= termDiskInfos.size())
          return null;
        return (TermDiskInfo) termDiskInfos.get(pos);
      }

      public Term term() {
        TermDiskInfo tdi = termDiskInfo();
        return tdi == null ? null : tdi.term;
      }

      public int docFreq() {
        // NOTE(review): hard-coded 1 as in the original;
        // termDiskInfo().docFreq looks like the intended value — confirm
        // callers' expectations before changing.
        return 1;
      }

      public void close() throws IOException {
      }
    }
  }

  /** Orders TermDiskInfo entries by their Term (field, then text). */
  public static class TermDiskComparator implements Comparator {
    public int compare(Object o1, Object o2) {
      Term a = ((TermDiskInfo) o1).term;
      Term b = ((TermDiskInfo) o2).term;
      return a.compareTo(b);
    }
  }

  /** Orders TermDiskInfo entries by their termNum. */
  public static class TermNumDiskComparator implements Comparator {
    public int compare(Object o1, Object o2) {
      int a = ((TermDiskInfo) o1).termNum;
      int b = ((TermDiskInfo) o2).termNum;
      return a < b ? -1 : (a > b ? 1 : 0);
    }
  }

  /**
   * Read-side view of the on-disk term index for this segment, backed by a
   * TagTermInfosReader.
   */
  public class Index {
    private TagTermInfosReader termInfosReader;

    public Index(TagFieldInfos tagFieldInfos) throws IOException {
      termInfosReader = new TagTermInfosReader(tagIndex.directory, segment, tagFieldInfos);
    }

    /** Closes the underlying term-infos reader. */
    public void close() throws IOException {
      termInfosReader.close();
    }

    /** Loads block infos for an index-resident term via a private input clone. */
    public TagBlockInfos getTagBlockInfos(TagTermInfo tti) throws IOException {
      IndexInput in = (IndexInput) termInfosReader.getInput().clone();
      try {
        return new TagBlockInfos(tti.termNum, false, tti.blockInfosPointer, in, tti.basePointer);
      } finally {
        in.close();
      }
    }

    /** Seeks the segment term enum to the term and returns its info. */
    public TagTermInfo getTagTermInfo(Term term) throws IOException {
      return terms(term).termInfo();
    }

    public IndexInput getInput() {
      return termInfosReader.getInput();
    }

    /** Resolves an index termNum to its Term. */
    public Term getTermByNum(int termNum) throws IOException {
      return termInfosReader.getByNum(termNum).term;
    }

    /** Enumerates all index terms. */
    public TagSegmentTermEnum terms() throws IOException {
      return termInfosReader.terms();
    }

    /** Enumerates index terms starting at {@code term}. */
    public TagSegmentTermEnum terms(Term term) throws IOException {
      return termInfosReader.terms(term);
    }
  }

  /**
   * Merges the on-disk index term enum with the transaction-log term enum
   * into a single enumeration in term order. When both sources contain a
   * term, the transaction-log entry takes precedence; index terms superseded
   * by the log (listed in indexDeletedTermNums) are skipped.
   */
  public class TagMultiTermEnum extends TagTermEnum {
    private TermEnumMergeQueue queue;
    private TagTermInfo termInfo;
    private Term term;
    // NOTE(review): never updated from termInfo.docFreq — confirm intended.
    private int docFreq = 1;
    private Set<Integer> indexDeletedTermNums;

    public TagMultiTermEnum(TagSegmentTermEnum tagSegmentTermEnum, TransactionLogTermEnum transactionLogTermEnum, Term t,
        Set<Integer> indexDeletedTermNums) throws IOException {
      this.indexDeletedTermNums = indexDeletedTermNums;
      queue = new TermEnumMergeQueue(2);
      initTermEnum(t, tagSegmentTermEnum);
      initTermEnum(t, transactionLogTermEnum);
      if (t != null && queue.size() > 0) {
        next(); // position on the first term >= t
      }
    }

    public int termNum() {
      return termInfo.termNum;
    }

    TagTermInfo termInfo() {
      return termInfo;
    }

    /** Seeds the merge queue with an enum that still has terms to deliver. */
    private void initTermEnum(Term t, TermEnum termEnum) throws IOException {
      // With no start term the enum is unpositioned and must be advanced once;
      // with a start term it was already seeked and term() says whether
      // anything remains.
      if (t == null ? termEnum.next() : termEnum.term() != null)
        queue.put(termEnum); // initialize queue
      else
        termEnum.close();
    }

    public boolean next() throws IOException {
      TermEnum top = (TermEnum) queue.top();
      if (top == null) {
        term = null;
        return false;
      }
      term = top.term();
      TagTermInfo logTagTermInfo = null;
      TagTermInfo indexTagTermInfo = null;
      // Drain every source currently positioned on this term.
      while (top != null && term.compareTo(top.term()) == 0) {
        queue.pop();
        // The original had these two assignments swapped: the segment enum is
        // the on-disk index, the transaction-log enum is the log — the swap
        // inverted the log-wins precedence and the deleted-term check below.
        if (top instanceof TagSegmentTermEnum) {
          indexTagTermInfo = ((TagSegmentTermEnum) top).termInfo();
        } else {
          logTagTermInfo = ((TransactionLogTermEnum) top).termDiskInfo();
        }
        if (top.next()) {
          queue.put(top);
        } else
          top.close();
        top = (TermEnum) queue.top();
      }
      // The log supersedes the index for terms present in both.
      if (logTagTermInfo != null)
        termInfo = logTagTermInfo;
      else
        termInfo = indexTagTermInfo;
      // Skip index-only terms that the log has deleted (null-safe: a fresh
      // log may have no deleted-terms set).
      if (termInfo == indexTagTermInfo && indexDeletedTermNums != null
          && indexDeletedTermNums.contains(termInfo.termNum)) {
        return next();
      }
      return true;
    }

    public Term term() {
      return term;
    }

    public int docFreq() {
      return docFreq;
    }

    public void close() throws IOException {
      queue.close();
    }
  }

  /**
   * Splits a segment-wide bit set into per-block, block-relative bit sets
   * using the block start offsets.
   *
   * @param changes segment-wide doc id bit set
   * @param starts block start doc ids; starts.length - 1 blocks
   */
  public OpenBitSet[] separateDocs(OpenBitSet changes, int[] starts) {
    int numBlocks = starts.length - 1;
    OpenBitSet[] bitSets = new OpenBitSet[numBlocks];
    for (int block = 0; block < numBlocks; block++) {
      int blockLength = tagIndex.getNumDocsForBlock(block);
      bitSets[block] = docs(changes, starts[block], starts[block + 1] - 1, blockLength);
    }
    return bitSets;
  }

  /**
   * Extracts the bits of {@code changes} in the inclusive range [start, end]
   * into a new block-relative bit set.
   *
   * @param changes segment-wide doc id bit set
   * @param start first global doc id of the block (inclusive)
   * @param end last global doc id of the block (inclusive)
   * @param blockLength number of documents in the block (size of the result)
   * @return block-relative bit set, or null when no bit in the range is set
   */
  public static OpenBitSet docs(OpenBitSet changes, int start, int end, int blockLength) {
    OpenBitSet bitSet = null;
    for (int x = start; x <= end; x++) {
      if (changes.get(x)) {
        if (bitSet == null)
          bitSet = new OpenBitSet(blockLength);
        // The original set bit 0 on every hit (its "pos" counter was never
        // advanced); the block-relative position is the offset from start.
        bitSet.set(x - start);
      }
    }
    return bitSet;
  }

  /**
   * Rebuilds document n from the field database, resolving each field's term
   * from either the transaction log or the on-disk index, as flagged per
   * field. Fields are added as stored, un-tokenized values.
   */
  public Document document(int n) throws CorruptIndexException, IOException {
    TagFieldData fieldsData = tagIndex.fieldDatabase.get(n, version);
    Document document = new Document();
    for (int i = 0; i < fieldsData.fields.length; i++) {
      String name = tagIndex.getTagFieldInfos().fieldName(fieldsData.fields[i].fieldNum);
      Term term = fieldsData.fields[i].tlog
          ? log.getTermByNum(fieldsData.fields[i].termNum)
          : index.getTermByNum(fieldsData.fields[i].termNum);
      assert name.equals(term.field());
      document.add(new Field(name, term.text(), Field.Store.YES, Field.Index.UN_TOKENIZED));
    }
    return document;
  }

  /**
   * Priority queue that interleaves term enums in term order; on equal terms
   * the on-disk segment enum sorts first.
   */
  public final static class TermEnumMergeQueue extends PriorityQueue {
    TermEnumMergeQueue(int size) {
      initialize(size);
    }

    protected final boolean lessThan(Object a, Object b) {
      TermEnum enumA = (TermEnum) a;
      TermEnum enumB = (TermEnum) b;
      int cmp = enumA.term().compareTo(enumB.term());
      if (cmp != 0) {
        return cmp < 0;
      }
      // Tie-break: segment (index) enums surface before log enums.
      return enumA instanceof TagSegmentTermEnum;
    }

    /** Pops and closes every remaining enum. */
    final void close() throws IOException {
      while (top() != null) {
        ((TermEnum) pop()).close();
      }
    }
  }
}
