package org.apache.lucene.index;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.index.TagIndexSnapshot.TermEnumMergeQueue;

/**
 * Merges the tag postings of multiple {@link IndexReader}s into a single tag
 * segment: identical terms across readers are collapsed into one entry, and
 * each posting's document number is remapped around deletions and offset into
 * the merged doc-id space.
 *
 * <p>NOTE(review): {@link #termInfosWriter} is never assigned anywhere in this
 * file; unless it is wired up elsewhere before {@link #merge()} is called,
 * writing the first term will fail with a {@code NullPointerException} —
 * confirm how it is supposed to be initialized.
 */
public class TagSegmentMerger {
  /** Readers being merged; each reader's docs are offset by a running base. */
  IndexReader[] readers;
  /** Priority queue of per-reader term enumerators, ordered by current term.
   *  Created in mergeTermInfos(). */
  private SegmentMergeQueue queue = null;
  /** Per-reader doc-id remap tables; an entry is null when that reader has no deletions. */
  private int[][] docMaps;
  /** Per-reader deleted-document counts, parallel to {@link #docMaps}. */
  private int[] delCounts;
  /** Destination for merged term/posting data; must be non-null before merging. */
  private TagTermInfosWriter termInfosWriter = null;
  /** Sequence number assigned to each term that has at least one posting. */
  private int termNum = 0;
  /** Scratch buffer reused by appendPostings() to collect the current term's doc ids. */
  private List<Integer> docs = new ArrayList<Integer>();

  /**
   * @param readers the segment readers to merge; not copied, so the caller
   *                must not modify the array during the merge
   */
  public TagSegmentMerger(IndexReader[] readers) {
    this.readers = readers;
  }

  /**
   * Returns the per-reader deletion counts gathered during the merge, or
   * null if no reader had deletions (the array is only allocated lazily).
   */
  int[] getDelCounts() {
    return delCounts;
  }

  /**
   * Runs the merge over all readers.
   *
   * @throws IOException if reading any segment fails
   */
  public void merge() throws IOException {
    // BUGFIX(review): the original body built a TermEnumMergeQueue here,
    // inserted a fresh TermEnum from every reader, and then dropped the queue
    // unused — dead code that also leaked the enumerators. Meanwhile the real
    // 'queue' field stayed null, so mergeTermInfos() crashed with an NPE. The
    // dead queue is removed and 'queue' is now initialized in mergeTermInfos().
    mergeTermInfos();
  }

  /**
   * Writes one merged term: gathers the postings of all entries whose current
   * term is identical and, if the term has at least one document, records it
   * with the next term number.
   *
   * @param smis entries positioned on the same term
   * @param n    number of valid entries in {@code smis}
   * @return the merged document frequency of the term
   * @throws CorruptIndexException if a segment's postings cannot be read
   * @throws IOException           on other I/O failure
   */
  private final int mergeTermInfo(SegmentMergeInfo[] smis, int n) throws CorruptIndexException, IOException {
    int[] docsArray = appendPostings(smis, n);
    // Skip terms whose postings were entirely deleted.
    if (!docs.isEmpty()) {
      termInfosWriter.add(smis[0].term, termNum, docsArray);
      termNum++;
    }
    return docsArray.length;
  }

  /**
   * Collects the doc ids of the current term from each matching segment into
   * {@link #docs} (cleared first) and returns them as an array. Each doc id is
   * remapped around that segment's deletions and shifted by the segment's base
   * into the merged doc-id space. Ordering follows segment order, so ids are
   * produced in increasing order as long as each segment's postings are sorted.
   *
   * @param smis entries positioned on the same term
   * @param n    number of valid entries in {@code smis}
   * @return the merged doc ids for the term
   * @throws CorruptIndexException if a segment's postings cannot be read
   * @throws IOException           on other I/O failure
   */
  private final int[] appendPostings(SegmentMergeInfo[] smis, int n) throws CorruptIndexException, IOException {
    docs.clear();
    for (int i = 0; i < n; i++) {
      SegmentMergeInfo smi = smis[i];
      TermPositions postings = smi.getPositions();
      assert postings != null;
      int base = smi.base;
      int[] docMap = smi.getDocMap();
      postings.seek(smi.termEnum);
      while (postings.next()) {
        int doc = postings.doc();
        if (docMap != null)
          doc = docMap[doc]; // map around deletions
        doc += base; // convert to merged space
        docs.add(doc);
        // NOTE(review): the original also tracked a 'lastDoc' variable here,
        // but never read it — removed as dead code.
      }
    }
    int[] array = new int[docs.size()];
    for (int x = 0; x < docs.size(); x++) {
      array[x] = docs.get(x);
    }
    return array;
  }

  /**
   * Drives the merge: seeds the priority queue with one positioned
   * {@link SegmentMergeInfo} per reader (recording doc maps and deletion
   * counts for readers with deletions), then repeatedly pops all entries that
   * share the smallest term, merges that term's postings, and pushes each
   * entry back if it still has terms.
   *
   * @throws CorruptIndexException if a segment's postings cannot be read
   * @throws IOException           on other I/O failure
   */
  private final void mergeTermInfos() throws CorruptIndexException, IOException {
    final int readerCount = readers.length;
    // BUGFIX(review): 'queue' was never initialized anywhere, so every use
    // below threw a NullPointerException. Size it to one slot per reader.
    queue = new SegmentMergeQueue(readerCount);
    int base = 0;
    for (int i = 0; i < readerCount; i++) {
      IndexReader reader = readers[i]; // redundant (IndexReader) cast removed
      TermEnum termEnum = reader.terms();
      SegmentMergeInfo smi = new SegmentMergeInfo(base, termEnum, reader);
      int[] docMap = smi.getDocMap();
      if (docMap != null) {
        // Lazily allocate the bookkeeping arrays the first time a reader
        // with deletions is seen.
        if (docMaps == null) {
          docMaps = new int[readerCount][];
          delCounts = new int[readerCount];
        }
        docMaps[i] = docMap;
        delCounts[i] = smi.reader.maxDoc() - smi.reader.numDocs();
      }
      // Next reader's docs start after this reader's live (undeleted) docs.
      base += reader.numDocs();
      if (smi.next())
        queue.put(smi); // initialize queue
      else
        smi.close(); // reader has no terms at all
    }
    SegmentMergeInfo[] match = new SegmentMergeInfo[readers.length];
    while (queue.size() > 0) {
      int matchSize = 0; // pop matching terms
      match[matchSize++] = (SegmentMergeInfo) queue.pop();
      Term term = match[0].term;
      SegmentMergeInfo top = (SegmentMergeInfo) queue.top();
      // Pull every other entry currently positioned on the same term.
      while (top != null && term.compareTo(top.term) == 0) {
        match[matchSize++] = (SegmentMergeInfo) queue.pop();
        top = (SegmentMergeInfo) queue.top();
      }
      final int df = mergeTermInfo(match, matchSize); // add new TermInfo
      // if (checkAbort != null)
      // checkAbort.work(df / 3.0);
      while (matchSize > 0) {
        SegmentMergeInfo smi = match[--matchSize];
        if (smi.next())
          queue.put(smi); // restore queue
        else
          smi.close(); // done with a segment
      }
    }
  }
}
