// Copyright 2006 PR1ME. All Rights Reserved.
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.Map.Entry;

/**
 * Builds a searchable corpus index. Words are fed in one file at a time via
 * the WordParser callbacks (startNewFile / processWord / endOfStream), and
 * finish() serializes the whole index to a DataOutputStream.
 *
 * Output format (in order):
 *   1. long    — construction timestamp (millis since epoch)
 *   2. bytes   — variable break characters, terminated by a 0 byte
 *   3. UTF     — absolute path of the stop-word file ("" if none)
 *   4. UTF     — construction directory
 *   5. stats   — total characters (long), total words (long), file count (int)
 *   6. UTF×N   — each file's absolute path
 *   7. int + UTF×M — pruned, sorted corpus word list preceded by its size
 *   8. bits    — one bit set per file, exactly M bits each
 *
 * Not thread-safe; a single parse/finish sequence per instance is assumed.
 */
public class Indexer implements WordParser {

  /** Signals a failure while writing index output. */
  public static final class OutputException extends RuntimeException {
    private static final long serialVersionUID = 1L;
  }

  /**
   * @param settings supplies the word analyzer, character analyzer, and
   *                 stop-word file name used during indexing
   */
  public Indexer(UserSettings settings) {
    this.settings = settings;
    this.wAnalyzer = settings.getWordAnalyzer();
  }

  /**
   * Finishes the current file: records its total character count and moves
   * it onto the completed-file list.
   *
   * @param totalBytes total character/byte count of the file just parsed
   * @throws IllegalStateException if no file is in progress
   */
  public void endOfStream(int totalBytes) {
    if (curFileInfo == null) {
      throw new IllegalStateException();
    }

    curFileInfo.setCharacters(totalBytes);
    fileInfos.addLast(curFileInfo);
    curFileInfo = null;
  }

  /**
   * Writes the complete index (format documented on the class) and closes
   * the stream.
   *
   * @param out destination stream; closed by this method
   * @throws IOException on any write failure
   * @throws IllegalStateException if a file is still in progress
   */
  public void finish(DataOutputStream out) throws IOException {
    if (curFileInfo != null) {
      throw new IllegalStateException();
    }

    // 1: Date of time of construction (equivalent to new Date().getTime()).
    out.writeLong(System.currentTimeMillis());

    // 2: Variable break characters — every code point in the configurable
    // range that the analyzer treats as a break, terminated by a 0 byte.
    CharacterAnalyzer characterAnalyzer = settings.getCharacterAnalyzer();
    for (int i = CharacterAnalyzer.NONBREAK_BEGIN; i < CharacterAnalyzer.NONBREAK_END; ++i) {
      if (characterAnalyzer.isBreakChar(i)) {
        out.writeByte(i);
      }
    }
    out.writeByte(0);

    // 3: stop word file (absolute path, or "" when no stop file configured)
    String stopFileName = settings.getStopFileName();
    if (stopFileName != null) {
      out.writeUTF(new File(stopFileName).getAbsolutePath());
    } else {
      out.writeUTF("");
    }

    // 4: the construction directory
    String curDir = System.getProperty("user.dir");
    out.writeUTF(curDir);

    // 5: corpus stats, aggregated over every completed file
    long corpusChars = 0, corpusWords = 0;
    int corpusFiles = 0;
    for (Iterator it = fileInfos.iterator(); it.hasNext();) {
      FileInfo info = (FileInfo) it.next();
      corpusChars += info.getCharacters();
      corpusWords += info.getWords();
      ++corpusFiles;
    }
    out.writeLong(corpusChars);
    out.writeLong(corpusWords);
    out.writeInt(corpusFiles);

    // 6: each file path
    for (Iterator it = fileInfos.iterator(); it.hasNext();) {
      FileInfo info = (FileInfo) it.next();
      out.writeUTF(info.getFilePath());
    }

    // Create the master wordList for the corpus (also builds each
    // FileInfo's bit set against it — see createPrunedWordList).
    ArrayList wordList = createPrunedWordList();

    // 7: the corpus word list, preceded by its size
    int wordListCount = wordList.size();
    out.writeInt(wordListCount);
    for (int i = 0; i < wordListCount; ++i) {
      out.writeUTF((String) wordList.get(i));
    }

    // 8: each file's bit set, exactly wordListCount bits per file so a
    // reader can pair bit i with word i of section 7.
    // BUGFIX: the loop previously ran to bitSet.size(), but BitSet.size()
    // is the internal capacity rounded up to a multiple of 64 — not the
    // logical bit count — so the per-file bit runs did not match the word
    // list length written above.
    BitWriter bitWriter = new BitWriter(out);
    for (Iterator it = fileInfos.iterator(); it.hasNext();) {
      FileInfo info = (FileInfo) it.next();
      BitSet bitSet = info.getBitSet();
      for (int i = 0; i < wordListCount; ++i) {
        bitWriter.write(bitSet.get(i));
      }
    }

    bitWriter.finish();
    out.close();
  }

  /**
   * Records one parsed word for the current file. Every word counts toward
   * the file's word total; only non-stop words are added to its word set.
   *
   * @param word raw word as parsed; lower-cased here before filtering
   * @throws IllegalStateException if no file is in progress
   */
  public void processWord(String word) {
    if (curFileInfo == null) {
      throw new IllegalStateException();
    }
    curFileInfo.countWord();
    word = Util.toLowerCase(word);
    if (wAnalyzer.isStopWord(word)) {
      return;
    }

    curFileInfo.add(word);
  }

  /**
   * Begins indexing a new file.
   *
   * @param fileName path of the file; stored as an absolute path
   * @throws IllegalStateException if a previous file was not ended via
   *         endOfStream
   */
  public void startNewFile(String fileName) {
    if (curFileInfo != null) {
      throw new IllegalStateException();
    }
    String absPath = new File(fileName).getAbsolutePath();
    curFileInfo = new FileInfo(absPath);
  }

  /**
   * Builds the master word list: merges every file's word set, drops words
   * that are useless for discrimination (appearing in fewer than 2 files,
   * or in every file), sorts the survivors, and has each FileInfo build its
   * membership bit set against the sorted list.
   *
   * @return the sorted, pruned corpus word list
   */
  private ArrayList createPrunedWordList() {
    // Frequency: the number of distinct files a particular word appears in.

    // Merge all corpus words into a single map of word => frequency
    Map mergeMap = new HashMap();
    for (Iterator it = fileInfos.iterator(); it.hasNext();) {
      FileInfo info = (FileInfo) it.next();
      Set wordSet = info.getWordSet();
      for (Iterator word_it = wordSet.iterator(); word_it.hasNext();) {
        String word = (String) word_it.next();
        Integer freq = (Integer) mergeMap.get(word);
        if (freq == null) {
          freq = new Integer(1);
        } else {
          freq = new Integer(freq.intValue() + 1);
        }
        mergeMap.put(word, freq);
      }
    }

    // Filter out words that appear in every file (no discriminating power)
    // or in only one file. Removal goes through the iterator to avoid
    // ConcurrentModificationException.
    for (Iterator it = mergeMap.entrySet().iterator(); it.hasNext();) {
      Map.Entry e = (Entry) it.next();
      Integer freq = (Integer) e.getValue();
      if (freq.intValue() < 2 || freq.intValue() == fileInfos.size()) {
        it.remove();
      }
    }

    // Sort the surviving words alphabetically (TreeSet keeps natural order)
    Set sortedSet = new TreeSet();
    sortedSet.addAll(mergeMap.keySet());
    ArrayList sortedList = new ArrayList();
    sortedList.addAll(sortedSet);

    // Use the list to create a membership bit set for each FileInfo:
    // bit i is set iff the file contains word i of sortedList.
    for (Iterator it = fileInfos.iterator(); it.hasNext();) {
      FileInfo info = (FileInfo) it.next();
      info.createBitSet(sortedList);
    }

    return sortedList;
  }

  // File currently being parsed; null between endOfStream and startNewFile.
  private FileInfo curFileInfo = null;
  // Completed files, in parse order (order is significant for sections 6/8).
  private final LinkedList fileInfos = new LinkedList();
  private final UserSettings settings;
  private final WordAnalyzer wAnalyzer;
}
