package org.apache.lucene.index;

import java.io.File;
import java.io.IOException;
import java.util.Properties;

import jdbm.RecordManager;
import jdbm.RecordManagerFactory;
import jdbm.htree.HTree;

import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.store.Directory;

public class TagIndex {
  /** Target number of documents per block. */
  public static final int DOCS_PER_BLOCK = 25000;
  /** Soft lower bound on documents per block (currently unused here). */
  public static final int LOWER_DOCS_PER_BLOCK = 20000;
  /** Hard upper bound: blocks larger than this force an extra block. */
  public static final int HIGHER_DOCS_PER_BLOCK = 30000;
  private int numDocsPerBlock;
  FieldDatabase fieldDatabase;
  Directory directory;
  File fileDirectory; // NOTE(review): never assigned in this file — confirm it is set externally, otherwise the .fdt file lands in the working directory
  int numBlocks;
  int[] docsPerBlock; // number of documents in each block; entries sum to maxDoc
  int[] starts;       // starts[i] = doc id of the first document of block i; starts[docsPerBlock.length] == maxDoc
  int maxDoc;
  String rootSegment;
  TagFieldInfos tagFieldInfos; // cached copy of the persisted TagFieldInfos (refreshed by addField)

  /**
   * Creates a tag index over {@code maxDoc} documents, partitioning them into
   * blocks of roughly {@link #DOCS_PER_BLOCK} documents and opening (or
   * creating) the backing JDBM field database for {@code rootSegment}.
   *
   * @param maxDoc      total number of documents in the index
   * @param rootSegment segment name used to derive the database file name
   * @throws IOException if the JDBM record manager cannot be opened
   */
  public TagIndex(int maxDoc, String rootSegment) throws IOException {
    this.maxDoc = maxDoc;
    this.rootSegment = rootSegment;
    docsPerBlock = getDocBlocks(maxDoc);
    int total = 0;
    starts = new int[docsPerBlock.length + 1];
    for (int x = 0; x < docsPerBlock.length; x++) {
      starts[x] = total;
      // BUGFIX: was "total += starts[x]", which never advanced total past 0,
      // leaving every start offset (and the final sentinel) at 0.
      total += docsPerBlock[x];
    }
    starts[docsPerBlock.length] = total; // == maxDoc
    fieldDatabase = new FieldDatabase();
    // Populate the cache advertised by getTagFieldInfos(); may be null for a
    // freshly created database.
    tagFieldInfos = fieldDatabase.getTagFieldInfos();
  }

  /**
   * Registers a new tag field and persists the updated field infos.
   *
   * @param name the field name to add
   * @return the number assigned to the new field
   * @throws IOException if the field database cannot be updated
   */
  public int addField(String name) throws IOException {
    return fieldDatabase.addTagFieldInfo(name);
  }

  /**
   * Returns the cached {@link TagFieldInfos}, loaded at construction time and
   * refreshed whenever {@link #addField(String)} succeeds. May be {@code null}
   * if the backing database has no field infos yet.
   */
  public TagFieldInfos getTagFieldInfos() {
    return tagFieldInfos;
  }

  /**
   * Partitions {@code maxDoc} documents into blocks of roughly
   * {@link #DOCS_PER_BLOCK} documents each. The block count starts at
   * {@code floor(maxDoc / DOCS_PER_BLOCK)} and is bumped to the ceiling when
   * that would push a block past {@link #HIGHER_DOCS_PER_BLOCK} (the
   * {@code maxDoc / 0.0 == Infinity} case also lands here, giving a single
   * block when {@code maxDoc < DOCS_PER_BLOCK}). Any remainder is folded into
   * the last block so the returned entries always sum to {@code maxDoc}.
   *
   * @param maxDoc total number of documents; non-positive values yield an
   *               empty partition
   * @return per-block document counts summing to {@code maxDoc}
   */
  public static int[] getDocBlocks(int maxDoc) {
    if (maxDoc <= 0) {
      // Guard: the arithmetic below would otherwise throw ArithmeticException
      // ("% 0") for maxDoc == 0.
      return new int[0];
    }
    double r = (double) maxDoc / (double) DOCS_PER_BLOCK;
    double value = Math.floor(r);
    if ((maxDoc / value) > HIGHER_DOCS_PER_BLOCK) {
      value = Math.ceil(r);
    }
    int perBlock = (int) ((double) maxDoc / value);
    int[] array = new int[(int) value];
    for (int x = 0; x < array.length; x++) {
      array[x] = perBlock;
    }
    // Fold the division remainder into the last block so totals match maxDoc.
    int mod = maxDoc % perBlock;
    if (mod > 0) {
      array[array.length - 1] += mod;
    }
    return array;
  }

  /**
   * Returns the number of documents in block {@code i}.
   *
   * @throws ArrayIndexOutOfBoundsException if {@code i} is not a valid block index
   */
  public int getNumDocsForBlock(int i) {
    return docsPerBlock[i];
  }

  /**
   * Persistent field store backed by a JDBM hash database ({@link HTree}).
   * The database file is {@code fileDirectory/rootSegment + ".fdt"}; the
   * hashtable is registered under the named object {@code "fieldata"}.
   */
  // TODO: make each value multiversioned
  public class FieldDatabase {
    public static final String TAG_FIELD_INFOS_KEY = "tagfieldinfoskey";
    private RecordManager recordManager;
    HTree hashtable;
    Object addTagFieldInfoLock = new Object();

    public FieldDatabase() throws IOException {
      Properties props = new Properties();
      File file = new File(fileDirectory, rootSegment + ".fdt");
      recordManager = RecordManagerFactory.createRecordManager(file.getAbsolutePath(), props);
      // Reopen the existing named hashtable if the database already exists;
      // otherwise create it and register it under "fieldata".
      long recid = recordManager.getNamedObject("fieldata");
      if (recid != 0) {
        hashtable = HTree.load(recordManager, recid);
      } else {
        hashtable = HTree.createInstance(recordManager);
        recordManager.setNamedObject("fieldata", hashtable.getRecid());
      }
    }

    /**
     * Assigns the next field number to {@code fieldName} and persists the
     * updated {@link TagFieldInfos}, refreshing the outer class's cache.
     */
    private int addTagFieldInfo(String fieldName) throws IOException {
      synchronized (addTagFieldInfoLock) {
        TagFieldInfos infos = getTagFieldInfos();
        // NOTE(review): a freshly created database returns null here, which
        // would NPE below — confirm the infos record is seeded elsewhere.
        int num = infos.getMaxNum() + 1;
        infos.add(fieldName, num);
        hashtable.put(TAG_FIELD_INFOS_KEY, infos);
        // TODO: recordManager.commit() may be required for durability — verify
        // against the JDBM transaction configuration.
        tagFieldInfos = infos; // keep the outer cached copy in sync
        return num;
      }
    }

    /** Loads the persisted {@link TagFieldInfos}, or {@code null} if absent. */
    private TagFieldInfos getTagFieldInfos() throws IOException {
      return (TagFieldInfos) hashtable.get(TAG_FIELD_INFOS_KEY);
    }

    /**
     * Reads the stored field data for {@code doc}.
     *
     * @param version currently ignored (values are not yet multiversioned)
     */
    public TagFieldData get(int doc, long version) throws IOException {
      byte[] bytes = (byte[]) hashtable.get(Integer.valueOf(doc));
      return new TagFieldData(bytes);
    }

    /**
     * Overwrites the stored field data for {@code doc}.
     *
     * @param version currently ignored (values are not yet multiversioned)
     */
    public void update(int doc, long version, TagFieldData fieldData) throws IOException {
      // TODO: merge the previously stored bytes for doc with fieldData instead
      // of overwriting (the old dead read of the prior value was removed).
      hashtable.put(Integer.valueOf(doc), fieldData.getBytes());
    }
  }
}
