#ifndef __LEXICON_H_
#define __LEXICON_H_

#include "tokenizer.h"
#include "nolock.h"
#include "jobqueue.h"
#include "file.h"

struct Lexicon;
extern Lexicon * G_LEXICON;

// a dense mapping between integers and strings.
// A dense bidirectional mapping between the integers [0, size_) and strings.
// Lookup string -> id uses an open-addressed hash table; id -> string is a
// direct offset into one contiguous character buffer.
struct Lexicon {
  char *strings_; // contains all the strings, null terminated, back to back
  uint32 * id_string_; // strings_+id_string_[i] starts the i'th string
  NLHash<uint32> * string_id_;
  uint64 string_id_size_; // number of slots in string_id_hash_
  uint32 * string_id_hash_; // open-addressed table; empty slots hold size_
  uint64 size_; // number of strings in the lexicon
  
  // To store the counts for adjunct data structures
  uint64 * counts_;

  // once you know what strings to put in the lexicon, build it. 
  void Build(const vector<string> & strings);

  // Number of strings in the lexicon.
  uint64 Size() { return size_;}

  // Total bytes occupied by strings_, including each terminating NUL.
  // Computed as the offset one past the terminator of the last string.
  // Requires a non-empty lexicon.
  uint64 StringsSize() {
    CHECK(size_ > 0);
    char * s = IDToString(size_ - 1);
    s += strlen(s) + 1; // skip the last string and its '\0'
    return s - strings_;
  }

  // Looks up the string [begin, end) (need not be null terminated).
  // Returns its id, or size_ as the not-found sentinel.
  uint32 StringToID(const char *begin, const char *end) {
    // Guard against an empty/unbuilt table: avoids mod-by-zero below.
    if (string_id_size_ == 0) return size_;
    uint64 len = end - begin;
    uint64 h = FingerprintCharArray(begin, len);
    uint64 pos = h % string_id_size_;
    uint64 step = 0;
    // Triangular probing (pos += 1, 2, 3, ...). Empty slots store size_,
    // so an unsuccessful probe chain always terminates there.
    while (1) {
      uint32 id = string_id_hash_[pos];
      if (id == size_) return size_; // empty slot: string not present
      // A match requires identical bytes AND identical length, i.e. the
      // candidate's terminator sits exactly at offset len.
      const char * candidate = strings_ + id_string_[id];
      if (!memcmp(candidate, begin, len) &&
	  (candidate[len] == '\0')) return id;
      step++;
      pos = (pos + step) % string_id_size_;
    }
  }
  // Convenience overload for null-terminated strings.
  uint32 StringToID(const char * s) { 
    return StringToID(s, s + strlen(s));
  }
  // Returns the (null-terminated) string for an id. No bounds check;
  // caller must pass id < size_.
  char * IDToString(uint32 id) {
    return strings_ + id_string_[id];
  }

  string MemblockToString(Memblock m);

  void Store(string data_filename);
  bool Load(string fn); // returns false on failure

  void EasyLoad(string data_filename); // assumes an XML tokenizer
  void BuildFromDataFile(string data_filename); // assumes an XML tokenizer
  
  void BuildFromCorpus(Memblock block, JobQueue *jq);
  void Verify();

  void Encode(string data_filename);
  void Explore();
};

int lexicon_main();

#endif
