#include "lexicon.h"

// Process-wide singleton lexicon; installed by Lexicon::Load(), which
// CHECKs that it is set at most once per process.
Lexicon * G_LEXICON = NULL;

// Upper bound on lexicon size (16M entries).
// NOTE(review): not referenced anywhere in this file -- presumably
// enforced elsewhere; confirm.
const uint64 kLexiconMaxSize = (1 << 24);

// This has to be written to a regular file
void Lexicon::Store(string data_filename) {
  string fn = data_filename + ".lexicon";
  ofstream output(fn.c_str(), ofstream::binary);
  Write(&output, size_);
  WriteSizePrefixed(&output, (const char *) strings_, StringsSize());
  WriteSizePrefixed(&output, (const char *) id_string_, 
		    size_ * sizeof(uint32));
  Write(&output, string_id_size_);
  WriteSizePrefixed(&output, (const char *) string_id_hash_, 
		    string_id_size_ * sizeof(uint32));
  // Store the counts also
  string fn2 = data_filename + ".token_counts";
  ofstream out2(fn2.c_str(), ostream::binary);
  out2.write((const char*)counts_, size_ * sizeof(uint64));
}

// This can be read from a sharded file
bool Lexicon::Load(string fn) {
  CHECK(!G_LEXICON);
  G_LEXICON = this;
  if (!File::Exists(fn)) {
    cout << "Missing lexicon file " << fn << endl;
    CHECK(false);
  }
  Memblock mb = File::Read(fn);
  // cout << "Lexicon memblock size:" << mb.size_ << endl;
  const char *p = mb.begin();
  Read(&p, &size_);
  strings_ = (char *) ReadSizePrefixed(&p);
  id_string_ = (uint32 *) ReadSizePrefixed(&p);
  Read(&p, &string_id_size_);
  string_id_hash_ = (uint32 *) ReadSizePrefixed(&p);
  return true;
}

// Loads <data_filename>.lexicon, first building and storing it from the
// raw data file if the cached lexicon file does not exist yet.
void Lexicon::EasyLoad(string data_filename) {
  string lexicon_filename = data_filename + ".lexicon";
  if (!File::Exists(lexicon_filename)) {
    BuildFromDataFile(data_filename);
    Store(data_filename);
  }
  Load(lexicon_filename);
}

// Builds the lexicon by tokenizing the memory-mapped data file in
// parallel, then round-trip-verifies the result.
void Lexicon::BuildFromDataFile(string data_filename) {
  Memblock block = Mmap2(data_filename);
  BuildFromCorpus(block, G_JQ);  // G_JQ: global worker-thread job queue
  Verify();
}

void Lexicon::Build(const vector<string> &strings) {
  uint32 n = strings.size();
  size_ = n;
  uint64 length_sum = 0;
  for (uint32 i=0; i<strings.size(); i++) length_sum += strings[i].size();
  length_sum += n;
  CHECK(length_sum == uint32(length_sum));
  strings_ = new char[length_sum];
  id_string_ = new uint32[n];
  string_id_size_ = 2 * size_ + 1;
  string_id_hash_ = new uint32[string_id_size_];
  for (uint32 i=0; i<string_id_size_; i++) string_id_hash_[i] = size_;
  uint32 strings_pos = 0;
  // cout << "n = " << n << endl;
  for (uint32 i=0; i<n; i++) {
    // cout << "i= " << i << " strings_pos=" << strings_pos << " strings[i] =" << strings[i] << endl;
    memcpy(strings_ + strings_pos, strings[i].c_str(), strings[i].size() + 1);
    id_string_[i] = strings_pos;
    strings_pos += strings[i].size() + 1;
    
    // insert into string_id_hash_
    uint64 h = Fingerprint(strings[i].c_str());
    uint64 pos = h % string_id_size_;
    uint64 step = 0;
    while (1) {
      if (string_id_hash_[pos] == size_) {
	string_id_hash_[pos] = i;
	break;
      }
      step++;
      pos = (pos + step) % string_id_size_;
    }
  }
}

// A job-queue task that tokenizes one byte-range shard of a memory block
// and feeds every token in the shard to ProcessToken().  Because shard
// boundaries are arbitrary byte offsets, the tokenizer is started early
// so it resynchronizes on a token boundary before the shard begins.
struct TokenTask : public JobQueue::Task {
  // Computes this shard's byte range [desired_start_, desired_end_).
  void Init(Memblock block, uint64 num_shards, uint64 shard) {
    block_ = block;
    desired_start_ = 
      block.data_ + ShardBegin(block.size_, num_shards, shard);
    desired_end_ = block.data_ + ShardEnd(block.size_, num_shards, shard);
  }
  // Invoked once per token whose start lies inside this shard's range.
  virtual void ProcessToken(Token *t) = 0;
  
  void Run() {
    cout << "start TokenTask" << endl;
    // Pre-fault this shard's pages before tokenizing.
    Touch(Memblock(desired_start_, desired_end_ - desired_start_));
    // Begin up to 1000 bytes early so a token straddling the shard start
    // is parsed from a consistent position, then skip tokens that belong
    // to the previous shard.
    char * early_start = max(desired_start_ - 1000, block_.data_);
    XMLTokenizer t;
    t.Init(early_start, block_.end());
    while (t.current_ < desired_start_) t.NextToken();
    real_start_ = t.current_;
    while (t.current_ < desired_end_) {
      // NOTE(review): GetNextToken()'s result is not null-checked --
      // assumes it cannot fail while current_ < desired_end_; confirm.
      Token * tok = t.GetNextToken();
      ProcessToken(tok);
    }
    real_end_ = desired_end_;
    cout << "end TokenTask" << endl;
  }

  Memblock block_;
  char * desired_start_;  // requested shard range (arbitrary byte offsets)
  char * desired_end_;
  char * real_start_;     // actual token boundary where processing began
  // NOTE(review): real_end_ is set to desired_end_, not to the
  // tokenizer's final position; the seam CHECK in RunTokenTask relies on
  // adjacent shards agreeing on this -- confirm.
  char * real_end_;
};

// Counts token occurrences within one shard; the per-shard maps are
// merged by Lexicon::BuildFromCorpus after all tasks finish.
struct TokenCountTask : public TokenTask {
  void ProcessToken(Token *t) {counts_[t->String()]++;}
  hash_map<string, uint64, StringHash> counts_;  // token string -> count
};

/*
struct EncodeTask : public TokenTask {
  void ProcessToken(Token *t) {counts_[t->String()]++;}
  hash_map<string, uint64, StringHash> counts_;
  Lexicon *lexicon_;
};
*/



// returns a vector of TokenTask *  which all must be deleted.
// Runs num_tasks TaskType instances over `block`, sharding the block
// evenly (see TokenTask::Init) and blocking until all tasks complete.
// Returns the task objects so callers can harvest per-shard results.
// returns a vector of TokenTask *  which all must be deleted.
template <class TaskType>
vector<TaskType *> RunTokenTask(Memblock block,
				JobQueue *jq,
				uint64 num_tasks) {
  vector<TaskType *> tasks;
  JobQueue::Job j(jq);
  for (uint64 i=0; i<num_tasks; i++) {
    TaskType * task = new TaskType;
    tasks.push_back(task);
    // Ownership is handed back to the caller, so the queue must not
    // delete the tasks itself.
    task->autodelete_ = false;
    task->Init(block, num_tasks, i);
    j.AddTask(task);      
  }
  j.Wait();
  cout << "Finished tasks" << endl;
  // Adjacent shards must tile the block exactly: each shard's real end
  // equals the next shard's real start.  (Bug fix: this previously used
  // `=` instead of `==`, which clobbered real_end_ and made the CHECK
  // pass whenever the pointer was non-null.)
  for (uint64 i=0; i+1<num_tasks; i++) {
    CHECK(tasks[i]->real_end_ == tasks[i+1]->real_start_);
  }
  return tasks;
}

// Counts tokens over the corpus in parallel (one TokenCountTask per
// worker thread), merges the per-shard counts, and builds the lexicon
// with ids assigned in decreasing count order (id 0 = most frequent
// token).  Also fills counts_[id] with each token's corpus count.
void Lexicon::BuildFromCorpus(Memblock block, JobQueue *jq) {
  hash_map<string, uint64, StringHash> string_count;
  uint64 num_tasks = ConfigInt("num_worker_threads");
  vector<TokenCountTask *> tasks 
    = RunTokenTask<TokenCountTask>(block, jq, num_tasks);
  for (uint64 i=0; i<num_tasks; i++) {
    cout << "Merging counts from shard " << i << endl;
    forall(run, tasks[i]->counts_) {
      string_count[run->first] += run->second;
    }
    delete tasks[i];  // RunTokenTask transferred ownership to us
  }
  // Sort (token, count) pairs by descending count; a token's position in
  // v becomes its lexicon id.
  vector<pair<string, uint64> > v(string_count.begin(), string_count.end());
  sort(v.begin(), v.end(), GreaterSecond<string, uint64>());
  uint64 n = v.size();
  vector<string> strings;
  cout << "#tokens = " << n << endl;
  counts_ = new uint64[n];  // NOTE(review): leaks any previous counts_ array
  for (uint64 i=0; i<n; i++) {
    if (i < 10) cout << v[i].second << " " << v[i].first << endl;
    strings.push_back(v[i].first);
    counts_[i] = v[i].second;
  }
  Build(strings);
}

void Lexicon::Verify() {
  for (uint64 i=0; i<size_; i++) {
    CHECK(StringToID(IDToString(i)) == i);
  }
  cout << "Verified lexicon of size " << size_ << endl;
}

// Decodes a byte-encoded sequence of token ids (as produced by Encode)
// back into the corresponding strings, joined by single spaces.
string Lexicon::MemblockToString(Memblock m) {
  string ret;
  bool first = true;
  for (char *p = m.begin(); p < m.end(); ) {
    uint32 i = ByteDecode(&p);
    // Bug fix: the old separator test `p != m.begin()` ran AFTER
    // ByteDecode had already advanced p, so it was true even for the
    // first token and produced a spurious leading space.
    if (!first) ret += " ";
    first = false;
    ret += IDToString(i);
  }
  return ret;
}

// Re-tokenizes data_filename and writes <data_filename>.encoded: one
// leading 0x00 marker byte followed by the variable-length byte-encoded
// id of every token.  Aborts (CHECK) on a token missing from the lexicon.
void Lexicon::Encode(string data_filename) {
  string encoded_filename = data_filename + ".encoded";
  ofstream output(encoded_filename.c_str(), ofstream::binary);
  char begin = 0x00;
  output.write(&begin, 1);
  XMLTokenizer xmlt;
  xmlt.InitFromFile(data_filename);
  Token *t;  
  string s;
  while ((t = xmlt.GetNextToken())) {
    s = string(t->start_, t->end_ - t->start_);
    uint32 id = StringToID(s.c_str());
    // Presumably Size() is the not-found sentinel (it matches the
    // hash table's empty-slot value in Build) -- confirm.
    CHECK(id != Size());
    char encoded[5];  // assumes ByteEncode writes at most 5 bytes -- confirm
    uint32 encoded_num_bytes;
    ByteEncode(id, encoded, &encoded_num_bytes);
    output.write(encoded, encoded_num_bytes);
    //cout << "id = " << id << " wrote " << encoded_num_bytes << " bytes" << endl;
  }
}

// Interactive console for inspecting the lexicon.  Commands:
//   q        quit
//   v        run Verify()
//   size     print the number of entries
//   string N print the string for id N
//   id S     print the id for string S
//   first N  print the first N (id, string) pairs
//   encode   encode the "data" file
void Lexicon::Explore() {
  string cmd;
  cout << "Ready to explore" << endl;
  while (1) {
    cout << ">";
    cin >> cmd;
    if (cmd == "q") break;
    if (cmd == "v") Verify();
    if (cmd == "size") cout << "size = " << Size() << endl;
    if (cmd == "string") {
      uint32 id;
      cin >> id;
      cout << id << " -> " << IDToString(id) << endl;      
    }
    if (cmd == "id") {
      string s;
      cin >> s;
      cout << s << " -> " << StringToID(s.c_str()) << endl;
    }
    if (cmd == "first") {
      uint32 n;
      cin >> n;
      // Print the first n ids, clamped to the lexicon size.  (Bug fix:
      // this previously tested `n < Size()` instead of `i < Size()`, so
      // asking for n >= Size() printed nothing at all.)
      for (uint32 i=0; i<n; i++) {
        if (i < Size()) cout << i << " -> " << IDToString(i) << endl;
      }
    }
    if (cmd == "encode") {
      Encode("data");
      cout << "Encoded" << endl;
    }
  }
}



// Standalone driver: builds or loads the lexicon for "data" and drops
// into the interactive explorer.
int lexicon_main() {
  Lexicon l;
  l.EasyLoad("data");
  l.Explore();
  return 0;
}
