#include "BartTokenizer.h"

#include <fstream>
#include <limits>
#include <memory>
#include <vector>

#include "nlohmann/json.hpp"
using json = nlohmann::json;
using namespace bart;
static std::wstring convertToUnicode(const std::string &text) {
  size_t i = 0;
  std::wstring ret;
  while (i < text.size()) {
    wchar_t codepoint;
    utf8proc_ssize_t forward =
        utf8proc_iterate((utf8proc_uint8_t *)&text[i], text.size() - i,
                         (utf8proc_int32_t *)&codepoint);
    if (forward < 0)
      return L"";
    ret += codepoint;
    i += forward;
  }
  return ret;
}

static std::string convertFromUnicode(const std::wstring &wText) {
  char dst[64];
  std::string ret;
  for (auto ch : wText) {
    utf8proc_ssize_t num = utf8proc_encode_char(ch, (utf8proc_uint8_t *)dst);
    if (num <= 0)
      return "";
    ret += std::string(dst, dst + num);
  }
  return ret;
}

// Splits a wide string into a vector of single-character wide strings
// (the initial symbol sequence fed to the BPE merge loop).
std::vector<std::wstring> splitString(const std::wstring &str) {
  std::vector<std::wstring> res;
  res.reserve(str.size()); // one entry per character, avoid reallocations
  for (const wchar_t ch : str) {
    res.emplace_back(1, ch); // construct the 1-char string in place
  }
  return res;
}

std::vector<wstr_pair> getPairs(const std::vector<std::wstring> &word) {
  std::vector<wstr_pair> res = {};
  for (auto it = word.begin(); it != word.end(); ++it) {
    auto next = ++it;
    it--;
    if (next == word.end()) {
      break;
    }
    res.push_back({*it, *next});
  }
  return res;
}

std::shared_ptr<Vocab> loadVocabFromJson(const std::string &file) {
  std::ifstream f(file);
  json data = json::parse(f);
  std::shared_ptr<Vocab> vocab(new Vocab);
  for (auto &iter : data.items()) {
    std::wstring token = convertToUnicode(iter.key());
    if (token.empty())
      continue;
    (*vocab)[token] = iter.value();
  }
  return vocab;
}

// Joins `words` with `space` between consecutive entries.
// Fixes UB in the original: an empty input fell off the end of the function
// without executing any return statement.
std::wstring composeWord(const std::vector<std::wstring> &words,
                         const char space = ' ') {
  std::wstring res;
  for (auto it = words.begin(); it != words.end(); ++it) {
    if (it != words.begin())
      res += space; // separator before every entry except the first
    res += *it;
  }
  return res; // empty input -> empty string
}

// Returns the pair with the lowest merge rank in bpe_rank.
// If no pair is ranked, returns pairs[0]; the caller (bpe) detects this by
// re-checking membership in bpe_rank. Precondition: `pairs` is non-empty.
// (Signature kept by-value to match the header declaration.)
wstr_pair BartTokenizer::get_min(const std::vector<wstr_pair> pairs) {
  int min_idx = 0;
  // The original used INT16_MAX, which silently ignored any merge rank
  // above 32767 — BART's merges.txt has ~50k entries.
  int min_val = std::numeric_limits<int>::max();
  int counter = 0;
  for (const auto &pair : pairs) {
    // Single lookup: find() then reuse the iterator (the original did a
    // second lookup via the non-const operator[]).
    auto it = this->bpe_rank.find(pair);
    if (it != this->bpe_rank.end() && it->second < min_val) {
      min_idx = counter;
      min_val = it->second;
    }
    counter++;
  }
  return pairs[min_idx];
}

// Maps a character to the printable surrogate used by byte-level BPE:
// characters in the printable latin-1 ranges pass through unchanged;
// everything else is shifted up by 256.
// NOTE(review): this differs from the official GPT-2 bytes_to_unicode table
// for bytes 127-160 and 173 (which are assigned sequentially there, not
// byte+256) — presumably acceptable for this simplified mock; verify
// against the vocab if non-ASCII accuracy matters.
wchar_t byte_encode(const wchar_t &c) {
  const int cp = static_cast<int>(c);
  const bool printable = (cp >= 33 && cp <= 126) ||
                         (cp >= 161 && cp <= 255 && cp != 173);
  return printable ? c : static_cast<wchar_t>(cp + 256);
}

// Loads the BPE merge table from <dir_path>merges.txt and the vocabulary
// from <dir_path>vocab.json. Each merges.txt line "left right" is assigned
// an increasing rank (lower rank = earlier/higher-priority merge).
BartTokenizer::BartTokenizer(const std::string &dir_path) {
  std::ifstream read_file(dir_path + "merges.txt",
                          std::ios::binary | std::ios::in);
  std::string line;
  int counter = 0;
  // First line of merges.txt is a version header — skip it.
  std::getline(read_file, line);
  while (std::getline(read_file, line)) {
    std::string left, right;
    std::stringstream sstream(line);
    std::getline(sstream, left, ' ');
    std::getline(sstream, right, ' ');
    // The file is read in binary mode, so CRLF files leave a trailing '\r'
    // on `right`. Strip it only when present: the original unconditionally
    // removed the last character, corrupting every entry of LF-only files
    // (the format HuggingFace ships).
    if (!right.empty() && right.back() == '\r')
      right.pop_back();
    this->bpe_rank[std::make_pair(convertToUnicode(left),
                                  convertToUnicode(right))] = counter;
    counter++;
  }
  read_file.close();
  vocab = loadVocabFromJson(dir_path + "vocab.json");
}

// Debug helper: dumps each sub-word symbol on its own line between markers.
// NOTE(review): mixing std::cout and std::wcout fixes the orientation of
// stdout; acceptable for ad-hoc debugging only.
void printWord(const std::vector<std::wstring> &strs) {
  std::cout << "debug message" << std::endl;
  for (const auto &str : strs) { // const ref: avoid copying each wstring
    std::wcout << str << std::endl;
  }
  std::cout << "finish debug" << std::endl;
}

// Applies byte-pair encoding to `token`: start from single characters and
// repeatedly merge the adjacent pair with the lowest rank in bpe_rank until
// no ranked pair remains. Returns the final symbols joined by single spaces
// (the caller splits on ' ' to recover individual sub-words).
std::wstring BartTokenizer::bpe(const std::wstring &token) {
  auto word = splitString(token);
  auto pairs = getPairs(word);
  if (pairs.empty()) {
    // Zero- or one-character token: nothing to merge.
    return token;
  }
  while (true) {
    // Lowest-ranked candidate pair. get_min falls back to pairs[0] when no
    // pair is ranked, which the find() below rejects -> loop terminates.
    wstr_pair bigram = this->get_min(pairs);
    if (this->bpe_rank.find(bigram) == this->bpe_rank.end()) {
      break;
    }
    auto first = bigram.first;
    auto second = bigram.second;
    // Rebuild the symbol sequence, merging every adjacent (first, second)
    // occurrence into a single symbol.
    std::vector<std::wstring> new_word;
    new_word = {};
    int i = 0;
    while (i < word.size()) {
      // Copy symbols verbatim up to the next occurrence of `first`;
      // if there is none, copy the tail and stop.
      auto it = std::find(word.begin() + i, word.end(), first);
      if (it == word.end()) {
        new_word.insert(new_word.end(), word.begin() + i, word.end());
        break;
      } else {
        new_word.insert(new_word.end(), word.begin() + i, it);
        i = it - word.begin();
      }

      // Merge when `second` immediately follows `first`; otherwise emit
      // `first` unchanged and continue scanning after it.
      if (word.at(i) == first && i + 1 < word.size() &&
          word.at(i + 1) == second) {
        new_word.push_back(first + second);
        i += 2;
      } else {
        new_word.push_back(first);
        i += 1;
      }
    }
    word = new_word;
    if (word.size() == 1) {
      // Fully merged; no pairs left to consider.
      break;
    } else {
      pairs = getPairs(word);
    }
  }
  std::wstring process_token = composeWord(word);
  return process_token;
}

// Splits the input with the pre-tokenization regex (this->pat), byte-encodes
// each piece, runs BPE on it, and collects the space-separated sub-words.
std::vector<std::wstring> BartTokenizer::tokenize(const std::string &str) {
  std::regex words_regex(this->pat);
  std::vector<std::wstring> res;
  const auto matches_end = std::sregex_iterator();
  for (auto match_it = std::sregex_iterator(str.begin(), str.end(), words_regex);
       match_it != matches_end; ++match_it) {
    // Decode the matched piece and remap each character through the
    // byte-level encoding in place.
    std::wstring piece = convertToUnicode(match_it->str());
    for (auto &wc : piece) {
      wc = byte_encode(wc);
    }
    // bpe() returns merged symbols joined by spaces; split them back out.
    std::wstringstream parts(this->bpe(piece));
    std::wstring sub_word;
    while (std::getline(parts, sub_word, L' ')) {
      res.push_back(sub_word);
    }
  }
  return res;
}

// a simple mock for BartTokenizer (reduce some logic)
// Maps each sub-word token to its vocabulary id; tokens missing from the
// vocab map to id 3 (<unk> in this mock setup).
std::vector<int>
BartTokenizer::convert_to_ids(const std::vector<std::wstring> &tokens) {
  std::vector<int> ids;
  ids.reserve(tokens.size());
  for (const auto &token : tokens) { // const ref: no per-token wstring copy
    // Single lookup: reuse the find() iterator instead of find() + at().
    auto it = this->vocab->find(token);
    if (it != this->vocab->end()) {
      ids.push_back(it->second);
    } else {
      // 3 represent the <unk>
      ids.push_back(3);
    }
  }
  return ids;
}

// Debug helper: prints the id sequence as "[ 1, 2, 3,]"
// (a trailing comma is kept, matching the original output format).
void printEncoding(const std::vector<int> &ids) {
  std::cout << "[";
  for (std::size_t idx = 0; idx < ids.size(); ++idx) {
    std::cout << " " << ids[idx] << ",";
  }
  std::cout << "]" << std::endl;
}

// Assembles the BART sentence-pair id layout: <s> A </s> </s> B </s>
// where id 0 = <s> (cls) and id 2 = </s> (sep/eos).
std::vector<int> constructIds(const std::vector<int> &text_encoding,
                              const std::vector<int> &text_pair_encoding) {
  std::vector<int> res;
  // Exact final size is known: both encodings plus 4 special tokens.
  res.reserve(text_encoding.size() + text_pair_encoding.size() + 4);
  // 0 represent for cls
  res.push_back(0);
  res.insert(res.end(), text_encoding.begin(), text_encoding.end());
  // 2 represent for seq
  res.push_back(2);
  res.push_back(2);
  res.insert(res.end(), text_pair_encoding.begin(), text_pair_encoding.end());
  res.push_back(2);
  return res;
}

// Encodes a sentence pair: tokenize both segments, map them to ids, and
// assemble the <s> A </s></s> B </s> id sequence via constructIds.
std::vector<int> BartTokenizer::encode(const std::string &text,
                                       const std::string &text_pair) {
  const auto first_ids = this->convert_to_ids(tokenize(text));
  const auto second_ids = this->convert_to_ids(tokenize(text_pair));
  return constructIds(first_ids, second_ids);
}