// Constructing a DFA from a trie of forbidden words.
// This is a modification of Aho-Corasick algorithm for pattern matching,
// adjusted to the case of LexminDFA where each forbidden word is a lexmin
// representative of a class of words that are equal up to rename of letters.

#include "dfa_construct.h"

#include <sstream>
#include <string>

#include "grate/dfa.h"

namespace growth_rate {

// Return number formed by bits in positions data[offset..offset+n).
// Bits are stored LSB-first: the bit at the lowest position becomes the
// least significant bit of the result. Requires 0 < n <= 8 * sizeof(size_t).
static inline size_t unpack_bits(const uint8_t *data, size_t offset, size_t n) {
  CHECK_GT(n, 0);
  data += offset / kBitsInByte;  // first byte containing the field
  offset %= kBitsInByte;         // bit position inside that byte
  size_t result = (*data) >> offset;
  size_t result_bits = kBitsInByte - offset;
  if (n < result_bits) {
    // The whole field lies inside the first byte: keep only its low n bits.
    result &= ((1ULL << n) - 1);
    return result;
  }
  n -= result_bits;
  while (n >= kBitsInByte) {
    data++;
    // The cast is required: "*data" promotes to int, and shifting an int by
    // result_bits >= 32 (fields spanning more than ~4 bytes) is undefined
    // behavior. Widen to size_t before shifting.
    result |= static_cast<size_t>(*data) << result_bits;
    result_bits += kBitsInByte;
    n -= kBitsInByte;
  }
  if (n > 0) {
    // Trailing partial byte: take its low n bits.
    data++;
    size_t head = (*data) & ((1ULL << n) - 1);
    result |= head << result_bits;
  }
  return result;
}

// Write bits of "value" to bits in positions data[offset..offset+n).
// Counterpart of unpack_bits above: bits are stored LSB-first, i.e. the
// least significant bit of "value" lands at the lowest bit position.
// NOTE(review): assumes n < 64 — otherwise "1ULL << n" in the check below
// is undefined; confirm callers never pack a full 64-bit field.
static inline void pack_bits(uint8_t *data, size_t offset, size_t n,
                             size_t value) {
  CHECK_LT(value, 1ULL << n);
  data += offset / kBitsInByte;  // first byte touched by the field
  offset %= kBitsInByte;         // bit position inside that byte
  // First (possibly partial) byte: build a mask of the bits belonging to
  // the field, clear them, then OR in the low bits of value.
  uint8_t mask = ~((1ULL << offset) - 1);
  size_t bits_written = kBitsInByte - offset;
  if (n < bits_written) {
    // The whole field fits inside this first byte: shrink mask to n bits.
    mask &= ((1ULL << (offset + n)) - 1);
    bits_written = n;
  }
  *data &= ~mask;
  *data |= ((value << offset) & mask);
  n -= bits_written;
  // Full bytes in the middle are overwritten wholesale.
  while (n >= kBitsInByte) {
    data++;
    mask = (1ULL << kBitsInByte) - 1;
    *data = (value >> bits_written) & mask;
    bits_written += kBitsInByte;
    n -= kBitsInByte;
  }
  // Trailing partial byte: keep its high bits, replace its low n bits.
  if (n > 0) {
    data++;
    mask = (1ULL << n) - 1;
    *data &= ~mask;
    *data |= (value >> bits_written) & mask;
  }
}

// ---------- NodeData
// Allocates the packed per-node storage and resets every field.
// Per-node layout (in bits): lf | last | sigma[alphabet] | back[alphabet].
NodeData::NodeData(const DFAInterface *dfa)
    : dfa_(dfa),
      lf_size_(Log2(RoundUpToPowerOfTwo(dfa_->size()))),
      letter_size_(Log2(RoundUpToPowerOfTwo(dfa_->alphabet_size() + 1))),
      last_size_(letter_size_),
      sigma_size_(dfa_->alphabet_size() * letter_size_),
      back_size_(dfa_->alphabet_size()),
      node_size_in_bits_(lf_size_ + last_size_ + sigma_size_ + back_size_),
      data_size_in_bytes_((dfa_->size() * node_size_in_bits_ + kBitsInByte - 1)
                          / kBitsInByte) {
  data_ = AllocateArray<uint8_t>(data_size_in_bytes_);
  // Every node starts with lf = 0, an undefined "last" letter, all sigma
  // entries undefined, and all back flags cleared.
  const size_t num_nodes = dfa_->size();
  const size_t num_letters = dfa_->alphabet_size();
  for (size_t v = 0; v < num_nodes; v++) {
    set_lf(v, 0);
    set_last(v, undefined_letter());
    for (size_t a = 0; a < num_letters; a++) {
      set_sigma(v, a, undefined_letter());
      set_back(v, a, false);
    }
  }
}

// Releases the packed per-node data buffer.
NodeData::~NodeData() {
  FreeArray(&data_);
}

// Reads the failure-link ("lf") field, stored at the start of the node's
// record; the result is checked to be a valid node index.
size_t NodeData::get_lf(size_t node) const {
  CHECK_LT(node, dfa_->size());
  const size_t lf = unpack_bits(data_, node * node_size_in_bits_, lf_size_);
  CHECK_LT(lf, dfa_->size());
  return lf;
}

// Reads the "last" field, which follows "lf" in the node's record.
// The value may equal alphabet_size() (see undefined_letter()).
uint8_t NodeData::get_last(size_t node) const {
  CHECK_LT(node, dfa_->size());
  const uint8_t last =
      unpack_bits(data_, node * node_size_in_bits_ + lf_size_, last_size_);
  CHECK_LE(last, dfa_->alphabet_size());
  return last;
}

// Reads entry "letter" of the node's sigma array, located after the "lf"
// and "last" fields; each entry occupies letter_size_ bits.
uint8_t NodeData::get_sigma(size_t node, uint8_t letter) const {
  CHECK_LT(node, dfa_->size());
  CHECK_LT(letter, dfa_->alphabet_size());
  const size_t bit = node * node_size_in_bits_ + lf_size_ + last_size_ +
                     letter * letter_size_;
  const uint8_t sigma = unpack_bits(data_, bit, letter_size_);
  CHECK_LE(sigma, dfa_->alphabet_size());
  return sigma;
}

// Reads the single-bit "back" flag for (node, letter); the back bits sit
// at the end of the node's record, after the sigma array.
bool NodeData::get_back(size_t node, uint8_t letter) const {
  CHECK_LT(node, dfa_->size());
  CHECK_LT(letter, dfa_->alphabet_size());
  const size_t bit = node * node_size_in_bits_ + lf_size_ + last_size_ +
                     sigma_size_ + letter;
  return unpack_bits(data_, bit, 1) != 0;
}

// Writes the failure-link ("lf") field at the start of the node's record.
void NodeData::set_lf(size_t node, size_t lf) {
  CHECK_LT(node, dfa_->size());
  CHECK_LT(lf, dfa_->size());
  pack_bits(data_, node * node_size_in_bits_, lf_size_, lf);
}

// Writes the "last" field (may be undefined_letter(), hence CHECK_LE).
void NodeData::set_last(size_t node, uint8_t last) {
  CHECK_LT(node, dfa_->size());
  CHECK_LE(last, dfa_->alphabet_size());
  pack_bits(data_, node * node_size_in_bits_ + lf_size_, last_size_, last);
}

// Writes entry "letter" of the node's sigma array (sigma may be
// undefined_letter(), hence CHECK_LE).
void NodeData::set_sigma(size_t node, uint8_t letter, uint8_t sigma) {
  CHECK_LT(node, dfa_->size());
  CHECK_LT(letter, dfa_->alphabet_size());
  CHECK_LE(sigma, dfa_->alphabet_size());
  const size_t bit = node * node_size_in_bits_ + lf_size_ + last_size_ +
                     letter * letter_size_;
  pack_bits(data_, bit, letter_size_, sigma);
}

// Writes the single-bit "back" flag for (node, letter).
void NodeData::set_back(size_t node, uint8_t letter, bool back) {
  CHECK_LT(node, dfa_->size());
  CHECK_LT(letter, dfa_->alphabet_size());
  const size_t bit = node * node_size_in_bits_ + lf_size_ + last_size_ +
                     sigma_size_ + letter;
  pack_bits(data_, bit, 1, back);
}

// Renders all fields of one node as a human-readable string, for debugging.
std::string NodeData::dump_node(size_t node) const {
  CHECK_LT(node, dfa_->size());
  std::stringstream out;
  out << "[node = " << node << ": "
      << "lf = " << get_lf(node) << ", "
      << "last = " << static_cast<int>(get_last(node)) << ", "
      << "sigma = <";
  for (uint8_t a = 0; a < dfa_->alphabet_size(); a++) {
    if (a > 0) out << ", ";
    out << static_cast<int>(get_sigma(node, a));
  }
  out << ">, back = <";
  for (uint8_t a = 0; a < dfa_->alphabet_size(); a++) {
    if (a > 0) out << ", ";
    out << static_cast<int>(get_back(node, a));
  }
  out << ">]";
  return out.str();
}

// Copies the whole sigma array of src_node into dest_node.
void NodeData::copy_sigma(size_t dest_node, size_t src_node) {
  CHECK_LT(dest_node, dfa_->size());
  CHECK_LT(src_node, dfa_->size());
  for (uint8_t a = 0; a < dfa_->alphabet_size(); a++) {
    set_sigma(dest_node, a, get_sigma(src_node, a));
  }
}

// Copies the node's sigma array into "array", which must have room for
// at least alphabet_size() entries.
void NodeData::copy_sigma_to_array(size_t node, uint8_t *array) {
  CHECK_LT(node, dfa_->size());
  for (uint8_t a = 0; a < dfa_->alphabet_size(); a++) {
    array[a] = get_sigma(node, a);
  }
}

// Composes "permut" (an array indexed by letter) with the node's sigma:
// each defined entry s becomes permut[s]; undefined entries are left as is.
void NodeData::apply_to_sigma(size_t node, uint8_t *permut) {
  CHECK_LT(node, dfa_->size());
  for (uint8_t a = 0; a < dfa_->alphabet_size(); a++) {
    const uint8_t s = get_sigma(node, a);
    if (s == undefined_letter()) continue;
    CHECK_LE(permut[s], dfa_->alphabet_size());
    set_sigma(node, a, permut[s]);
  }
}

// -------- NodeQueue
// Fixed-capacity ring buffer of node ids holding up to "size" elements.
// One extra slot is allocated so that head == tail unambiguously means
// "empty" (a full queue stops one slot short of head).
NodeQueue::NodeQueue(size_t size)
    : size_(size + 1),
      head_index_(0),
      tail_index_(0) {
  CHECK_GT(size, 0);
  queue_ = AllocateArray<size_t>(size_);
}

// Releases the ring buffer.
NodeQueue::~NodeQueue() {
  FreeArray(&queue_);
}

bool NodeQueue::push(size_t node) {
  size_t next_tail_index_ = (tail_index_ + 1) % size_;
  if (next_tail_index_ == head_index_)
    return false;
  queue_[tail_index_] = node;
  tail_index_ = next_tail_index_;
  return true;
}

// Removes and returns the element at the head of the queue.
// The queue must be non-empty (checked).
size_t NodeQueue::pop() {
  CHECK(!empty());
  const size_t front = queue_[head_index_];
  head_index_ = (head_index_ + 1) % size_;
  return front;
}

// True iff the queue holds no elements (head has caught up with tail).
bool NodeQueue::empty() const {
  return (head_index_ == tail_index_);
}


// -------- DFA construction

// Prerequisite: for a given node "lf" (failure link) and "last" (used as
// the largest letter encountered so far) should be precalculated.
// Sets "sigma" and "back" arrays for a given node, calculates lf and last
// for its sons and pushes them to queue, Aho-Corasick style.
static void construct_dfa_node(LexminDFA *dfa, NodeData *node_data,
                               NodeQueue *node_queue, size_t node) {
  CHECK_LT(node, dfa->size());
  uint8_t cur_last = node_data->get_last(node);
  // Only letters 0..cur_last+1 are processed explicitly; transitions for
  // larger letters are filled in by the second loop below.
  // NOTE(review): presumably in a lexmin word any letter above cur_last+1
  // behaves identically to cur_last+1 — confirm against LexminTrie docs.
  for (uint8_t letter = 0; letter <= cur_last + 1; letter++) {
    if (letter >= dfa->alphabet_size()) break;
    int next_node = dfa->get_next(node, letter);
    size_t lf_node = node_data->get_lf(node);
    if (next_node == LexminDFA::UnknownNode) {
      // No trie edge for this letter: route through the failure node.
      // "d" is the letter to use in lf_node's frame: either the rename
      // recorded in sigma, or — if no mapping exists yet — the first
      // fresh letter of lf_node, i.e. last(lf_node) + 1 (0 when lf_node
      // has seen no letters at all).
      uint8_t d = node_data->get_sigma(node, letter);
      if (d == node_data->undefined_letter()) {
        uint8_t last_for_lf_node = node_data->get_last(lf_node);
        if (last_for_lf_node == node_data->undefined_letter()) {
          d = 0;
        } else {
          d = last_for_lf_node + 1;
        }
      }
      dfa->set_next(node, letter, dfa->get_next(lf_node, d));
      node_data->set_back(node, letter, true);  // mark as a failure edge
    } else if (next_node != LexminDFA::FakeSink) {
      // Real trie son: initialize its metadata and enqueue it for BFS.
      node_queue->push(next_node);
      node_data->set_last(next_node, Max(letter, cur_last));
      node_data->copy_sigma(next_node, node);
      // Same "d" computation as in the failure branch above; additionally
      // the freshly chosen letter is recorded in the son's sigma.
      uint8_t d = node_data->get_sigma(node, letter);
      if (d == node_data->undefined_letter()) {
        uint8_t last_for_lf_node = node_data->get_last(lf_node);
        if (last_for_lf_node == node_data->undefined_letter()) {
          d = 0;
        } else {
          d = last_for_lf_node + 1;
        }
        node_data->set_sigma(next_node, letter, d);
      }
      node_data->set_lf(next_node, dfa->get_next(lf_node, d));
      // Walk up the chain of failure links while the transition taken was
      // itself a failure ("back") edge, composing the letter renames
      // accumulated along the chain into the son's sigma.
      size_t z_node = lf_node;
      while (node_data->get_back(z_node,
                 node_data->get_sigma(next_node, letter))) {
        uint8_t temp_sigma[256];
        node_data->copy_sigma_to_array(z_node, temp_sigma);
        // If z_node has no rename for this letter, treat it as the first
        // fresh letter of z_node's failure target.
        if (temp_sigma[node_data->get_sigma(next_node, letter)] ==
            node_data->undefined_letter()) {
          temp_sigma[node_data->get_sigma(next_node, letter)] =
            node_data->get_last(node_data->get_lf(z_node)) + 1;
        }
        node_data->apply_to_sigma(next_node, temp_sigma);
        z_node = node_data->get_lf(z_node);
      }
    }
  }

  // Every letter above cur_last+1 reuses the previous letter's transition
  // (cascading from cur_last+1) together with its "back" flag.
  for (uint8_t letter = cur_last + 2; letter < dfa->alphabet_size(); letter++) {
    dfa->set_next(node, letter, dfa->get_next(node, letter - 1));
    if (node_data->get_back(node, letter - 1)) {
      node_data->set_back(node, letter, true);
    }
  }
}

// Builds the DFA from the trie of forbidden words by processing nodes in
// BFS order (see construct_dfa_node above).
LexminDFA::LexminDFA(LexminTrie *trie)
    : DFAInterface(trie->size(), trie->alphabet_size()) {
  // Take ownership of the trie's edges; reduce the array's size if needed.
  edges_ = trie->take_edges();
  ReallocateArray(&edges_, size_ * alphabet_size_);
  // RAII: stack-allocated helpers instead of the former new/delete pair, so
  // nothing leaks if a CHECK or an exception fires during construction.
  NodeData node_data(this);
  // Initialize StartNode: its failure link is itself, no letters seen yet.
  node_data.set_lf(StartNode, StartNode);
  node_data.set_last(StartNode, node_data.undefined_letter());
  // Run over all nodes in the order of BFS. We can assume that the
  // number of vertices in the queue will not exceed the width of the trie,
  // i.e. the number of leaf nodes (the size of the set of forbidden words).
  NodeQueue node_queue(trie->number_of_leaf_nodes());
  if (StartNode + 1 < size_) {
    // Every letter out of StartNode leads to the same second node (the
    // target of letter 0, checked below).
    int second_node = StartNode + 1;
    CHECK_EQ(second_node, get_next(StartNode, 0));
    for (uint8_t letter = 1; letter < alphabet_size_; letter++) {
      set_next(StartNode, letter, second_node);
    }
    node_data.set_lf(second_node, StartNode);
    node_data.set_last(second_node, 0);
    CHECK(node_queue.push(second_node));
  }
  while (!node_queue.empty()) {
    construct_dfa_node(this, &node_data, &node_queue, node_queue.pop());
  }
}

}  // namespace growth_rate
