/**
 * \file lex.c
 **/

#include "lex.h"
#include "mem.h"
#include "re.h"
#include "string.h"
#include "log.h"

void r_lex_init(RLexer* lexer, int environments)
{
  /* Prepare the dynamic arrays: the rule table and the environment stack
   * used by r_lex_tokenize to push/pop lexing environments. */
  lexer->rules.el_size = sizeof(RLexRule);
  r_vector_init(&(lexer->rules));
  lexer->stack.el_size = sizeof(int);
  r_vector_init(&(lexer->stack));

  /* One NFA per lexing environment. Each NFA is rooted at a split node so
   * that every rule added later (see r_lex_add) becomes one alternative
   * branch under that root. */
  lexer->environments = environments;
  lexer->nfas = (NFANode**) r_mem_malloc(environments * sizeof(NFANode*));
  for (int env = 0; env < environments; env++){
    NFANode* root = r_mem_malloc(sizeof(NFANode));
    /* 8 looks like the split node's initial child capacity —
     * NOTE(review): confirm against r_nfanode_init_split. */
    r_nfanode_init_split(root, 8);
    lexer->nfas[env] = root;
  }
}

void r_lex_add(RLexer* lexer, char* pattern, int id, int env, int to_env)
{
  /* Register a lexing rule: compile `pattern` into an NFA fragment and
   * attach it under environment `env`'s split root. The rule's position
   * in the rules vector doubles as the ID of the TERM node in the NFA,
   * which is how r_lex_tokenize maps a match back to its rule. */
  int term_id = lexer->rules.size;
  r_nfanode_add_child(lexer->nfas[env], r_re_compile(pattern, term_id));

  /* Record the token ID and the environment transition for this rule. */
  RLexRule rule = {id, to_env};
  r_vector_append(&(lexer->rules), &rule);
}

/* Tokenize lexer->input, returning a newly allocated token list, or NULL
 * if some position in the input matches no rule of the current
 * environment. Each matched lexeme is copied into a fresh NUL-terminated
 * string owned by the returned list.
 * NOTE(review): on the NULL-return path the partially filled token_list
 * (and its tokens) is leaked — freeing it needs a destroy helper not
 * visible here; confirm and clean up in a follow-up. */
RVector* r_lex_tokenize(RLexer* lexer)
{
  char* cur = lexer->input;
  char* end = cur + strlen(lexer->input);
  int env = 0; /* current environment */

  RVector* token_list = (RVector*) r_mem_malloc(sizeof(RVector));
  r_tok_list_init(token_list);

  while (cur != end){
    int matched_id;
    int matched_len;
    if (!r_re_match(cur, lexer->nfas[env], true, &matched_id, &matched_len)){
      /* cur - lexer->input is ptrdiff_t; %d would be a format mismatch
       * (undefined behavior), so widen to long and use %ld. */
      r_debug(R_WARNING, "failed to match at location %ld",
                           (long)(cur - lexer->input));
      return NULL;
    }
    assert(matched_len > 0);

    /* Get the token ID and to_env from the rules table. matched_id is the
     * rule's index in the rules vector (see r_lex_add). */
    RLexRule rule;
    r_vector_get_into(&(lexer->rules), matched_id, &rule);

    if (rule.id != 0){
      /* Copy the lexeme into a NUL-terminated string. The copy is made
       * only when a token is actually emitted, so id == 0 "consume only"
       * rules no longer leak the allocation. */
      char* token = r_mem_malloc(matched_len + 1);
      memcpy(token, cur, matched_len);
      token[matched_len] = 0;
      r_tok_list_add(token_list, rule.id, token);
      r_debug(R_DETAILED, "added a token of type %d", rule.id);
    } /* Otherwise, just consume and don't add a token to the list */

    /* Move forward in the input. */
    cur += matched_len;

    /* Deal with the environment */
    if (rule.to_env == -1){
      /* if to_env is -1, pop the previous environment.
       * NOTE(review): no guard against popping an empty stack — a rule
       * set with an unbalanced -1 transition would underflow; confirm
       * r_vector_pop's behavior on an empty vector. */
      r_vector_pop(&(lexer->stack), &env);
    } else  if (env != rule.to_env){
      /* Environment change, push the environment on the stack. */
      r_vector_append(&(lexer->stack), &env);
      env = rule.to_env;
    }
  }
  return token_list;
}
