#include <glib.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "Lexer.h"
#include "util.h"


/*****************************************************************
 * Private API.
 *****************************************************************/
/* Reads one token at the current position and appends it to the buffer. */
void lexer_read_token(Lexer* lexer);
/* Removes (and frees) up to n tokens from the front of the token buffer. */
void lexer_flush(Lexer* lexer, int n);
/* Frees every buffered token and empties the buffer. */
void lexer_flush_all(Lexer* lexer);
/* Rewinds the read position past all buffered tokens, then flushes them. */
void lexer_drop_all(Lexer* lexer);
/* Reports a fatal syntax error; does not return (g_error() aborts). */
void lexer_syntax_error(Lexer* lexer, const char* msg);

/* Reads one token from the current input position using the most recently
 * pushed lexicon and appends it to the token buffer.  When the input is
 * exhausted an "EOF" token (with NULL matches) is appended instead.
 * Aborts via lexer_syntax_error() when no pattern matches. */
void lexer_read_token(Lexer* lexer)
{
  /* The active lexicon is the last one pushed (stack semantics).
   * NOTE(review): last_item is NULL when no lexicon was ever pushed and
   * the dereference below would crash — confirm callers guarantee one. */
  GList*       last_item = g_list_last(lexer->lexicons);
  Lexicon*     lexicon   = (Lexicon*)last_item->data;
  LexiconIter* iter      = NULL;
  Token*       token     = NULL;

  /* End reached? Signal EOF.
   * The EOF token carries no match vector (NULL); consumers that call
   * vector_get() on token->matches must handle that. */
  if (lexer_is_done(lexer)) {
    token = token_new("EOF", NULL);
    lexer->token_buffer = g_list_append(lexer->token_buffer, token);
    return;
  }

  /* Walk through all patterns of the current lexicon and try
   * to find a match. */
  iter = lexicon_iter_new(lexicon);
  while (iter) {
    const char* name    = lexicon_iter_get_name(iter);
    Regex*      regex   = lexicon_iter_get_regex(iter);
    const char* current = lexer->input + lexer->current_char;
    Vector*     matches = regex_match(regex, current);

    if (matches != NULL) {
      /* Element 0 is presumably the full match text — TODO confirm in the
       * Regex API.  The token takes ownership of the matches vector. */
      const char* match = vector_get(matches, 0);

      /* Append the token to the list. */
      token = token_new(name, matches);
      lexer->token_buffer = g_list_append(lexer->token_buffer, token);

      /* Advance past the matched text and keep the line counter in sync
       * with the newlines the match contained. */
      lexer->current_char += strlen(match);
      while (*match != 0)
        if (*(match++) == '\n')
          lexer->current_line++;

      lexicon_iter_free(iter);
      return;
    }

    /* NOTE(review): presumably lexicon_iter_next() returns NULL past the
     * last pattern — confirm against the Lexicon API. */
    iter = lexicon_iter_next(iter);
  }

  /* Ending up here a match was not found.  iter is NULL at this point;
   * NOTE(review): confirm lexicon_iter_free(NULL) is a safe no-op.
   * lexer_syntax_error() does not return. */
  lexicon_iter_free(iter);
  lexer_syntax_error(lexer, "Invalid syntax.");
}

/* Removes up to n tokens from the front of the token buffer, freeing
 * each one.  Stops early when the buffer runs out. */
void lexer_flush(Lexer* lexer, int n)
{
  for (; n > 0 && lexer->token_buffer != NULL; n--) {
    GList* head = lexer->token_buffer;
    token_free(head->data);
    lexer->token_buffer = g_list_delete_link(lexer->token_buffer, head);
  }
}

/* Frees every buffered token and the buffer list itself, leaving the
 * lexer with an empty token buffer.  The read position is untouched
 * (use lexer_drop_all() to rewind as well). */
void lexer_flush_all(Lexer* lexer)
{
  /* g_list_free_full() frees each element with the given destructor and
   * then the list links — replaces the hand-rolled walk + g_list_free. */
  g_list_free_full(lexer->token_buffer, (GDestroyNotify)token_free);
  lexer->token_buffer = NULL;
}

/* Rewinds the read position back over every buffered token (so the same
 * input will be re-tokenized, e.g. after the lexicon stack changed) and
 * then flushes the buffer. */
void lexer_drop_all(Lexer* lexer)
{
  GList* item = lexer->token_buffer;
  while (item) {
    Token* token = (Token*)item->data;
    /* The EOF token is created with NULL matches and consumed no input;
     * skipping it avoids vector_get(NULL, ...)/strlen(NULL). */
    if (token->matches != NULL) {
      const char* match = (const char*)vector_get(token->matches, 0);
      lexer->current_char -= strlen(match);
      /* lexer_read_token() bumped current_line for every newline in the
       * match, so rewinding the characters must rewind the lines too. */
      while (*match != 0)
        if (*(match++) == '\n')
          lexer->current_line--;
    }
    item = item->next;
  }
  lexer_flush_all(lexer);
}

/* Reports a fatal lexer error with the current line for context.
 * g_error() aborts the program, so this function does not return. */
void lexer_syntax_error(Lexer* lexer, const char* msg)
{
  /* The lexer parameter was previously unused; include its position so
   * the user can locate the offending input. */
  g_error("Syntax error on line %ld: %s\n",
          (long)lexer->current_line,
          msg);
}


/*****************************************************************
 * Public API.
 *****************************************************************/
/* Allocates and returns a fresh lexer with an empty token buffer and an
 * empty lexicon stack.  Free with lexer_free(). */
Lexer* lexer_new(void)
{
  /* g_malloc0() zero-fills the struct; the explicit NULLs below just
   * make the empty-list invariants obvious. */
  Lexer* self = g_malloc0(sizeof(*self));

  self->token_buffer = NULL;
  self->lexicons     = NULL;

  return self;
}

/* Destroys the lexer: frees all buffered tokens, all remaining lexicons,
 * and the lexer itself.  Safe to call with NULL.  The input string is
 * not freed (the lexer never owned it). */
void lexer_free(Lexer* lexer)
{
  if (lexer == NULL)
    return;
  /* lexer_pop_lexicon() frees popped lexicons with lexicon_free(), i.e.
   * the lexer owns them — so any still on the stack must be freed here
   * too, not just the list links (previously leaked). */
  g_list_free_full(lexer->lexicons, (GDestroyNotify)lexicon_free);
  lexer_flush_all(lexer);
  g_free(lexer);
}

/* Points the lexer at a new input string and resets the read position to
 * its start (line 1, offset 0).  The string is NOT copied — the caller
 * must keep it alive for as long as the lexer uses it. */
void lexer_set_input(Lexer* lexer, const char* input)
{
  lexer->input        = input;
  lexer->input_length = strlen(input);
  lexer->current_char = 0;
  lexer->current_line = 1;
}

/* Returns 1 once the read position has consumed the whole input,
 * 0 otherwise. */
short int lexer_is_done(Lexer* lexer)
{
  if (lexer->current_char < lexer->input_length)
    return 0;
  return 1;
}

/* Pushes a lexicon onto the stack, making it the active one.  Buffered
 * tokens were produced under the old lexicon, so the read position is
 * rewound and the buffer dropped — the input is re-read under the new
 * rules on the next lexer_get_token().  The lexer takes ownership of
 * the lexicon (it is freed on pop / lexer_free). */
void lexer_push_lexicon(Lexer* lexer, Lexicon* lexicon)
{
  lexer_drop_all(lexer);
  lexer->lexicons = g_list_append(lexer->lexicons, lexicon);
}

/* Pops and frees the most recently pushed lexicon, then rewinds/drops the
 * buffered tokens so the input is re-read under the previous lexicon. */
void lexer_pop_lexicon(Lexer* lexer)
{
  GList* last = g_list_last(lexer->lexicons);

  /* Popping an empty lexicon stack previously dereferenced NULL; treat
   * it as a caller bug and bail out with a warning (GLib idiom). */
  g_return_if_fail(last != NULL);

  lexicon_free((Lexicon*)last->data);
  lexer->lexicons = g_list_delete_link(lexer->lexicons, last);
  lexer_drop_all(lexer);
}

/* Returns the current (front-most) token without consuming it, reading a
 * new one from the input on demand.  The token remains owned by the
 * lexer; it is freed by lexer_flush()/lexer_flush_all(). */
Token* lexer_get_token(Lexer* lexer)
{
  /* A NULL head means "empty" — previously g_list_length() walked the
   * entire list (O(n)) just to test that. */
  if (lexer->token_buffer == NULL)
    lexer_read_token(lexer);
  /* token_buffer already points at the first link, so g_list_first()
   * was redundant.  lexer_read_token() either appends a token (EOF
   * included) or aborts, so the buffer is non-empty here. */
  return (Token*)lexer->token_buffer->data;
}

/* Compares the token at the head of the stream against the given token
 * (via token_compare) without consuming it. */
short int lexer_current_is(Lexer* lexer, Token* token)
{
  return token_compare(lexer_get_token(lexer), token);
}

/* Consumes the current token only when it matches the given one.
 * Returns 1 when a token was consumed, 0 otherwise. */
short int lexer_next_if(Lexer* lexer, Token* token)
{
  if (lexer_current_is(lexer, token)) {
    lexer_flush(lexer, 1);
    return 1;
  }
  return 0;
}

/* Tries to match the given list of Regex* in sequence, back to back, at
 * the current input position.  On success, consumes the matched text,
 * updates the line counter, and returns a Vector of the match strings
 * (one per regex, in order) owned by the caller.  Returns NULL (with the
 * lexer position unchanged) as soon as any regex fails to match. */
Vector* lexer_next_if_re_list(Lexer* lexer, GList* re_list)
{
  GList*      item    = re_list;
  GList*      result  = NULL;
  Vector*     matches = NULL;
  long        pos     = 0;      /* running input offset while matching   */
  int         nl      = 0;      /* newlines seen in the consumed text    */
  const char* match   = NULL;

  // We need to drop anything in the buffer first, as else it would be
  // invalid in case this function matches.
  lexer_drop_all(lexer);
  pos = lexer->current_char;

  // Match against any given regular expressions.
  while (item) {
    Regex* re = (Regex*)item->data;
    matches   = regex_match(re, lexer->input + pos);
    //printf("Matching: '%s' against '%s'\n", re->pattern, lexer->input + pos);

    // All items must match; on the first failure, free what was
    // collected so far and leave the lexer position untouched.
    if (!matches) {
      //printf("Doh\n");
      if (result)
        g_list_free_with_data(result);
      return NULL;
    }
    //printf("Yeh\n");

    // Ending up here, the item did match.  Record the full match text
    // (element 0), advance past it, and count its newlines.
    // NOTE(review): the match string is kept in `result` while its
    // containing vector is freed right below — this assumes vector_free()
    // does not free the element strings; confirm against the Vector API
    // (otherwise these pointers dangle).
    match  = vector_get(matches, 0);
    pos   += strlen(match);
    result = g_list_append(result, (void*)match);
    while (*match != 0)
      if (*(match++) == '\n')
        nl++;
    vector_free(matches);

    item = item->next;
  }

  // Ending up here, all of the given regular expressions matched. Copy the
  // result into a vector.  Only now is the lexer state advanced.
  // NOTE(review): g_list_free_with_data() is not a GLib function —
  // presumably a util.h helper that frees the element strings as well;
  // verify it exists and matches the ownership assumption above.
  lexer->current_char  = pos;
  lexer->current_line += nl;
  matches              = vector_new(g_list_length(result));
  item                 = result;
  pos                  = 0;     /* reused as the output vector index */
  while (item) {
    vector_set(matches, pos, item->data);
    item = item->next;
    pos++;
  }
  g_list_free(result);

  return matches;
}

/* Consumes the current token if it matches the given one; otherwise
 * reports a syntax error through the sender node.  The message names the
 * expected token (by its match text when available, by type otherwise)
 * and what was actually found. */
void lexer_expect(Node* sender, Lexer* lexer, Token* token)
{
  Token*      cur_token  = NULL;
  const char* cur_string = NULL;
  const char* tok_string = NULL;
  char        error[201] = "";

  /* Matched: the token has been consumed, nothing to report. */
  if (lexer_next_if(lexer, token))
    return;

  /* Look the strings up only on the error path, and guard against
   * tokens without a match vector: the EOF token is created with NULL
   * matches, so the unconditional vector_get() calls used to crash. */
  cur_token  = lexer_get_token(lexer);
  cur_string = (cur_token->matches != NULL)
             ? (const char*)vector_get(cur_token->matches, 0)
             : NULL;
  tok_string = (token->matches != NULL)
             ? (const char*)vector_get(token->matches, 0)
             : NULL;
  if (cur_string == NULL)
    cur_string = cur_token->type; /* e.g. EOF has no match text */

  if (tok_string == NULL)
    snprintf(error,
             sizeof(error),
             "Expected %s but got %s\n",
             token->type,
             cur_string);
  else
    snprintf(error,
             sizeof(error),
             "Expected \"%s\" but got %s \"%s\"\n",
             tok_string,
             cur_token->type,
             cur_string);

  /* Record where the error happened before delegating the report. */
  sender->character = lexer->current_char;
  node_syntax_error(sender->parent, error);
}
