#include "lexer.h"

using std::cerr;
using std::clog;
using std::endl;
using std::list;
using std::map;
using std::pair;
using std::set;
using std::string;

// Internal helpers (definitions below; not part of the public lexer API):
void detection(TokenGroup &token_list, string &word);
void case_symbol(TokenGroup &token_list, string::iterator &iterator);
void case_operator(TokenGroup &token_list, string::iterator &iterator);
void case_string(TokenGroup &token_list, string::iterator &iterator, unsigned long &line);
void case_number(TokenGroup &token_list, string::iterator &iterator, unsigned long &line);

// Scratch token shared by all helpers below; each helper fully assigns both
// fields before pushing, so no state leaks between calls.
// NOTE(review): file-scope mutable state makes the lexer non-reentrant and
// not thread-safe — consider making this a local in each helper.
pair<TokenTypes, string> token;

// Lexer
// Lexer: scan `content` character by character and produce the token stream.
// Dispatches on the current character class (symbol, operator, quote, digit,
// newline, NUL, other) and accumulates identifier/keyword characters in `word`
// until a separator flushes them via detection().
TokenGroup lexer(std::string content) {
    clock_t start = clock();
    unsigned long line = 1;     // current source line, for error reporting
    string word;                // pending identifier/keyword characters
    TokenGroup token_group = {};

    string::iterator iterator = content.begin();
    while (iterator != content.end()) {

        if (symbol_set.count(*iterator)) {
            // Symbol
            detection(token_group, word);
            case_symbol(token_group, iterator);
        } else if (single_operator_set.count(*iterator)) {
            // Operator (may merge with the next char into a two-char operator)
            detection(token_group, word);
            case_operator(token_group, iterator);
        } else if (*iterator == '\'' or *iterator == '\"') {
            // String literal
            detection(token_group, word);
            case_string(token_group, iterator, line);
        } else if ('0' <= *iterator and *iterator <= '9') {
            // Number
            detection(token_group, word);
            case_number(token_group, iterator, line);
        } else if (*iterator == '\n') {
            // Newline: bump line counter and flush the pending word.
            line++;
            detection(token_group, word);
        } else if (*iterator == '\0') {
            // Embedded NUL acts as an end-of-file separator.
            detection(token_group, word);
        } else {
            // Generally: spaces split words; any other character accumulates.
            // TODO(review): '\t' and '\r' currently accumulate into words —
            // confirm whether they should be treated as separators too.
            if (*iterator != ' ')
                word += *iterator;
            else
                detection(token_group, word);
        }

        iterator++;
    }
    // FIX: flush a trailing word so input that does not end with a separator
    // (newline/space/NUL) still emits its final token.
    detection(token_group, word);

    clock_t end = clock();

    // FIX: clock() returns ticks, not milliseconds — convert with CLOCKS_PER_SEC
    // so the logged duration matches the "ms" unit it claims.
    clog << "[Lexer]\t\t Complete lexical analysis in "
         << (end - start) * 1000 / CLOCKS_PER_SEC << "ms ";
    clog << "with " << token_group.size() << " tokens." << endl;
    return (token_group);
}

// Pretty-print a token list to clog as a JSON-like map of type : "value" pairs.
void output(TokenGroup content) {
    clog << "{";
    for (const auto &entry : content) {
        clog << "\n    " << entry.first << " : \"" << entry.second << "\",";
    }
    clog << "\b " << endl;
    clog << "}";
}

// Flush the accumulated word, if any: classify it as a keyword when it appears
// in keyword_set, otherwise as an identifier, append the token, and reset word.
void detection(TokenGroup &token_list, string &word) {
    if (word.empty())
        return;
    token.first = keyword_set.count(word) ? Keyword : Identifier;
    token.second = word;
    token_list.push_back(token);
    word.clear();
}

// Emit the single character under `iterator` as a Symbol token.
void case_symbol(TokenGroup &token_list, string::iterator &iterator) {
    token.first = Symbol;
    token.second = string(1, *iterator);
    token_list.push_back(token);
}

// Emit an Operator token. Peeks one character ahead: if the two-character
// sequence is a known double operator (e.g. "=="), consume both characters;
// otherwise step back and emit the single character.
// NOTE(review): *(++iterator) is dereferenced without a bounds check — this is
// undefined behavior when the operator is the very last character of the
// input. The caller must guarantee a trailing separator; fixing it properly
// would require passing the end iterator into this helper.
void case_operator(TokenGroup &token_list, string::iterator &iterator) {
    const char first = *iterator;
    string candidate;
    candidate += first;
    candidate += *(++iterator);     // iterator now on the second character

    token.first = Operator;
    if (double_operator_set.count(candidate)) {
        token.second = candidate;   // two-char operator; iterator stays ahead
    } else {
        --iterator;                 // restore position for the main loop
        token.second = first;
    }
    token_list.push_back(token);
}

// Scan a quoted literal and emit it as a String token (quotes excluded).
// The literal is terminated by the same quote character that opened it.
// A backslash is dropped and the character after it is appended verbatim
// (escape sequences are not translated). Newlines inside the literal still
// advance the line counter; an embedded NUL means the closing quote was
// never found and is a fatal error.
void case_string(TokenGroup &token_list, string::iterator &iterator, unsigned long &line) {
    const char delimiter = *iterator;   // opening ' or "
    string text;

    for (++iterator; *iterator != delimiter; ++iterator) {
        if (*iterator == '\\')
            ++iterator;                 // skip backslash, keep next char as-is
        if (*iterator == '\n')
            line += 1;
        if (*iterator == '\0') {
            cerr << "[Lexer]\t\t Error : You might have missed a quote";
            cerr << " that prevented proper lexical analysis." << endl;
            exit(1);
        }
        text += *iterator;
    }
    // Loop leaves `iterator` on the closing quote; the main loop steps past it.

    token.first = String;
    token.second = text;
    token_list.push_back(token);
}

// Scan a numeric literal (digits with at most one decimal point) and emit it
// as a Number token. A second '.' is reported to cerr but scanning continues,
// so the malformed text still ends up in the token.
// NOTE(review): the loop dereferences `iterator` without a bounds check — if
// the number is the last thing in the input with no trailing separator this
// reads past the end of the string.
void case_number(TokenGroup &token_list, string::iterator &iterator, unsigned long &line) {
    string digits;
    bool seen_decimal_point = false;

    while (('0' <= *iterator && *iterator <= '9') || *iterator == '.') {
        if (*iterator == '.') {
            if (seen_decimal_point)
                cerr << "[Lexer]\t\t Error on line:" << line << " Numbers are not legal." << endl;
            else
                seen_decimal_point = true;
        }
        digits += *iterator;
        ++iterator;
    }

    token.first = Number;
    token.second = digits;
    token_list.push_back(token);
    --iterator;     // step back onto the last digit; the main loop advances past it
}