#include "lexer.hpp"

#include <algorithm>
#include <stdexcept>
#include <cctype>
#include <sstream>
#include <map>

// Single-character tokens: maps each operator / punctuation character
// to its TokenType. Lookup is done per input character in Lexer::tokenize.
const std::map<char, TokenType> OPERATORS = {
    {'+', PLUS},
    {'-', MINUS},
    {'*', STAR},
    {'/', SLASH},

    {'(', LPAR},
    {')', RPAR},

    {'=', EQU}
};

// Reserved words: an alphabetic run that exactly matches a key here is
// emitted as the mapped keyword token instead of a generic WORD.
const std::map<const std::string, TokenType> KEYWORDS = {
    {"return", RETURN},
    {"typedef", TYPEDEF}
};

// Characters consumed without producing any token. Note that ';' is
// deliberately treated like whitespace here (statement terminators are
// discarded by the lexer, not passed on).
const char CHARS_SKIP[] = {
    ' ', '\t', '\n', ';'
};

// Streaming tokenizer over an in-memory source string.
//
// Walks `code` one character at a time and appends tokens to `output`.
// The referenced string is NOT copied; it must outlive the Lexer.
class Lexer {
public:
    explicit Lexer(const std::string& code)
        : code(code), length(code.length()), current_pos(0) {}

    // Scans the whole input and returns the token stream.
    // Throws std::runtime_error on the first unrecognized character.
    std::vector<Token> tokenize() {
        while (current_pos < length) {
            char current = peek();

            if (is_digit(current))
                tokenize_number();
            else if (OPERATORS.find(current) != OPERATORS.end())
                tokenize_op();
            else if (is_alpha(current))
                tokenize_word();
            else if (std::find(std::begin(CHARS_SKIP), std::end(CHARS_SKIP), current) != std::end(CHARS_SKIP))
                next();
            else
                // Include the offending character in the diagnostic.
                throw std::runtime_error(std::string("Unknown token: '") + current + "'");
        }
        return output;
    }
private:
    // std::isdigit/std::isalpha invoke undefined behavior when given a
    // plain char with a negative value; route through unsigned char.
    static bool is_digit(char c) {
        return std::isdigit(static_cast<unsigned char>(c)) != 0;
    }

    static bool is_alpha(char c) {
        return std::isalpha(static_cast<unsigned char>(c)) != 0;
    }

    // Consumes a run of digits and emits a NUMBER token. Underscores are
    // accepted as digit separators (e.g. 1_000) and stripped from the value.
    void tokenize_number() {
        std::stringstream buffer;
        char current = peek();

        while (true) {
            if (is_digit(current)) {
                buffer << current;
                current = next();
            } else if (current == '_') {
                current = next();  // separator: skip, keep scanning digits
            } else {
                break;
            }
        }

        push_token(NUMBER, buffer.str());
    }

    // Emits the single-character operator token at the cursor and advances.
    void tokenize_op() {
        push_token(OPERATORS.at(peek()));
        next();
    }

    // Consumes an alphabetic run; emits a keyword token if the run matches
    // an entry in KEYWORDS, otherwise a WORD token carrying the text.
    void tokenize_word() {
        std::stringstream buffer;
        char current = peek();

        while (is_alpha(current)) {
            buffer << current;
            current = next();
        }

        const std::string word = buffer.str();
        auto it = KEYWORDS.find(word);  // single lookup instead of find + at
        if (it != KEYWORDS.end()) {
            push_token(it->second);
        } else {
            push_token(WORD, word);
        }
    }

    // Advances the cursor and returns the new current character
    // ('\0' once past the end).
    char next() {
        current_pos++;
        return peek();
    }

    char peek() {
        return peek(0);
    }

    // Looks ahead `pos` characters without consuming; '\0' signals EOF.
    char peek(std::string::size_type pos) {
        std::string::size_type new_pos = current_pos + pos;
        if (new_pos >= length) return '\0';
        return code[new_pos];
    }

    void push_token(TokenType type) {
        push_token(type, "");
    }

    void push_token(TokenType type, std::string value) {
        output.push_back(Token(type, std::move(value)));
    }

    const std::string& code;             // borrowed; must outlive the lexer
    std::string::size_type length;       // unsigned, matches string length
    std::string::size_type current_pos;  // cursor into `code`
    std::vector<Token> output;
};

std::vector<Token> tokenize(const std::string& code) {
    return Lexer(code).tokenize();
}
