#include <cctype>
#include <iomanip>
#include "tokenizer.h"

// Allocates a Token at the tokenizer's current position; `text`, curLineno_,
// curCol_ and `this` must be in scope (only usable inside Tokenizer methods).
// NOTE: no trailing semicolon — callers terminate the statement themselves.
// (The original macro ended with ';', which produced a stray empty statement
// at every `token = NEW_TOKEN(t);` use site.)
#define NEW_TOKEN(t) \
    new Token(t, curLineno_, curCol_, text, this)

// Opens an if/else-if chain matching `text` against a literal spelling.
#define CHECK_BEGIN(t,s) \
    if (text == s) { \
        token = NEW_TOKEN(t); \
    }

// Continues the chain started by CHECK_BEGIN.
#define CHECK_NEXT(t,s) \
    else if (text == s) { \
        token = NEW_TOKEN(t); \
    }

// Closes the chain: anything unmatched becomes a plain identifier.
#define CHECK_END() \
    else { \
        token = NEW_TOKEN(TokenType::IDENTIFIER); \
    }

// Closes the chain with an error: unmatched text throws unless `ignore_`
// is set, in which case it degrades to an identifier token.
#define CHECK_END_FAIL(e) \
    else { \
        if (!ignore_) { \
            throw std::runtime_error(e); \
        } else { \
            token = NEW_TOKEN(TokenType::IDENTIFIER); \
        } \
    }

constexpr char null_c = '\0';                 // sentinel for "no character read"
constexpr std::int32_t PRINT_LINE_LIMIT = 5;  // context lines shown around an error

// Returns true for horizontal blanks (space, tab) and line terminators.
// The cast to unsigned char is required: passing a plain (possibly negative)
// char to std::isblank is undefined behavior per the C standard.
inline bool isBlank(char c) {
    return (std::isblank(static_cast<unsigned char>(c)) || (c == '\n') || (c == '\r'));
}

// Delegates context printing to the tokenizer that produced this token,
// which owns the full token list needed to reconstruct surrounding lines.
void Token::printLine(std::ostream& os) {
    tokenizer_->printLine(*this, os);
}

// The tokenizer owns every Token it allocates. Tokens live either in
// tokenList_ (consumed) or tokenStack_ (pushed back via unget()); the two
// containers never share a pointer, so both must be freed. The original
// destructor only freed tokenList_, leaking any tokens left on the stack.
Tokenizer::~Tokenizer() {
    for (auto& t : tokenList_) {
        delete t;
    }
    tokenList_.clear();
    for (auto& t : tokenStack_) {
        delete t;
    }
    tokenStack_.clear();
}

// Reads one character to classify the next token, then dispatches to the
// matching sub-scanner (identifier, blank, or symbol). The classifying
// character is pushed back for identifier/symbol scans so the sub-scanner
// sees the full lexeme. Returns Token::nullToken() on EOF/stream failure.
Token& Tokenizer::step(std::istream& is) {
    char c = null_c;
    if (is.eof() || is.bad() || is.get(c).fail()) {
        return Token::nullToken();
    }
    // Cast to unsigned char: std::isalnum on a negative char is UB.
    if (std::isalnum(static_cast<unsigned char>(c)) || (c == '_')) {
        is.unget();
        return getIdentifier(is);
    } else if (isBlank(c)) {
        return getBlank(c);
    } else {
        is.unget();
        return getSymbol(is);
    }
}

// Returns how many columns a token's text advances the cursor:
// 0 for line terminators, a fixed width of 4 for a tab, otherwise
// the character count of the text itself.
std::int32_t calcColumn(Token& token) {
    const auto& text = token.getText();
    if (text == "\r" || text == "\n") {
        return 0;
    }
    if (text == "\t") {
        return 4;
    }
    return static_cast<std::int32_t>(text.size());
}

// Replays the most recently ungot token, moving it from the unget stack
// back onto the consumed list and restoring the cursor position from it.
// Returns Token::nullToken() when nothing was ungot.
Token& Tokenizer::popStack() {
    if (tokenStack_.empty()) {
        return Token::nullToken();
    }
    Token* replayed = tokenStack_.back();
    tokenStack_.pop_back();
    tokenList_.emplace_back(replayed);
    curToken_ = *replayed;
    curLineno_ = curToken_.getLineno();
    curCol_ = curToken_.getCol();
    return *replayed;
}

// Produces the next token: a previously ungot token takes priority over
// reading fresh input. Freshly scanned tokens get the current file tag and
// advance the column cursor by their printed width.
Token& Tokenizer::getNext(std::istream& is) {
    if (auto& replayed = popStack()) {
        return replayed;
    }
    auto& fresh = step(is);
    if (fresh) {
        curCol_ += calcColumn(fresh);
        fresh.setFile(file_);
        curToken_ = fresh;
    }
    return fresh;
}

// Pushes the most recently consumed token back onto the unget stack and
// rewinds the cursor to the token before it. With nothing left consumed,
// the cursor resets to the start-of-file position (line 1, column 1).
// A no-op when no token has been consumed yet.
void Tokenizer::unget() {
    if (tokenList_.empty()) {
        return;
    }
    tokenStack_.emplace_back(tokenList_.back());
    tokenList_.pop_back();
    if (!tokenList_.empty()) {
        curToken_ = *tokenList_.back();
        curCol_ = curToken_.getCol();
        curLineno_ = curToken_.getLineno();
    } else {
        curToken_ = Token::nullToken();
        curCol_ = 1;
        curLineno_ = 1;
    }
}

// Wraps a single whitespace character in a TERM_BLANK token. A newline
// bumps the line counter and rewinds the column before the token is
// created, so the newline token carries the new line's position.
Token& Tokenizer::getBlank(const char c) {
    if (c == '\n') {
        curLineno_ += 1;
        curCol_ = 1;
    }
    std::string text(1, c);
    auto* blankToken = new Token(TokenType::TERM_BLANK, curLineno_, curCol_, text, this);
    tokenList_.emplace_back(blankToken);
    return *blankToken;
}

// Scans a punctuation/operator token. Symbols are one character long except
// the comment markers "//", "/*" and "*/"; any other second character is
// pushed back and the symbol is matched as a single character. Unrecognized
// symbols throw std::runtime_error unless ignore_ is set, in which case
// they degrade to IDENTIFIER tokens (see CHECK_END_FAIL).
Token& Tokenizer::getSymbol(std::istream& is) {
    std::string text;
    char c = null_c;
    // Casts to unsigned char: std::isalnum on a negative char is UB.
    while (!is.get(c).fail() && !std::isalnum(static_cast<unsigned char>(c)) && !isBlank(c)) {
        text += c;
        if (text.size() > 1) {
            if (text == "//" || text == "/*" || text == "*/") {
                break;
            } else {
                // Not a known two-character symbol: keep only the first char.
                is.unget();
                text.pop_back();
                break;
            }
        }
    }
    // The character that terminated the scan belongs to the next token.
    if (isBlank(c) || std::isalnum(static_cast<unsigned char>(c))) {
        is.unget();
    }
    Token* token = nullptr;
    CHECK_BEGIN(TokenType::TERM_HASH, "#")
    CHECK_NEXT(TokenType::TERM_DOUBLE_SLASH, "//")
    CHECK_NEXT(TokenType::TERM_QUOTE, "\"")
    CHECK_NEXT(TokenType::TERM_COMMENT_BEGIN, "/*")
    CHECK_NEXT(TokenType::TERM_COMMENT_END, "*/")
    CHECK_NEXT(TokenType::TERM_LEFT_BRACE, "{")
    CHECK_NEXT(TokenType::TERM_RIGHT_BRACE, "}")
    CHECK_NEXT(TokenType::TERM_TEMP_LEFT, "<")
    CHECK_NEXT(TokenType::TERM_TEMP_RIGHT, ">")
    CHECK_NEXT(TokenType::TERM_COMMA, ",")
    CHECK_NEXT(TokenType::TERM_LEFT_ROUND, "(")
    CHECK_NEXT(TokenType::TERM_RIGHT_ROUND, ")")
    CHECK_NEXT(TokenType::TERM_DOT, ".")
    CHECK_NEXT(TokenType::TERM_EQUAL, "=")
    CHECK_NEXT(TokenType::TERM_LEFT_BRACKET, "[")
    CHECK_NEXT(TokenType::TERM_RIGHT_BRACKET, "]")
    CHECK_NEXT(TokenType::TERM_COLON, ":")
    CHECK_END_FAIL("Unrecognized token")
    tokenList_.emplace_back(token);
    return *token;
}

// Scans a word of [A-Za-z0-9_] characters, pushing back the character that
// ended it. Known keywords become their dedicated token types; everything
// else becomes an IDENTIFIER. Returns Token::nullToken() when no word
// character is available at the current position.
Token& Tokenizer::getIdentifier(std::istream& is) {
    std::string text;
    char c = null_c;
    // Cast to unsigned char: std::isalnum on a negative char is UB.
    while (!is.get(c).fail() && !isBlank(c)
        && (std::isalnum(static_cast<unsigned char>(c)) || c == '_')) {
        text += c;
    }
    is.unget();
    if (text.empty()) {
        return Token::nullToken();
    }
    Token* token = nullptr;
    CHECK_BEGIN(TokenType::KEYWORD_STRUCT, "struct")
    CHECK_NEXT(TokenType::KEYWORD_SERVICE, "service")
    CHECK_NEXT(TokenType::KEYWORD_STRING, "string")
    CHECK_NEXT(TokenType::KEYWORD_VOID, "void")
    CHECK_NEXT(TokenType::KEYWORD_I8, "i8")
    CHECK_NEXT(TokenType::KEYWORD_I16, "i16")
    CHECK_NEXT(TokenType::KEYWORD_I32, "i32")
    CHECK_NEXT(TokenType::KEYWORD_I64, "i64")
    CHECK_NEXT(TokenType::KEYWORD_UI8, "ui8")
    CHECK_NEXT(TokenType::KEYWORD_UI16, "ui16")
    CHECK_NEXT(TokenType::KEYWORD_UI32, "ui32")
    CHECK_NEXT(TokenType::KEYWORD_UI64, "ui64")
    CHECK_NEXT(TokenType::KEYWORD_FLOAT, "float")
    CHECK_NEXT(TokenType::KEYWORD_DOUBLE, "double")
    CHECK_NEXT(TokenType::KEYWORD_SET, "set")
    CHECK_NEXT(TokenType::KEYWORD_DICT, "dict")
    CHECK_NEXT(TokenType::KEYWORD_SEQ, "seq")
    CHECK_NEXT(TokenType::KEYWORD_BOOL, "bool")
    CHECK_NEXT(TokenType::KEYWORD_IMPORT, "import")
    CHECK_NEXT(TokenType::KEYWORD_ONEWAY, "oneway")
    CHECK_NEXT(TokenType::KEYWORD_MULTIPLE, "multiple")
    CHECK_NEXT(TokenType::KEYWORD_SINGLE, "single")
    CHECK_NEXT(TokenType::KEYWORD_REENTRANT, "reentrant")
    CHECK_NEXT(TokenType::KEYWORD_STATIC, "static")
    CHECK_NEXT(TokenType::KEYWORD_DYNAMIC, "dynamic")
    CHECK_NEXT(TokenType::KEYWORD_GENERIC, "generic")
    CHECK_NEXT(TokenType::KEYWORD_TIMEOUT, "timeout")
    CHECK_NEXT(TokenType::KEYWORD_RETRY, "retry")
    CHECK_NEXT(TokenType::KEYWORD_NOEXCEPT, "noexcept")
    CHECK_NEXT(TokenType::KEYWORD_ENUM, "enum")
    CHECK_NEXT(TokenType::KEYWORD_STREAM, "stream")
    CHECK_END()
    tokenList_.emplace_back(token);
    return *token;
}

// Appends `token` and every token after it to `list`, stopping once
// PRINT_LINE_LIMIT newlines have been seen (the limiting newline itself
// is not appended). Tokens before `token` are skipped.
void Tokenizer::collectTokenForward(Token& token, TokenList& list) {
    bool reached = false;
    auto newlines = 0;
    for (auto* candidate : tokenList_) {
        if (!reached) {
            if (*candidate == token) {
                reached = true;
                list.push_back(candidate);
            }
            continue;
        }
        if (candidate->getText() == "\n") {
            newlines += 1;
            if (newlines >= PRINT_LINE_LIMIT) {
                break;
            }
        }
        list.push_back(candidate);
    }
}

// Walks tokenList_ in reverse from `token`, prepending up to
// PRINT_LINE_LIMIT earlier lines of tokens to `list` (which already holds
// the forward context). Also measures the error line's layout for the
// underline marker:
//   tabCount   - number of tab tokens before `token` on its own line
//   scoreCount - token.count(), presumably the token's printed width — TODO confirm
//   spaceCount - accumulated count() of the other tokens before `token`
//                on its own line
void Tokenizer::collectTokenBackward(Token& token, TokenList& list,
    std::int32_t& tabCount, std::int32_t& scoreCount, std::int32_t& spaceCount) {
    auto n = 0;
    auto found = false;
    for (auto it = tokenList_.rbegin(); it != tokenList_.rend(); it++) {
        auto* checkToken = *it;
        if (found) {
            if (checkToken->getText() == "\n") {
                n += 1;
                // Stop once enough preceding lines were collected; the
                // limiting newline is not added to the list.
                if (n >= PRINT_LINE_LIMIT) {
                    break;
                } else {
                    list.push_front(checkToken);
                }
            } else {
                // n == 0 means we are still on the error token's own line:
                // accumulate the indentation/width to its left.
                if (!n) {
                    if (checkToken->getText() == "\t") {
                        tabCount += 1;
                    } else {
                        spaceCount += checkToken->count();
                    }
                }
                list.push_front(checkToken);
            }
        } else {
            // Skip everything after `token`; start collecting once found.
            if (*checkToken == token) {
                scoreCount = token.count();
                found = true;
            }
        }
    }
}

// Streams the collected context tokens to `os`, printing "\t<lineno> " at
// each line start, and draws the "~" marker under `token` via printError()
// once the newline after it is reached. Returns true if the marker was
// drawn; when `token` sits on the last collected line (no trailing
// newline), returns false and the caller draws the marker itself.
// `lineno` is updated to the last line number printed.
bool Tokenizer::tryPrintErrorMessage(Token& token, TokenList& list, std::int32_t& lineno,
    std::int32_t tabCount, std::int32_t scoreCount, std::int32_t spaceCount, std::ostream& os) {
    bool errorDraw = true;   // stays true until the marker has been emitted
    bool print = false;      // true once `token` itself has been reached
    bool end = false;        // true at the newline terminating token's line
    for (auto* t : list) {
        if (*t == token) {
            print = true;
        }
        if (!print) {
            // Before the error token: echo text, prefixing each new line
            // with its line number.
            if (lineno != t->getLineno()) {
                lineno = t->getLineno();
                os << std::endl << "\t" << lineno << " ";
                if (t->getText() != "\n") {
                    os << t->getText();
                }
            } else {
                os << t->getText();
            }
        } else {
            // From the error token on: echo until the line's newline, then
            // emit the underline marker exactly once.
            if (t->getText() == "\n") {
                end = true;
            } else {
                os << t->getText();
                continue;
            }
            if (end) {
                printError(os, lineno, tabCount + 1, scoreCount, spaceCount + 1);
                errorDraw = false;
                print = false;
            }
        }
    }
    return !errorDraw;
}

// Prints the source lines surrounding `token` with a "~" marker under the
// token itself. Forward context is gathered first, then earlier lines are
// prepended along with the indentation measurements the marker needs. If
// the message printer could not place the marker (token on the final
// line), it is drawn here as a fallback.
void Tokenizer::printLine(Token& token, std::ostream& os) {
    std::list<Token*> context;
    std::int32_t tabCount = 0;
    std::int32_t scoreCount = 0;
    std::int32_t spaceCount = 0;
    std::int32_t lineno = 0;
    collectTokenForward(token, context);
    collectTokenBackward(token, context, tabCount, scoreCount, spaceCount);
    bool markerDrawn = tryPrintErrorMessage(token, context, lineno, tabCount,
        scoreCount, spaceCount, os);
    if (!markerDrawn) {
        printError(os, lineno, tabCount + 1, scoreCount, spaceCount + 1);
    }
    os << std::endl << "<-" << std::endl;
}

// Emits the underline marker on a fresh line: `tabCount` tabs, blanks
// covering the printed width of `lineno` plus `spaceCount` columns, then
// `scoreCount` tildes pointing at the offending token.
void Tokenizer::printError(std::ostream& os, std::int32_t lineno, std::int32_t tabCount,
    std::int32_t scoreCount, std::int32_t spaceCount) {
    // Local helper: write `count` copies of `ch` (no-op for count <= 0).
    auto repeat = [&os](std::int64_t count, char ch) {
        for (std::int64_t i = 0; i < count; i++) {
            os << ch;
        }
    };
    os << std::endl;
    repeat(tabCount, '\t');
    // Blanks must also cover the digits of the line number printed by
    // tryPrintErrorMessage at the start of the source line.
    repeat(static_cast<std::int64_t>(std::to_string(lineno).size()) + spaceCount, ' ');
    repeat(scoreCount, '~');
}
