#include "tokenizer.h"

#include <stdexcept>

#include "token.h"

// Opens `filename` for reading; the tokenizer will consume characters
// from it on each call to next(). No recognizers are registered yet —
// the caller populates them via addToken().
obj::tokenizer::tokenizer(const std::string& filename) : ifs(filename) {}

// Registers a token recognizer with this tokenizer.
// Stores a non-owning pointer: the caller must keep `token` alive for the
// whole lifetime of the tokenizer, since countTokens()/getToken() keep
// dereferencing it on every call to next().
void obj::tokenizer::addToken(obj::tokens::token& token) {
    tokens.push_back(&token);
}

// Extracts the next token from the input stream using maximal munch:
// characters are appended to `token` while at least one registered
// recognizer still matches; the token ends just before the first
// character that makes every recognizer fail (that character is pushed
// back into the stream for the following call).
//
// On success the matched text is left in `token` and NO_ERROR is
// returned. Returns EOF_REACHED when the stream was already exhausted
// (nothing read), and TOKEN_NOT_RECOGNIZED when the accumulated text
// matches no recognizer — or, at end of input, more than one, or one
// that still needs more characters.
obj::TokenizerResult obj::tokenizer::next(std::string& token) {
    token.clear();

    while (!ifs.eof()) {
        char next = ifs.next();
        token += next;
        unsigned cant = countTokens(token);

        // Decide here when the new character killed every match
        // (cant == 0), or when exactly one recognizer matches and the
        // input is exhausted (no more characters can extend the token).
        if (cant == 0 || (cant == 1 && ifs.eof())) {
            if (token.size() == 0)
                return TOKEN_NOT_RECOGNIZED;

            if (cant == 0) {
                // The last character broke the match: push it back so the
                // next call sees it, and drop it from the current token.
                ifs.restore(next);
                token.resize(token.size() - 1);
            }

            // NOTE: besides checking for exactly one match, this call
            // re-runs setString() on every recognizer — getToken() below
            // depends on that state when it probes isToken().
            if (countTokens(token) != 1)
                return TOKEN_NOT_RECOGNIZED;

            // A recognizer that still needs more input cannot produce a
            // complete token here.
            if (getToken().needsMore())
                return TOKEN_NOT_RECOGNIZED;

            return NO_ERROR;
        }
    }

    // Loop ended without deciding: an empty token means the stream was
    // already at EOF; otherwise the text was still ambiguous or
    // incomplete when the input ran out.
    return (token.size() == 0 ? EOF_REACHED : TOKEN_NOT_RECOGNIZED);
}

unsigned obj::tokenizer::countTokens(const std::string& token) {
    unsigned cant = 0;

    for (size_t i = 0; i < tokens.size(); ++i) {
        obj::tokens::token& current = *tokens[i];
        current.setString(token);
        cant += (current.isToken() ? 1 : 0);
    }

    return cant;
}

// Returns the first registered recognizer that accepts the string most
// recently passed to countTokens().
//
// Precondition: countTokens() reported at least one match for the
// current string; otherwise this throws std::logic_error.
//
// Fix: the original ended in a bare `throw;` — a rethrow with no
// exception in flight, which calls std::terminate() instead of raising
// anything a caller could catch. Throw a descriptive exception instead.
obj::tokens::token& obj::tokenizer::getToken() {
    for (obj::tokens::token* candidate : tokens) {
        if (candidate->isToken())
            return *candidate;
    }
    throw std::logic_error("tokenizer::getToken: no recognizer matches the current string");
}
