﻿#include "frontend/lexer.h"

#include <cctype>
#include <iostream>

namespace mspc {

// Construct a lexer over `input`. `filename` is kept only for diagnostics
// (error positions); scanning starts at offset 0, line 1, column 1.
Lexer::Lexer(const std::string& input, const std::string& filename)
    : input_(input)
    , filename_(filename)
    , position_(0)
    , line_(1)
    , column_(1) {
    // Lookup tables mapping reserved words and type names to token types.
    keywords_ = init_key_words_table();
    typewords_ = init_type_words_table();
}

// Look at the current character without consuming it; '\0' at end of input.
char Lexer::peek() const {
    if (is_at_end()) {
        return '\0';
    }
    return input_[position_];
}

// Consume and return the current character, maintaining the 1-based
// line/column counters. Returns '\0' once the input is exhausted.
char Lexer::advance() {
    if (is_at_end()) {
        return '\0';
    }

    const char consumed = input_[position_];
    ++position_;
    if (consumed != '\n') {
        ++column_;
    } else {
        // Newline: move to the start of the next line.
        ++line_;
        column_ = 1;
    }
    return consumed;
}

// True once every character of the input has been consumed.
bool Lexer::is_at_end() const {
    return !(position_ < input_.size());
}

// Advance past a run of whitespace characters.
//
// The argument to std::isspace is cast to unsigned char: passing a plain
// char with a negative value (any UTF-8 byte >= 0x80 on platforms where
// char is signed) is undefined behavior for the <cctype> functions.
void Lexer::skip_whitespace() {
    while (!is_at_end() &&
           std::isspace(static_cast<unsigned char>(peek()))) {
        advance();
    }
}

// Consume the remainder of a "//" comment. Called with the first '/'
// already consumed and the second '/' pending; the terminating newline
// (if any) is left for the caller.
void Lexer::skip_line_comment() {
    advance(); // consume the second '/'
    for (;;) {
        if (is_at_end() || peek() == '\n') {
            break;
        }
        advance();
    }
}

// Consume a "/* ... */" comment. Called with the leading '/' already
// consumed and the '*' pending. Throws LexerError if the comment is
// never closed before end of input.
void Lexer::skip_block_comment() {
    advance(); // consume the '*'
    const size_t err_line = line_;
    const size_t err_column = column_;

    while (!is_at_end()) {
        const bool at_star = (peek() == '*');
        const bool slash_next =
            position_ + 1 < input_.size() && input_[position_ + 1] == '/';
        if (at_star && slash_next) {
            // Found the closing "*/".
            advance(); // '*'
            advance(); // '/'
            return;
        }
        advance(); // still inside the comment body
    }
    throw LexerError("Unterminated block comment", err_line, err_column, filename_);
}

// Scan an identifier, keyword, or type word. The first character was
// already consumed by read_next_token, hence the start of position_ - 1.
// Keyword and type-word tables take precedence over plain identifiers.
Token Lexer::read_identifier() {
    size_t start = position_ - 1; // first character already consumed

    // Cast to unsigned char: std::isalnum on a negative plain char is UB.
    while (!is_at_end() &&
           (std::isalnum(static_cast<unsigned char>(peek())) || peek() == '_')) {
        advance();
    }

    std::string text = input_.substr(start, position_ - start);

    // Reserved keywords first.
    auto kw = keywords_.find(text);
    if (kw != keywords_.end()) {
        return make_token(kw->second, text);
    }

    // Then built-in type names.
    auto tw = typewords_.find(text);
    if (tw != typewords_.end()) {
        return make_token(tw->second, text);
    }

    return make_token(TokenType::IDENTIFIER, text);
}

// Scan a numeric literal: decimal integers, decimal floats (at most one
// '.'), and hexadecimal literals ("0x.." / "0X.."). The first digit was
// already consumed by read_next_token, so the literal starts at
// position_ - 1.
//
// Bug fix: the old hex-prefix test compared peek() — the SECOND character
// of the literal — against '0'. Since the leading '0' was already
// consumed, "0x1F" never matched and was split into NUMBER "0" plus an
// identifier. The prefix is the consumed char ('0') plus a pending x/X.
Token Lexer::read_number() {
    size_t start = position_ - 1;
    bool has_dot = false;
    bool is_hex = false;

    // Hexadecimal prefix: consumed char was '0' and 'x'/'X' follows.
    if (input_[start] == '0' && !is_at_end() &&
        (peek() == 'x' || peek() == 'X')) {
        is_hex = true;
        advance(); // consume the 'x'/'X'
    }

    while (!is_at_end()) {
        char c = peek();
        // Cast to unsigned char: <cctype> on a negative plain char is UB.
        bool is_digit_here =
            is_hex ? std::isxdigit(static_cast<unsigned char>(c)) != 0
                   : std::isdigit(static_cast<unsigned char>(c)) != 0;
        if (is_digit_here) {
            advance();
        } else if (c == '.' && !has_dot && !is_hex) {
            has_dot = true; // at most one decimal point, never in hex
            advance();
        } else {
            break;
        }
    }

    std::string text = input_.substr(start, position_ - start);
    return make_token(TokenType::NUMBER, text);
}

// Scan a string literal body; the opening '"' was consumed by the caller.
// Translates the escapes \n \t \r \" \\ \0; any other escaped character
// is kept verbatim. Yields an ERROR token when the closing quote is
// missing.
Token Lexer::read_string() {
    std::string value;

    while (!is_at_end() && peek() != '"') {
        char raw = advance();
        if (raw != '\\') {
            value.push_back(raw);
            continue;
        }

        // Backslash consumed; translate the escape code that follows.
        if (is_at_end()) break; // dangling backslash at end of input

        char code = advance();
        char translated;
        switch (code) {
        case 'n':  translated = '\n'; break;
        case 't':  translated = '\t'; break;
        case 'r':  translated = '\r'; break;
        case '"':  translated = '"';  break;
        case '\\': translated = '\\'; break;
        case '0':  translated = '\0'; break;
        default:   translated = code; break; // unknown escape: keep as-is
        }
        value.push_back(translated);
    }

    if (is_at_end() || peek() != '"') {
        return make_error_token("Unterminated string literal");
    }

    advance(); // consume the closing quote
    return make_token(TokenType::STRING_LITERAL, value);
}

// Build a token of `type` carrying `value`. The stamped position is the
// lexer's CURRENT line/column, i.e. just past the token's last character.
Token Lexer::make_token(TokenType type, const std::string& value) {
    Token token(type, value, line_, column_);
    return token;
}

// Build an ERROR token whose value holds the diagnostic message,
// stamped with the lexer's current position.
Token Lexer::make_error_token(const std::string& message) {
    Token token(TokenType::ERROR, message, line_, column_);
    return token;
}

// Produce the next token from the input stream.
//
// Dispatch order: skip whitespace, then the first character decides —
// identifier/keyword, number, string literal, punctuation, or comment
// (which is skipped and scanning resumes). Throws LexerError for any
// character that starts no token (including a lone '/', since MSP has
// no division operator).
//
// Comments are handled with a loop rather than the previous tail
// recursion, so a long run of consecutive comments cannot grow the
// call stack.
Token Lexer::read_next_token() {
    for (;;) {
        skip_whitespace();

        if (is_at_end()) {
            return make_token(TokenType::END_OF_FILE, "");
        }

        char c = advance();

        // Identifiers and keywords. Cast to unsigned char: passing a
        // negative plain char to <cctype> functions is UB.
        if (std::isalpha(static_cast<unsigned char>(c)) || c == '_') {
            return read_identifier();
        }

        // Numeric literals.
        if (std::isdigit(static_cast<unsigned char>(c))) {
            return read_number();
        }

        // String literals.
        if (c == '"') {
            return read_string();
        }

        // Punctuation and comments.
        switch (c) {
        case '{':
            return make_token(TokenType::LBRACE, "{");
        case '}':
            return make_token(TokenType::RBRACE, "}");
        case '[':
            return make_token(TokenType::LBRACKET, "[");
        case ']':
            return make_token(TokenType::RBRACKET, "]");
        case '(':
            return make_token(TokenType::LPAREN, "(");
        case ')':
            return make_token(TokenType::RPAREN, ")");
        case '=':
            return make_token(TokenType::EQUAL, "=");
        case ';':
            return make_token(TokenType::SEMICOLON, ";");
        case '|':
            return make_token(TokenType::PIPE, "|");
        case ':':
            return make_token(TokenType::COLON, ":");
        case ',':
            return make_token(TokenType::COMMA, ",");
        case '<':
            return make_token(TokenType::LESS, "<");
        case '>':
            return make_token(TokenType::GREATER, ">");
        case '.':
            return make_token(TokenType::DOT, ".");
        case '/':
            if (peek() == '/') {
                skip_line_comment();
                continue; // resume scanning after the comment
            }
            if (peek() == '*') {
                skip_block_comment();
                continue; // resume scanning after the comment
            }
            // Not a comment; division is not supported by MSP.
            throw LexerError("Unexpected character '/'", line_, column_, filename_);

        default:
            throw LexerError("Unexpected character: " + std::string(1, c), line_, column_, filename_);
        }
    }
}

// Scan the entire input into a TokenContainer. The container always ends
// with either an END_OF_FILE token or — if a LexerError was thrown — a
// trailing ERROR token carrying the diagnostic message.
TokenContainer Lexer::tokenize() {
    TokenContainer container(filename_);

    try {
        for (;;) {
            Token token = read_next_token();
            // Remember whether this is the sentinel before moving it away.
            const bool reached_eof = (token.type == TokenType::END_OF_FILE);
            container.add_token(std::move(token));
            if (reached_eof) {
                break;
            }
        }
    } catch (const LexerError& e) {
        container.add_token(Token(TokenType::ERROR, e.what(), line_, column_));
    }

    return container;
}

// Name of the source file being lexed (used for diagnostics).
const std::string& Lexer::filename() const {
    return filename_;
}

// Debug helper: dump every token (one per line) followed by the total
// count. Uses '\n' instead of std::endl inside the loop — endl forces a
// stream flush per line; a single flush at the end is enough.
void Lexer::print_tokens(const std::vector<Token>& tokens) {
    std::cout << "=== Tokens ===" << '\n';
    for (const auto& token : tokens) {
        std::cout << token.to_string() << '\n';
    }
    std::cout << "=== Total tokens: " << tokens.size() << " ===" << std::endl;
}

} // namespace mspc
