use super::{Token, TokenType};
use crate::utils::num_parsing::parse_int_literal;
use std::io::Write;

pub fn format_token(token: &Token) -> String {
    match token.kind {
        TokenType::Eof => String::new(),
        TokenType::Error => {
            format!("Error type A at Line {}: {}", token.pos.line, token.text)
        }
        _ => {
            let type_str = token_type_to_string(&token.kind);
            let text_str = format_token_text(token);
            format!("{} {} at Line {}.", type_str, text_str, token.pos.line)
        }
    }
}

/// Map a token kind to the uppercase type tag used in display lines.
fn token_type_to_string(token_type: &TokenType) -> &'static str {
    match token_type {
        // Literals and identifiers
        TokenType::IntLit => "INTEGER_CONST",
        TokenType::FloatLit => "FLOAT_CONST",
        TokenType::Ident => "IDENT",

        // Keywords
        TokenType::Int => "INT",
        TokenType::Float => "FLOAT",
        TokenType::Void => "VOID",
        TokenType::Const => "CONST",
        TokenType::If => "IF",
        TokenType::Else => "ELSE",
        TokenType::While => "WHILE",
        TokenType::Break => "BREAK",
        TokenType::Continue => "CONTINUE",
        TokenType::Return => "RETURN",

        // Arithmetic operators
        TokenType::Plus => "PLUS",
        TokenType::Minus => "MINUS",
        TokenType::Star => "STAR",
        TokenType::Slash => "SLASH",
        TokenType::Percent => "PERCENT",

        // Comparison and logical operators
        TokenType::Lt => "LT",
        TokenType::Le => "LE",
        TokenType::Gt => "GT",
        TokenType::Ge => "GE",
        TokenType::Eq => "EQ",
        TokenType::Ne => "NE",
        TokenType::And => "AND",
        TokenType::Or => "OR",
        TokenType::Not => "NOT",
        TokenType::Assign => "ASSIGN",

        // Delimiters
        TokenType::LParen => "L_PAREN",
        TokenType::RParen => "R_PAREN",
        TokenType::LBrack => "L_BRACK",
        TokenType::RBrack => "R_BRACK",
        TokenType::LBrace => "L_BRACE",
        TokenType::RBrace => "R_BRACE",
        TokenType::Semi => "SEMICOLON",
        TokenType::Comma => "COMMA",

        // Sentinels
        TokenType::Eof => "EOF",
        TokenType::Error => "ERROR",
    }
}

/// Produce the text portion of a token's display line.
///
/// Integer literals are normalized to decimal via `parse_int_literal`
/// (falling back to the raw text when parsing fails); identifiers and
/// float literals are echoed verbatim; every other kind uses its
/// canonical source spelling.
fn format_token_text(token: &Token) -> String {
    match token.kind {
        TokenType::IntLit => parse_int_literal(&token.text)
            .map(|value| value.to_string())
            .unwrap_or_else(|_| token.text.clone()),
        TokenType::Ident | TokenType::FloatLit => token.text.clone(),
        _ => get_token_raw_text(&token.kind),
    }
}

fn get_token_raw_text(token_type: &TokenType) -> String {
    match token_type {
        TokenType::Int => "int".to_string(),
        TokenType::Float => "float".to_string(),
        TokenType::Void => "void".to_string(),
        TokenType::Const => "const".to_string(),
        TokenType::If => "if".to_string(),
        TokenType::Else => "else".to_string(),
        TokenType::While => "while".to_string(),
        TokenType::Break => "break".to_string(),
        TokenType::Continue => "continue".to_string(),
        TokenType::Return => "return".to_string(),

        TokenType::Plus => "+".to_string(),
        TokenType::Minus => "-".to_string(),
        TokenType::Star => "*".to_string(),
        TokenType::Slash => "/".to_string(),
        TokenType::Percent => "%".to_string(),
        TokenType::Lt => "<".to_string(),
        TokenType::Le => "<=".to_string(),
        TokenType::Gt => ">".to_string(),
        TokenType::Ge => ">=".to_string(),
        TokenType::Eq => "==".to_string(),
        TokenType::Ne => "!=".to_string(),
        TokenType::And => "&&".to_string(),
        TokenType::Or => "||".to_string(),
        TokenType::Not => "!".to_string(),
        TokenType::Assign => "=".to_string(),

        TokenType::LParen => "(".to_string(),
        TokenType::RParen => ")".to_string(),
        TokenType::LBrack => "[".to_string(),
        TokenType::RBrack => "]".to_string(),
        TokenType::LBrace => "{".to_string(),
        TokenType::RBrace => "}".to_string(),
        TokenType::Semi => ";".to_string(),
        TokenType::Comma => ",".to_string(),

        _ => String::new(),
    }
}

/// Write one formatted line per token to `writer`.
///
/// Iteration stops at the first EOF token. Error tokens are skipped
/// unless `include_errors` is set. Tokens that format to an empty
/// string produce no output line.
pub fn write_formatted_tokens(
    tokens: &[Token],
    writer: &mut dyn Write,
    include_errors: bool,
) -> Result<(), std::io::Error> {
    let visible = tokens
        .iter()
        .take_while(|tok| !tok.is_eof())
        .filter(|tok| include_errors || !tok.is_error());

    for tok in visible {
        let line = format_token(tok);
        if !line.is_empty() {
            writeln!(writer, "{}", line)?;
        }
    }
    Ok(())
}

/// 收集所有错误
pub fn collect_lexer_errors(tokens: &[Token]) -> Vec<String> {
    tokens
        .iter()
        .filter(|t| t.is_error())
        .map(|t| format!("Error type A at Line {}: {}", t.pos.line, t.text))
        .collect()
}
