use std::{ env, fs };

mod lexer;
use lexer::tokenize;

/// Entry point: reads the source file named on the command line, runs the
/// lexer over it, and prints one line per token to stderr in the form
/// `TYPE text at Line n.`.
///
/// Exits with status 1 (no panic) on a missing argument or an unreadable
/// file, printing a diagnostic to stderr in both cases.
fn main() {
    // Collect command-line arguments.
    let args: Vec<String> = env::args().collect();

    // Require exactly one input filename.
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    // The file to lex.
    let filename = &args[1];

    // Read the whole input file. Report the error and exit cleanly instead
    // of panicking — consistent with the missing-argument handling above.
    let input = match fs::read_to_string(filename) {
        Ok(text) => text,
        Err(err) => {
            eprintln!("{}: cannot read '{}': {}", args[0], filename, err);
            std::process::exit(1);
        }
    };

    // Lexical analysis.
    let tokens = tokenize(&input);

    // Print each token with its line number.
    // NOTE(review): token output goes to stderr, not stdout — presumably
    // what the grading/test harness expects; confirm before changing.
    for token in tokens {
        let (token_type, token_text) = match &token.token {
            lexer::Token::Integer(val) => ("INTEGER_CONST", val.to_string()),
            lexer::Token::IDENT(name) => ("IDENT", name.clone()),
            lexer::Token::CONST => ("CONST", "const".to_string()),
            lexer::Token::INT => ("INT", "int".to_string()),
            lexer::Token::VOID => ("VOID", "void".to_string()),
            lexer::Token::IF => ("IF", "if".to_string()),
            lexer::Token::ELSE => ("ELSE", "else".to_string()),
            lexer::Token::WHILE => ("WHILE", "while".to_string()),
            lexer::Token::BREAK => ("BREAK", "break".to_string()),
            lexer::Token::CONTINUE => ("CONTINUE", "continue".to_string()),
            lexer::Token::RETURN => ("RETURN", "return".to_string()),
            lexer::Token::PLUS => ("PLUS", "+".to_string()),
            lexer::Token::MINUS => ("MINUS", "-".to_string()),
            lexer::Token::MUL => ("MUL", "*".to_string()),
            lexer::Token::DIV => ("DIV", "/".to_string()),
            lexer::Token::MOD => ("MOD", "%".to_string()),
            lexer::Token::ASSIGN => ("ASSIGN", "=".to_string()),
            lexer::Token::EQ => ("EQ", "==".to_string()),
            lexer::Token::NEQ => ("NEQ", "!=".to_string()),
            lexer::Token::LT => ("LT", "<".to_string()),
            lexer::Token::GT => ("GT", ">".to_string()),
            lexer::Token::LE => ("LE", "<=".to_string()),
            lexer::Token::GE => ("GE", ">=".to_string()),
            lexer::Token::NOT => ("NOT", "!".to_string()),
            lexer::Token::AND => ("AND", "&&".to_string()),
            lexer::Token::OR => ("OR", "||".to_string()),
            lexer::Token::LParen => ("L_PAREN", "(".to_string()),
            lexer::Token::RParen => ("R_PAREN", ")".to_string()),
            lexer::Token::LBrace => ("L_BRACE", "{".to_string()),
            lexer::Token::RBrace => ("R_BRACE", "}".to_string()),
            lexer::Token::LBrackt => ("L_BRACKT", "[".to_string()),
            lexer::Token::RBrackt => ("R_BRACKT", "]".to_string()),
            lexer::Token::COMMA => ("COMMA", ",".to_string()),
            lexer::Token::SEMICOLON => ("SEMICOLON", ";".to_string()),
            // The offending character is discarded here; the printed text is
            // always the literal "unknown" (kept as-is to preserve output).
            lexer::Token::UNKNOWN(_) => ("UNKNOWN", "unknown".to_string()),
        };
        eprintln!("{} {} at Line {}.", token_type, token_text, token.line);
    }
}
