use std::{borrow::Cow, env, fs};

mod tokenize;
use tokenize::{tokenize, LexResult, Token};

fn main() {
    // 收集命令行参数
    let args: Vec<String> = env::args().collect();

    // 检查是否提供了文件名
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    // 获取文件名
    let filename = &args[1];

    // 读取输入文件
    let input = match fs::read_to_string(filename) {
        Ok(content) => content,
        Err(e) => {
            eprintln!("Failed to read file '{}': {}", filename, e);
            std::process::exit(1);
        }
    };

    // 词法分析
    match tokenize(&input) {
        LexResult::Success(tokens) => {
            // 输出所有识别到的tokens
            for token_with_location in tokens {
                let token_str = match &token_with_location.token {
                    Token::Const => "CONST const".to_string(),
                    Token::Int => "INT int".to_string(),
                    Token::Void => "VOID void".to_string(),
                    Token::If => "IF if".to_string(),
                    Token::Else => "ELSE else".to_string(),
                    Token::While => "WHILE while".to_string(),
                    Token::Break => "BREAK break".to_string(),
                    Token::Continue => "CONTINUE continue".to_string(),
                    Token::Return => "RETURN return".to_string(),
                    
                    Token::Plus => "PLUS +".to_string(),
                    Token::Minus => "MINUS -".to_string(),
                    Token::Mul => "MUL *".to_string(),
                    Token::Div => "DIV /".to_string(),
                    Token::Mod => "MOD %".to_string(),
                    Token::Assign => "ASSIGN =".to_string(),
                    Token::Eq => "EQ ==".to_string(),
                    Token::Neq => "NEQ !=".to_string(),
                    Token::Lt => "LT <".to_string(),
                    Token::Gt => "GT >".to_string(),
                    Token::Le => "LE <=".to_string(),
                    Token::Ge => "GE >=".to_string(),
                    Token::Not => "NOT !".to_string(),
                    Token::And => "AND &&".to_string(),
                    Token::Or => "OR ||".to_string(),
                    
                    Token::LParen => "L_PAREN (".to_string(),
                    Token::RParen => "R_PAREN )".to_string(),
                    Token::LBrace => "L_BRACE {".to_string(),
                    Token::RBrace => "R_BRACE }".to_string(),
                    Token::LBracket => "L_BRACKT [".to_string(),
                    Token::RBracket => "R_BRACKT ]".to_string(),
                    Token::Comma => "COMMA ,".to_string(),
                    Token::Semicolon => "SEMICOLON ;".to_string(),
                    
                    Token::Identifier(name) => format!("IDENT {}", name),
                    Token::IntegerConstant(value) => format!("INTEGER_CONST {}", value),
                    
                    // Comments are already filtered out, but just in case
                    Token::LineComment(_) | Token::MultilineComment(_) => continue,
                };
                
                eprintln!("{} at Line {}.", token_str, token_with_location.line);
            }
        }
        LexResult::Error { line, message } => {
            eprintln!("Error type A at Line {}:{}", line, message);
            std::process::exit(1);
        }
    }
}
