//! SysY编译器 - 程序入口

mod token;
mod utils;
mod lexer;

use std::{env, fs};
use token::Token;
use lexer::tokenize;

/// Entry point for the SysY compiler front end.
///
/// Usage: `<binary> <filename>`. Reads the given source file, runs the
/// lexer, and prints each token (or the lexer's error message) to stderr.
/// Exits with status 1 on a usage error or an unreadable input file.
fn main() {
    let args: Vec<String> = env::args().collect();

    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    let filename = &args[1];
    // Report read failures cleanly instead of panicking: `expect` would
    // produce a panic message (and backtrace hint) for what is just a bad
    // path or permission problem — a user error, not a compiler bug.
    let input = match fs::read_to_string(filename) {
        Ok(content) => content,
        Err(err) => {
            eprintln!("Failed to read {}: {}", filename, err);
            std::process::exit(1);
        }
    };

    match tokenize(&input) {
        Ok(tokens) => {
            // No lexical errors: print every token.
            for token in &tokens {
                // Ident/IntegerConst carry their own lexeme; all other
                // token kinds fall back to the matched source span.
                let display_value = match &token.token {
                    Token::Ident(s) => s.clone(),
                    Token::IntegerConst(s) => s.clone(),
                    _ => token.span.clone(),
                };
                // NOTE(review): tokens are written to stderr, same as the
                // error path — confirm this is intended (course grader
                // convention?) rather than stdout.
                eprintln!("{} {} at Line {}.", token.token, display_value, token.line);
            }
        }
        Err(error_msg) => {
            // A lexical error occurred: report it and stop.
            // NOTE(review): exit status stays 0 here to preserve current
            // behavior — consider exiting nonzero if callers check status.
            eprintln!("{}", error_msg);
        }
    }
}
