use std::{fs, env};
mod lexer_parser;
use lexer_parser::token::tokenize;

/// Entry point: reads the SysY source file named as the first command-line
/// argument and writes its token stream to stderr, one token per line.
///
/// On lexing failure, prints one "Error type A" diagnostic per invalid token.
///
/// Exit status: 0 on success; 1 on missing argument, unreadable file, or
/// lexing errors.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: {} <source_file>\ncompiler sysY to token\n", args[0]);
        std::process::exit(1);
    }

    let filename = &args[1];
    // Report read failures (missing file, permission denied, invalid UTF-8)
    // as a clean error message instead of panicking with a backtrace.
    let contents = fs::read_to_string(filename).unwrap_or_else(|err| {
        eprintln!("error: cannot read '{}': {}", filename, err);
        std::process::exit(1);
    });

    match tokenize(&contents) {
        Ok(tokens) => {
            // Format: [token kind] [token text] at Line [line of the token's
            // first character].
            for (token, line) in tokens {
                eprintln!("{} at Line {}.", token, line);
            }
        }
        Err(errors) => {
            for (token, line) in errors {
                eprintln!("Error type A at Line {}: invalid token '{}'", line, token);
            }
            std::process::exit(1);
        }
    }
}