mod lexer;

use std::env;
use std::fs;
use std::process;

/// Entry point: reads the file named by the single CLI argument, lexes it,
/// and prints one line per token to stdout. All diagnostics (usage, I/O
/// errors, lexer errors) go to stderr, and every failure path exits
/// with a non-zero status.
fn main() {
    // args[0] is the program name; exactly one user argument is expected.
    let args: Vec<String> = env::args().collect();

    if args.len() != 2 {
        eprintln!("Usage: {} <input_file>", args[0]);
        process::exit(1);
    }

    let filename = &args[1];

    // Slurp the whole file up front — the lexer consumes a &str.
    let input = match fs::read_to_string(filename) {
        Ok(content) => content,
        Err(e) => {
            eprintln!("Error reading file '{}': {}", filename, e);
            process::exit(1);
        }
    };

    match lexer::tokenize(&input) {
        Ok(tokens) => {
            // Tokens are the program's normal output -> stdout
            // (was eprintln!, which wrongly sent them to stderr).
            for token in tokens {
                println!("{} {} at Line {}.", token.token_type, token.text, token.line);
            }
        }
        Err(errors) => {
            // Lexer diagnostics stay on stderr, one per line.
            for error in errors {
                eprintln!("{}", error);
            }
            // Previously the process exited 0 here even though lexing
            // failed; report failure via the exit status like the other
            // error paths above.
            process::exit(1);
        }
    }
}