use std::{env, fs, vec};

mod lexer;
use lexer::{Token, tokenize};

/// Renders a token stream as newline-joined strings.
///
/// Tokens whose kind (`t.r()`) is `"ERROR"` are separated out; if any are
/// present, only the error tokens are rendered (error reporting takes
/// priority), otherwise every token is rendered in input order.
fn token_collect(tokens: &[Token]) -> String {
    // Partition by reference: we only need to Display the tokens, so there
    // is no reason to clone each one into an owned Vec<Token>.
    let (errs, toks): (Vec<&Token>, Vec<&Token>) =
        tokens.iter().partition(|t| t.r() == "ERROR");
    // Errors, when present, shadow the normal token listing.
    let chosen = if errs.is_empty() { toks } else { errs };
    chosen
        .iter()
        .map(|t| t.to_string())
        .collect::<Vec<_>>()
        .join("\n")
}

/// CLI entry point: reads the file named by the first argument, tokenizes
/// it, and prints the rendered token (or error) listing.
///
/// Exits with status 1 on a missing argument or an unreadable file.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }
    let filename = &args[1];
    // Report a readable diagnostic and exit nonzero instead of panicking:
    // an unreadable file is user error, not a program bug.
    let input = match fs::read_to_string(filename) {
        Ok(s) => s,
        Err(e) => {
            eprintln!("Failed to read {}: {}", filename, e);
            std::process::exit(1);
        }
    };
    let tokens = tokenize(&input);
    // NOTE(review): the listing goes to stderr even on success; if this
    // output is meant to be piped, stdout (`println!`) is likely intended —
    // confirm before changing.
    eprintln!("{}", token_collect(&tokens));
}
