mod lexer;
use lexer::Rule;
pub(crate) use lexer::{tokenize, Token};
use std::fmt::Display;

/// Print the token stream for diagnostics.
///
/// If the lexer produced any `GARBAGE` tokens, report each one as a
/// lexical error (with its source line) and print nothing else.
/// Otherwise, print every token with its line number.
///
/// Takes `&[Token]` rather than `&Vec<Token>` so any slice of tokens
/// works; `&vec` still coerces at existing call sites.
pub(crate) fn print_tokens(tokens: &[Token]) {
    // Single pass over the garbage tokens: the flag replaces the old
    // clone-the-iterator-and-count pattern, which walked the list twice.
    let mut has_lex_errors = false;
    for err in tokens.iter().filter(|t| t.pair.as_rule() == Rule::GARBAGE) {
        has_lex_errors = true;
        eprintln!(
            "Error type A at Line {}: Chababa~",
            err.pair.as_span().start_pos().line_col().0
        );
    }
    if has_lex_errors {
        // Lexical errors suppress the normal token dump.
        return;
    }
    for token in tokens {
        // line_col().0 is the 1-based source line of the token's start.
        let token_line = token.pair.as_span().start_pos().line_col().0;
        eprintln!("{} at Line {}.", token, token_line);
    }
}

impl<'a> Display for Token<'a> {
    /// Format a token as `RULE lexeme`. Integer constants are printed
    /// with their numeric value decoded from the hex/octal/decimal
    /// sub-rule; every other token prints its raw source text.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let token_rule = self.pair.as_rule();
        match token_rule {
            Rule::INTEGER_CONST => write!(
                f,
                "{:?} {}",
                token_rule,
                // An INTEGER_CONST is assumed to contain exactly one inner
                // pair naming its base — TODO confirm against the grammar.
                match self.pair.clone().into_inner().next().unwrap() {
                    // "0x"/"0X" prefix stripped before parsing base 16.
                    hex_pair if hex_pair.as_rule() == Rule::HEX_CONST =>
                        i64::from_str_radix(&hex_pair.as_str()[2..], 16).unwrap(),
                    // Leading '0' stripped before parsing base 8. A bare "0"
                    // leaves no digits, and from_str_radix("") errors — so
                    // map the empty digit string to 0 instead of panicking.
                    oct_pair if oct_pair.as_rule() == Rule::OCT_CONST => {
                        let digits = &oct_pair.as_str()[1..];
                        if digits.is_empty() {
                            0
                        } else {
                            i64::from_str_radix(digits, 8).unwrap()
                        }
                    }
                    dec_pair if dec_pair.as_rule() == Rule::DEC_CONST =>
                        dec_pair.as_str().parse::<i64>().unwrap(),
                    // The grammar only produces the three bases above.
                    _ => unreachable!(),
                }
            ),
            _ => write!(f, "{:?} {}", token_rule, self.pair.as_str()),
        }
    }
}
