mod names;

use std::process::exit;
use pest::error::LineColLocation::Pos;
use pest::Parser;
use pest_derive::Parser;

/// pest-generated lexer for the expression language.
///
/// The token grammar lives in `lexer.pest` (per pest convention the path is
/// relative to the crate's `src/` directory). The derive generates the `Rule`
/// enum and the `parse` associated function used by `tokenize` and
/// `tokenize_print`.
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct ExpressionParser {}

/// A lexical token produced by `ExpressionParser::tokenize`.
///
/// Keyword, operator, and delimiter variants carry no payload; `Ident` holds
/// the identifier text and `IntConst` the already-decoded integer value
/// (octal/hex/decimal literals are normalized by the lexer).
// NOTE(review): `LBrackt`/`RBrackt` are misspellings of "Bracket", but the
// names are part of the public interface (used by tests and the `names`
// module), so they are kept as-is.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Token {
    // Keywords.
    Const,
    Int,
    Void,
    If,
    Else,
    While,
    Break,
    Continue,
    Return,
    // Arithmetic operators.
    Plus,
    Minus,
    Mul,
    Div,
    Mod,
    // Assignment and comparison operators.
    Assign,
    Eq,
    Neq,
    Lt,
    Gt,
    Le,
    Ge,
    // Logical operators.
    Not,
    And,
    Or,
    // Punctuation and delimiters.
    Semicolon,
    Comma,
    LParen,
    RParen,
    LBrace,
    RBrace,
    LBrackt,
    RBrackt,
    // Payload-carrying tokens.
    Ident(String),
    IntConst(i32),
}

impl ExpressionParser {
    pub fn tokenize(input: &str) -> Vec<Token> {
        let pair = Self::parse(Rule::file, input).unwrap_or_else(|err| {
            if let Pos((col, _)) = err.line_col {
                eprintln!("Error type A at Line {col}: Mysterious character at '{}'.", err.line());
            }
            exit(0);
        }).next().unwrap();

        let mut res = Vec::new();

        for record in pair.clone().into_inner() {
            if let Some(t) = Self::get_token_name(record.clone()) {
                res.push(t);
            }
        }

        res
    }

    pub fn tokenize_print(input: &str) {
        let pair = Self::parse(Rule::file, input).unwrap_or_else(|err| {
            if let Pos((col, _)) = err.line_col {
                eprintln!("Error type A at Line {col}: Mysterious character at '{}'.", err.line());
            }
            exit(0);
        }).next().unwrap();

        for record in pair.clone().into_inner() {
            let (name, value) = Self::get_name_and_value(record.clone());
            if name.is_empty() {
                continue;
            }
            eprintln!("{} {} at Line {}.", name, value, record.line_col().0)
        }
    }
}

#[test]
pub fn test_parser() {
    use Token::*;

    // Every keyword, operator, and delimiter the lexer recognizes.
    let input = r#"const int void if else while break continue return
    +-*/%= == != < > <= >= !&&||(){}[],;"#;
    assert_eq!(
        ExpressionParser::tokenize(input),
        vec![
            Const, Int, Void, If, Else, While, Break, Continue, Return,
            Plus, Minus, Mul, Div, Mod, Assign, Eq, Neq,
            Lt, Gt, Le, Ge, Not, And, Or,
            LParen, RParen, LBrace, RBrace, LBrackt, RBrackt,
            Comma, Semicolon,
        ]
    );

    // Integer literals (octal, hex, decimal), identifiers, and how digit runs
    // that are not one valid literal get split (e.g. "001123" lexes as "0"
    // followed by octal "01123" = 595; "08" as "0" then "8").
    let input = r#"012 0x12 123 abc 001123 _11ccc 2i 08"#;
    assert_eq!(
        ExpressionParser::tokenize(input),
        vec![
            IntConst(10), IntConst(18), IntConst(123),
            Ident("abc".to_string()), IntConst(0), IntConst(595),
            Ident("_11ccc".to_string()), IntConst(2), Ident("i".to_string()),
            IntConst(0), IntConst(8),
        ]
    );
}