use pest::Parser;
use pest_derive::Parser;

/// Entry point: read the source file named by the first CLI argument and
/// run the lexer over its contents (tokens/errors are printed to stderr).
fn main() {
    let args: Vec<String> = std::env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }
    let filename = &args[1];
    // Fail with a readable message and a non-zero exit status instead of a
    // panic backtrace when the file cannot be read (missing path, bad perms,
    // non-UTF-8 contents).
    let input = std::fs::read_to_string(filename).unwrap_or_else(|e| {
        eprintln!("Failed to read {}: {}", filename, e);
        std::process::exit(1);
    });
    tokenize(&input);
}

/// Pest-generated lexer. The token rules live in `lexer.pest` (resolved
/// relative to `src/` by pest's convention); the derive macro also generates
/// the `Rule` enum that `tokenize` matches on.
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct ExpressionParser;
/// A lexical token produced by [`tokenize`].
///
/// Keyword variants (`Const` … `Return`) are recognized from identifiers;
/// all other variants correspond 1:1 to operator/punctuation rules in the
/// grammar. `Clone` and `Eq` are derived so tokens can be duplicated and
/// used as exact-equality keys by downstream passes.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Token {
    // Keywords
    Const,
    Int,
    Void,
    If,
    Else,
    While,
    Break,
    Continue,
    Return,
    // Comparison and logical operators
    Eq,
    Neq,
    Le,
    Ge,
    Lt,
    Gt,
    And,
    Or,
    // Arithmetic operators
    Plus,
    Minus,
    Mul,
    Div,
    Mod,
    Not,
    Assign,
    // Punctuation
    Lparen,
    Rparen,
    Lbrace,
    Rbrace,
    Lbrack,
    Rbrack,
    Comma,
    Semicolon,
    /// A non-keyword identifier, with its source text.
    Ident(String),
    /// An integer literal (decimal, octal, or hex) already converted to `i64`.
    Integer(i64),
}

/// Tokenize `input` with the pest grammar.
///
/// If any `ILLEGAL` lexeme appears, an "Error type A" diagnostic is printed
/// to stderr for each one and an empty `Vec` is returned. Otherwise each
/// recognized token is echoed to stderr as `"<TOKEN> at Line N."` and
/// collected in source order.
///
/// # Panics
/// Panics if pest rejects `input` entirely, or if an integer literal does
/// not fit in `i64`.
pub fn tokenize(input: &str) -> Vec<Token> {
    let pairs = ExpressionParser::parse(Rule::TOKEN, input)
        .unwrap_or_else(|e| panic!("Parse error: {}", e))
        .next()
        .unwrap()
        .into_inner();
    // First pass: report every illegal lexeme; bail out with no tokens if
    // any exist. (`Pairs::clone` is a cheap cursor copy, not a reparse.)
    let illegals: Vec<_> = pairs
        .clone()
        .filter(|p| p.as_rule() == Rule::ILLEGAL)
        .collect();
    if !illegals.is_empty() {
        for i in illegals {
            let (line, _) = i.line_col();
            eprintln!(
                "Error type A at Line {}: Mysterious character \"{}\".",
                line,
                i.as_str()
            )
        }
        return Vec::new();
    }
    // No ILLEGAL pairs remain past this point, so no re-filter is needed.
    pairs
        .map(|p| {
            let (line, _) = p.line_col();
            let token = match p.as_rule() {
                Rule::EQ => Token::Eq,
                Rule::NEQ => Token::Neq,
                Rule::LE => Token::Le,
                Rule::GE => Token::Ge,
                Rule::LT => Token::Lt,
                Rule::GT => Token::Gt,
                Rule::AND => Token::And,
                Rule::OR => Token::Or,
                Rule::PLUS => Token::Plus,
                Rule::MINUS => Token::Minus,
                Rule::MUL => Token::Mul,
                Rule::DIV => Token::Div,
                Rule::MOD => Token::Mod,
                Rule::NOT => Token::Not,
                Rule::ASSIGN => Token::Assign,
                Rule::LPAREN => Token::Lparen,
                Rule::RPAREN => Token::Rparen,
                Rule::LBRACE => Token::Lbrace,
                Rule::RBRACE => Token::Rbrace,
                Rule::LBRACKT => Token::Lbrack,
                Rule::RBRACKT => Token::Rbrack,
                Rule::COMMA => Token::Comma,
                Rule::SEMICOLON => Token::Semicolon,
                // Keywords are lexed as identifiers, then resolved here.
                Rule::IDENT => match p.as_str() {
                    "const" => Token::Const,
                    "int" => Token::Int,
                    "void" => Token::Void,
                    "if" => Token::If,
                    "else" => Token::Else,
                    "while" => Token::While,
                    "break" => Token::Break,
                    "continue" => Token::Continue,
                    "return" => Token::Return,
                    _ => Token::Ident(p.as_str().to_string()),
                },
                // Skip the "0x"/"0X" prefix; the remaining digits are hex.
                Rule::DIGIT_HEX => {
                    Token::Integer(i64::from_str_radix(&p.as_str()[2..], 16).unwrap())
                }
                // Parse the whole lexeme in radix 8: the leading '0' is a
                // valid octal digit, and this also handles a bare "0"
                // (stripping it, as the old `[1..]` slice did, would leave an
                // empty string and panic).
                Rule::DIGIT_OCT => {
                    Token::Integer(i64::from_str_radix(p.as_str(), 8).unwrap())
                }
                Rule::DIGIT_DEC => Token::Integer(i64::from_str_radix(p.as_str(), 10).unwrap()),
                _ => unreachable!("rule : {:?}", p.as_rule()),
            };
            eprintln!("{} at Line {}.", token, line);
            token
        })
        .collect()
}

impl std::fmt::Display for Token {
    /// Formats a token as `"<KIND> <lexeme>"`, the layout used by the
    /// lexer's per-token diagnostic output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Payload-carrying variants need `write!`; every other variant maps
        // to a fixed string that is written out once at the end.
        let text = match self {
            Token::Const => "CONST const",
            Token::Int => "INT int",
            Token::Void => "VOID void",
            Token::If => "IF if",
            Token::Else => "ELSE else",
            Token::While => "WHILE while",
            Token::Break => "BREAK break",
            Token::Continue => "CONTINUE continue",
            Token::Return => "RETURN return",
            Token::Eq => "EQ ==",
            Token::Neq => "NEQ !=",
            Token::Le => "LE <=",
            Token::Ge => "GE >=",
            Token::Lt => "LT <",
            Token::Gt => "GT >",
            Token::And => "AND &&",
            Token::Or => "OR ||",
            Token::Plus => "PLUS +",
            Token::Minus => "MINUS -",
            Token::Mul => "MUL *",
            Token::Div => "DIV /",
            Token::Mod => "MOD %",
            Token::Not => "NOT !",
            Token::Assign => "ASSIGN =",
            Token::Lparen => "L_PAREN (",
            Token::Rparen => "R_PAREN )",
            Token::Lbrace => "L_BRACE {",
            Token::Rbrace => "R_BRACE }",
            Token::Lbrack => "L_BRACKT [",
            Token::Rbrack => "R_BRACKT ]",
            Token::Comma => "COMMA ,",
            Token::Semicolon => "SEMICOLON ;",
            Token::Ident(name) => return write!(f, "IDENT {}", name),
            Token::Integer(value) => return write!(f, "INTEGER_CONST {}", value),
        };
        f.write_str(text)
    }
}
