use pest_derive::Parser;
use pest::Parser;

/// Pest-generated lexer for the SysY language.
///
/// The `#[derive(Parser)]` macro reads the grammar file `lexer.pest` and
/// generates the `Rule` enum plus the `parse` implementation consumed by
/// [`tokenize`].
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct SysYParser;

/// A lexical token of the SysY language.
///
/// Variants mirror the terminal rules of `lexer.pest` one-to-one (see the
/// `match` in [`tokenize`]). `Clone` and `Eq` are derived in addition to
/// the original `Debug`/`PartialEq` so tokens can be duplicated and used
/// with exact equality by later compiler stages; both field types
/// (`i64`, `String`) support them, so this is a backward-compatible widening.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Token {
    /// Integer literal, already converted from its decimal/octal/hex spelling.
    Integer(i64),
    /// Identifier, stored verbatim.
    IDENT(String),
    // Keywords.
    CONST,
    INT,
    VOID,
    IF,
    ELSE,
    WHILE,
    BREAK,
    CONTINUE,
    RETURN,
    // Arithmetic operators.
    PLUS,
    MINUS,
    MUL,
    DIV,
    MOD,
    // Assignment and comparison operators.
    ASSIGN,
    EQ,
    NEQ,
    LT,
    GT,
    LE,
    GE,
    // Logical operators.
    NOT,
    AND,
    OR,
    // Delimiters. NOTE(review): the "Brackt" spelling is kept as-is for
    // interface compatibility with existing callers and grammar rule names.
    LParen,
    RParen,
    LBrace,
    RBrace,
    LBrackt,
    RBrackt,
    COMMA,
    SEMICOLON,
    /// Any character the lexer could not classify; reported as a lexical
    /// error (and aborts the process) in [`tokenize`].
    UNKNOWN(String),
}

/// A [`Token`] paired with the source line it was lexed from, for
/// diagnostics.
#[derive(Debug)]
pub struct TokenWithLine {
    /// The classified token.
    pub token: Token,
    /// Line number as reported by pest's `Pair::line_col().0`
    /// (1-based per the pest API).
    pub line: usize,
}

/// Convert the textual spelling of a SysY integer literal into its value.
///
/// Handles the three spellings: hexadecimal (`0x`/`0X` prefix), octal
/// (a leading `0` followed by at least one more digit), and plain
/// decimal. A lone `"0"` falls through to the decimal branch.
///
/// # Panics
/// Panics if `token` is not a well-formed literal of the detected base
/// or its value overflows `i64`; lexed input is expected to be
/// well-formed, so a panic here indicates a grammar/lexer bug.
fn to_number(token: &str) -> i64 {
    let hex_digits = token
        .strip_prefix("0x")
        .or_else(|| token.strip_prefix("0X"));
    match hex_digits {
        Some(digits) => i64::from_str_radix(digits, 16).unwrap(),
        None if token.len() > 1 && token.starts_with('0') => {
            i64::from_str_radix(&token[1..], 8).unwrap()
        }
        None => token.parse().unwrap(),
    }
}

/// Lex `input` into a vector of tokens annotated with source line numbers.
///
/// Parses the whole input with the pest `token` rule, then maps each
/// matched terminal rule onto its [`Token`] variant.
///
/// # Panics / exits
/// * Panics with `"Parse error: …"` if pest cannot parse the input at all.
/// * If any `UNKNOWN` token was lexed, prints one "Error type A"
///   diagnostic per occurrence to stderr and terminates the process with
///   exit code 1 — the function does not return in that case.
pub fn tokenize(input: &str) -> Vec<TokenWithLine> {
    // Top-level parse; a failure here means the input didn't match the
    // grammar's `token` rule at all.
    let pair = SysYParser::parse(Rule::token, input)
        .unwrap_or_else(|e| panic!("Parse error: {}", e))
        .next()
        .unwrap();

    let tokens: Vec<TokenWithLine> = pair
        .into_inner()
        .map(|p| {
            // pest reports (line, column); only the line is kept.
            let line = p.line_col().0;
            // One arm per terminal rule in lexer.pest.
            let token = match p.as_rule() {
                Rule::INTEGER_CONST => Token::Integer(to_number(p.as_str())),
                Rule::IDENT => Token::IDENT(p.as_str().into()),
                Rule::CONST => Token::CONST,
                Rule::INT => Token::INT,
                Rule::VOID => Token::VOID,
                Rule::IF => Token::IF,
                Rule::ELSE => Token::ELSE,
                Rule::WHILE => Token::WHILE,
                Rule::BREAK => Token::BREAK,
                Rule::CONTINUE => Token::CONTINUE,
                Rule::RETURN => Token::RETURN,
                Rule::PLUS => Token::PLUS,
                Rule::MINUS => Token::MINUS,
                Rule::MUL => Token::MUL,
                Rule::DIV => Token::DIV,
                Rule::MOD => Token::MOD,
                Rule::ASSIGN => Token::ASSIGN,
                Rule::EQ => Token::EQ,
                Rule::NEQ => Token::NEQ,
                Rule::LT => Token::LT,
                Rule::GT => Token::GT,
                Rule::LE => Token::LE,
                Rule::GE => Token::GE,
                Rule::NOT => Token::NOT,
                Rule::AND => Token::AND,
                Rule::OR => Token::OR,
                Rule::L_PAREN => Token::LParen,
                Rule::R_PAREN => Token::RParen,
                Rule::L_BRACE => Token::LBrace,
                Rule::R_BRACE => Token::RBrace,
                Rule::L_BRACKT => Token::LBrackt,
                Rule::R_BRACKT => Token::RBrackt,
                Rule::COMMA => Token::COMMA,
                Rule::SEMICOLON => Token::SEMICOLON,
                Rule::UNKNOWN => Token::UNKNOWN(p.as_str().into()),
                // No other rule can appear inside `token` — presumably
                // guaranteed by the grammar; TODO confirm against lexer.pest.
                _ => unreachable!(),
            };
            TokenWithLine { token, line }
        })
        .collect();

    // Gather all lexical errors first so every one is reported before exiting.
    let errors: Vec<_> = tokens
        .iter()
        .filter(|t| matches!(t.token, Token::UNKNOWN(_)))
        .collect();

    if !errors.is_empty() {
        for t in errors {
            if let Token::UNKNOWN(ref text) = t.token {
                // NOTE(review): this diagnostic wording looks externally
                // checked (fixed "Error type A" format) — keep byte-exact.
                eprintln!("Error type A at Line {}: Mysterious character '{}'.", t.line, text);
            }
        }
        std::process::exit(1);
    }

    tokens
}
