use pest::Parser;
use pest_derive::Parser;
use std::num::ParseIntError;

use super::{LexerError, Token, TokenWrap};

/// Pest-generated lexer for SysY; the token rules are defined in the
/// external grammar file `lexer.pest` (the derive expands them into the
/// `Rule` enum used throughout this module).
#[derive(Parser)]
#[allow(missing_docs)]
#[grammar = "lexer.pest"]
struct SysYLexer;

pub fn tokenize(input: &str) -> (Vec<TokenWrap>, Vec<LexerError>) {
    let mut token_list: Vec<TokenWrap> = Vec::new();
    let mut error_list: Vec<LexerError> = Vec::new();
    let mut expr = match SysYLexer::parse(Rule::expr, input) {
        Ok(o) => o,
        Err(_) => {
            return (token_list, error_list);
        }
    };
    let expr = expr.next().unwrap();
    for token in expr.into_inner() {
        match token.as_rule() {
            Rule::token => {
                for token in token.into_inner() {
                    match token.as_rule() {
                        Rule::r#const => {
                            token_list.push(TokenWrap::new(Token::Const, token.line_col().into()))
                        }
                        Rule::int => {
                            token_list.push(TokenWrap::new(Token::Int, token.line_col().into()))
                        }
                        Rule::void => {
                            token_list.push(TokenWrap::new(Token::Void, token.line_col().into()))
                        }
                        Rule::r#if => {
                            token_list.push(TokenWrap::new(Token::If, token.line_col().into()))
                        }
                        Rule::r#else => {
                            token_list.push(TokenWrap::new(Token::Else, token.line_col().into()))
                        }
                        Rule::r#while => {
                            token_list.push(TokenWrap::new(Token::While, token.line_col().into()))
                        }
                        Rule::r#break => {
                            token_list.push(TokenWrap::new(Token::Break, token.line_col().into()))
                        }
                        Rule::r#continue => token_list
                            .push(TokenWrap::new(Token::Continue, token.line_col().into())),
                        Rule::r#return => {
                            token_list.push(TokenWrap::new(Token::Return, token.line_col().into()))
                        }
                        Rule::eq => {
                            token_list.push(TokenWrap::new(Token::Eq, token.line_col().into()))
                        }
                        Rule::neq => {
                            token_list.push(TokenWrap::new(Token::Neq, token.line_col().into()))
                        }
                        Rule::le => {
                            token_list.push(TokenWrap::new(Token::Le, token.line_col().into()))
                        }
                        Rule::ge => {
                            token_list.push(TokenWrap::new(Token::Ge, token.line_col().into()))
                        }
                        Rule::and => {
                            token_list.push(TokenWrap::new(Token::And, token.line_col().into()))
                        }
                        Rule::or => {
                            token_list.push(TokenWrap::new(Token::Or, token.line_col().into()))
                        }
                        Rule::line_comment => (),
                        Rule::mutiline_comment => (),
                        Rule::plus => {
                            token_list.push(TokenWrap::new(Token::Plus, token.line_col().into()))
                        }
                        Rule::minus => {
                            token_list.push(TokenWrap::new(Token::Minus, token.line_col().into()))
                        }
                        Rule::mul => {
                            token_list.push(TokenWrap::new(Token::Mul, token.line_col().into()))
                        }
                        Rule::div => {
                            token_list.push(TokenWrap::new(Token::Div, token.line_col().into()))
                        }
                        Rule::r#mod => {
                            token_list.push(TokenWrap::new(Token::Mod, token.line_col().into()))
                        }
                        Rule::assign => {
                            token_list.push(TokenWrap::new(Token::Assign, token.line_col().into()))
                        }
                        Rule::lt => {
                            token_list.push(TokenWrap::new(Token::Lt, token.line_col().into()))
                        }
                        Rule::gt => {
                            token_list.push(TokenWrap::new(Token::Gt, token.line_col().into()))
                        }
                        Rule::not => {
                            token_list.push(TokenWrap::new(Token::Not, token.line_col().into()))
                        }
                        Rule::l_paren => {
                            token_list.push(TokenWrap::new(Token::LParen, token.line_col().into()))
                        }
                        Rule::r_paren => {
                            token_list.push(TokenWrap::new(Token::RParen, token.line_col().into()))
                        }
                        Rule::l_brace => {
                            token_list.push(TokenWrap::new(Token::LBrace, token.line_col().into()))
                        }
                        Rule::r_brace => {
                            token_list.push(TokenWrap::new(Token::RBrace, token.line_col().into()))
                        }
                        Rule::l_brackt => {
                            token_list.push(TokenWrap::new(Token::LBrackt, token.line_col().into()))
                        }
                        Rule::r_brackt => {
                            token_list.push(TokenWrap::new(Token::RBrackt, token.line_col().into()))
                        }
                        Rule::comma => {
                            token_list.push(TokenWrap::new(Token::Comma, token.line_col().into()))
                        }
                        Rule::semicolon => token_list
                            .push(TokenWrap::new(Token::Semicolon, token.line_col().into())),
                        Rule::integer_const => token_list.push(TokenWrap::new(
                            Token::IntegerConst(parse_int(token.as_str()).unwrap()),
                            token.line_col().into(),
                        )),
                        Rule::ident => token_list.push(TokenWrap::new(
                            Token::Ident(token.as_str().to_string()),
                            token.line_col().into(),
                        )),
                        _ => unreachable!(),
                    }
                }
            }
            Rule::invalid => error_list.push(LexerError {
                message: format!("invalid {}", token.as_str()),
                position: token.line_col().into(),
            }),
            Rule::EOI => (),
            _ => unreachable!(),
        }
    }
    (token_list, error_list)
}

/// Parse a SysY integer literal into an `i64`.
///
/// Accepts (after trimming surrounding whitespace):
/// - hexadecimal with a `0x`/`0X` prefix,
/// - binary with a `0b`/`0B` prefix,
/// - octal with a leading `0` (a bare `"0"` is simply zero),
/// - decimal otherwise.
///
/// Any malformed or out-of-range digits surface as the underlying
/// [`ParseIntError`].
fn parse_int(s: &str) -> Result<i64, ParseIntError> {
    let s = s.trim();
    if let Some(digits) = s.strip_prefix("0x").or_else(|| s.strip_prefix("0X")) {
        i64::from_str_radix(digits, 16)
    } else if let Some(digits) = s.strip_prefix("0b").or_else(|| s.strip_prefix("0B")) {
        i64::from_str_radix(digits, 2)
    } else if let Some(digits) = s.strip_prefix('0') {
        if digits.is_empty() {
            // The literal was exactly "0" — not an octal prefix.
            Ok(0)
        } else {
            i64::from_str_radix(digits, 8)
        }
    } else {
        s.parse()
    }
}
