use pest::Parser;
use pest::iterators::Pair;
use pest_derive::Parser;

/// Pest-generated lexer for SysY.
/// The actual token rules live in `lexer.pest`; `pest_derive` reads that
/// grammar at compile time and generates the `Rule` enum plus the
/// `Parser` impl used by `tokenize`.
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct SysYLexer;
/// A lexed token: `(kind name, lexeme text, 1-based line number)`.
/// `Debug` is derived so tokens can be dumped in diagnostics/tests;
/// `Eq` is derivable since all three fields are `Eq`.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Token(String, String, usize);

impl Token {
    /// Builds a token from a kind name `r`, the lexeme `c`, and line `l`.
    fn new(r: String, c: String, l: usize) -> Token {
        Token(r, c, l)
    }

    /// Sentinel token emitted for EOI; `tokenize` filters it out.
    fn empty() -> Token {
        Token("".to_string(), "".to_string(), 0)
    }

    /// True for the sentinel produced by `empty()`.
    /// Checks the fields directly instead of allocating two fresh empty
    /// `String`s (via `Token::empty()`) on every call just to compare.
    fn is_empty(&self) -> bool {
        self.0.is_empty() && self.1.is_empty() && self.2 == 0
    }

    /// Returns the token's kind name (e.g. "IDENT", "PLUS", "ERROR").
    pub fn r(&self) -> String {
        self.0.clone()
    }
}

/// Formats a token for the lexer's textual output.
///
/// Implemented as `Display` rather than a hand-written `ToString`:
/// the std blanket impl (`impl<T: Display> ToString for T`) makes
/// `.to_string()` produce exactly the same strings as before, and the
/// type additionally works with `format!`/`println!`.
impl std::fmt::Display for Token {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The EOI sentinel renders as the empty string.
        if self.is_empty() {
            return Ok(());
        }
        let Token(r, c, l) = self;
        match r.as_str() {
            // Lexical error report; the message format is part of the
            // expected output and must not change.
            "ERROR" => write!(f, "Error type A at Line {}: Mysterious character \"{}\"", l, c),
            _ => write!(f, "{} {} at Line {}.", r, c, l),
        }
    }
}

/// Maps an identifier's spelling to its keyword token kind, or "IDENT"
/// if it is not a reserved word.
///
/// Takes `&str` instead of `&String` (the idiomatic, more general borrow);
/// existing call sites passing `&String` still compile via deref coercion.
fn keyword_filter(s: &str) -> &'static str {
    match s {
        "const" => "CONST",
        "int" => "INT",
        "void" => "VOID",
        "if" => "IF",
        "else" => "ELSE",
        "while" => "WHILE",
        "break" => "BREAK",
        "continue" => "CONTINUE",
        "return" => "RETURN",
        _ => "IDENT",
    }
}

/// Converts an integer literal (decimal, octal `0…`, or hex `0x…`) to its
/// decimal string representation.
///
/// NOTE(review): `unwrap` panics if the literal overflows `u32` — presumably
/// the grammar/judge guarantees in-range constants; confirm.
fn decimalize(num: (&Rule, &String)) -> String {
    let (rule, text) = num;
    let parsed = match rule {
        Rule::INTEGER_CONST_DEC => u32::from_str_radix(text, 10),
        // Skip the leading "0" prefix.
        Rule::INTEGER_CONST_OCT => u32::from_str_radix(&text[1..], 8),
        // Skip the leading "0x"/"0X" prefix.
        Rule::INTEGER_CONST_HEX => u32::from_str_radix(&text[2..], 16),
        _ => unreachable!(),
    };
    parsed.unwrap().to_string()
}

/// Turns a punctuation pair (parens, braces, brackets, comma, semicolon)
/// into its `Token`.
fn connectors(p: Pair<Rule>) -> Token {
    let line = p.line_col().0;
    let lexeme = p.as_str().to_string();
    // NOTE(review): "L_BRACKT"/"R_BRACKT" (no E) kept verbatim — presumably
    // this spelling matches the expected output format; do not "fix".
    let kind = match p.as_rule() {
        Rule::L_PAREN => "L_PAREN",
        Rule::R_PAREN => "R_PAREN",
        Rule::L_BRACE => "L_BRACE",
        Rule::R_BRACE => "R_BRACE",
        Rule::L_BRACKET => "L_BRACKT",
        Rule::R_BRACKET => "R_BRACKT",
        Rule::COMMA => "COMMA",
        Rule::SEMICOLON => "SEMICOLON",
        _ => unreachable!(),
    };
    Token::new(kind.to_string(), lexeme, line)
}

/// Dispatches an OPERATORS pair to the one- or two-character operator
/// handler, unwrapping the inner rule first.
fn operators(p: Pair<Rule>) -> Token {
    match p.as_rule() {
        Rule::OP_TWO_CHAR => op_two_char(p.into_inner().next().unwrap()),
        Rule::OP_ONE_CHAR => op_one_char(p.into_inner().next().unwrap()),
        _ => unreachable!(),
    }
}

/// Tokenizes a two-character operator (==, !=, <=, >=, &&, ||).
fn op_two_char(p: Pair<Rule>) -> Token {
    let kind = match p.as_rule() {
        Rule::EQ => "EQ",
        Rule::NEQ => "NEQ",
        Rule::LE => "LE",
        Rule::GE => "GE",
        Rule::AND => "AND",
        Rule::OR => "OR",
        _ => unreachable!(),
    };
    Token::new(kind.to_string(), p.as_str().to_string(), p.line_col().0)
}

/// Tokenizes a single-character operator (+ - * / % = < > !).
fn op_one_char(p: Pair<Rule>) -> Token {
    let kind = match p.as_rule() {
        Rule::PLUS => "PLUS",
        Rule::MINUS => "MINUS",
        Rule::MUL => "MUL",
        Rule::DIV => "DIV",
        Rule::MOD => "MOD",
        Rule::ASSIGN => "ASSIGN",
        Rule::LT => "LT",
        Rule::GT => "GT",
        Rule::NOT => "NOT",
        _ => unreachable!(),
    };
    Token::new(kind.to_string(), p.as_str().to_string(), p.line_col().0)
}

/// Builds an INTEGER_CONST token, normalizing the literal (dec/oct/hex)
/// to its decimal string via `decimalize`.
///
/// The original wrapped the construction in `match r { _ => … }` — a
/// wildcard-only match is a dead construct; the call is made directly.
fn int_const(p: Pair<Rule>) -> Token {
    let r = p.as_rule();
    let c = p.as_str().to_string();
    let l = p.line_col().0;
    Token::new("INTEGER_CONST".to_string(), decimalize((&r, &c)), l)
}

fn top_level(p: Pair<Rule>) -> Token {
    let r = p.as_rule();
    let c = p.as_str().to_string();
    let l = p.line_col().0;
    match r {
        Rule::OPERATORS => {
            let op = p.into_inner().next().unwrap();
            operators(op)
        }
        Rule::CONNECTORS => {
            let conn = p.into_inner().next().unwrap();
            connectors(conn)
        }
        Rule::IDENT => Token::new(keyword_filter(&c).to_string(), c, l),
        Rule::INTEGER_CONST => {
            let ic = p.into_inner().next().unwrap();
            int_const(ic)
        }
        Rule::ERROR => Token::new("ERROR".to_string(), c, l),
        Rule::EOI => Token::empty(),
        _ => unreachable!(),
    }
}

/// Lexes `input` into the full token stream (EOI sentinel removed).
///
/// NOTE(review): the `unwrap` panics if the grammar rejects the input —
/// presumably `ACCEPTED` matches any byte sequence (lexical errors surface
/// as `ERROR` tokens instead); confirm against `lexer.pest`.
pub fn tokenize(input: &str) -> Vec<Token> {
    let mut parsed = SysYLexer::parse(Rule::ACCEPTED, input).unwrap();
    let top = parsed.next().unwrap().into_inner();
    let mut tokens = Vec::new();
    for pair in top {
        let token = top_level(pair);
        if !token.is_empty() {
            tokens.push(token);
        }
    }
    tokens
}
