use pest::{error::{InputLocation, LineColLocation}, Parser};
use pest_derive::Parser;


/// Pest-generated lexer; the actual token rules live in `lexer.pest`.
/// The derive also generates the `Rule` enum used throughout this module.
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct ExpressionParser;

/// Kinds of tokens produced by the lexer. The SCREAMING_CASE variant names
/// mirror the rule names in `lexer.pest`, hence the `non_camel_case_types`
/// allowance.
#[derive(Clone, Debug, PartialEq, Eq)]
#[allow(non_camel_case_types)]
pub enum TokenKind {
    // Comments are lexed but filtered out before tokens are reported.
    COMMENT,
    // Operators.
    LE, GE, LT, GT, PLUS, MINUS, MUL, DIV, MOD, EQ, ASSIGN, NEQ, NOT, AND, OR,
    INTEGER_CONST,
    IDENT,
    // Punctuation.
    L_PAREN, R_PAREN, L_BRACE, R_BRACE, L_BRACKT, R_BRACKT, COMMA, SEMICOLON,
    // Keywords — the grammar lexes these as IDENT; `rule_to_kind` promotes
    // them based on the lexeme.
    CONST, INT, VOID, IF, ELSE, WHILE, BREAK, CONTINUE, RETURN,
    // End-of-input marker; filtered out and never surfaced to callers.
    _EOI
}

/// A single lexed token.
#[derive(Debug, PartialEq)]
pub struct LexToken {
    pub kind: TokenKind,
    // Exact slice of source text this token matched.
    pub lexeme: String,
    // Source line the token starts on (1-based, as reported by pest).
    pub line: usize,
}

/// A lexical error: a message plus the source line (1-based) it occurred on.
#[derive(Debug, PartialEq)]
pub struct LexFail {
    pub msg: String,
    pub line: usize,
}

/// Lex `input` and print the result to stderr, one line per token.
///
/// On success each token prints as `KIND lexeme at Line N.`; integer
/// constants are first converted to decimal, accepting hex (`0x…`/`0X…`),
/// octal (leading `0`), and decimal notation. On failure every lexical
/// error prints as `Error type A at Line N: msg.`.
pub fn lex_file(input: &str){
    // `input` is already a `&str`; passing `&input` created a needless `&&str`.
    match tokenize(input) {
        Ok(tokens) => {
            for token in &tokens {
                match token.kind {
                    TokenKind::INTEGER_CONST => {
                        // Split the lexeme into (radix, digits). A lone "0" must
                        // be matched before the octal arm — stripping its leading
                        // zero would leave an empty digit string.
                        let (radix, num_str) = match &token.lexeme[..] {
                            s if s.starts_with("0x") || s.starts_with("0X") => (16, &s[2..]),
                            "0" => (10, "0"),
                            s if s.starts_with('0') => (8, &s[1..]),
                            s => (10, s)
                        };
                        eprintln!(
                            "{:?} {} at Line {}.", 
                                token.kind, 
                                // The grammar only emits valid digit sequences, so a
                                // failure here means the constant overflowed i128 —
                                // state that invariant instead of a bare unwrap.
                                i128::from_str_radix(num_str, radix)
                                    .expect("integer constant out of i128 range"), 
                                token.line
                        )
                    }
                    _ => eprintln!("{:?} {} at Line {}.", token.kind, token.lexeme, token.line),
                }
            }
        }
        Err(lexerrs) => {
            for lexerr in &lexerrs {
                eprintln!("Error type A at Line {}: {}.", lexerr.line, lexerr.msg);
            }
        }
    }
}

/// Run the pest lexer over `input` and build the token stream.
///
/// Returns `Ok` with one `LexToken` per matched rule — end-of-input markers
/// and comments are dropped — or `Err` with every lexical error found.
pub fn tokenize(input: &str) -> Result<Vec<LexToken>, Vec<LexFail>>{
    match ExpressionParser::parse(Rule::FILE, input) {
        Ok(pairs) => {
            let mut tokens = Vec::new();
            for pair in pairs {
                let lexeme = pair.as_str();
                let line = pair.as_span().start_pos().line_col().0;
                let kind = rule_to_kind(pair.as_rule(), lexeme);
                // EOI and comments are lexed but never reported.
                if kind != TokenKind::_EOI && kind != TokenKind::COMMENT {
                    tokens.push(LexToken {
                        kind,
                        lexeme: lexeme.to_string(),
                        line,
                    });
                }
            }
            Ok(tokens)
        }
        // The first error aborts parsing, so re-lex to collect all of them.
        Err(_) => Err(find_all_err_by_lexing(input)),
    }
}


/// After a failed parse, re-lex `input` repeatedly to collect *all* lexical
/// errors instead of only the first one.
///
/// Strategy: parse the remaining input; when pest reports an error, record
/// its line (adjusted to be absolute in the original input), skip just past
/// the error position, and retry until the remainder parses cleanly or the
/// input is exhausted.
fn find_all_err_by_lexing(input: &str) -> Vec<LexFail> {
    let mut lex_errors = Vec::new();
    let total_len = input.len();
    let mut current_pos = 0;
    let mut current_line = 1;

    while current_pos < total_len {
        let remaining_input = &input[current_pos..];

        match ExpressionParser::parse(Rule::FILE, remaining_input) {
            Ok(_) => {
                // The rest parses cleanly; we only collect errors, so stop here.
                current_pos = total_len;
            }
            Err(err) => {
                // pest reports positions relative to `remaining_input`. Its
                // line 1 is the tail of the line the previous error was on, so
                // adding `line - 1` keeps `current_line` absolute.
                let (line, _column) = match err.line_col{
                    LineColLocation::Pos(line_col) => line_col,
                    // NOTE(review): a plain parse failure appears to always
                    // report a position, not a span — revisit if pest changes.
                    LineColLocation::Span(_, _) => unreachable!(),
                };

                current_line += line -1;

                let pos = match err.location {
                    InputLocation::Pos(pos) => pos,
                    InputLocation::Span(_) => unreachable!(),
                };

                lex_errors.push(LexFail{msg: "杂鱼～".to_string(), line: current_line});

                // Skip past the offending byte, then snap forward to the next
                // UTF-8 char boundary: `pos + 1` is a byte offset and may land
                // inside a multi-byte character (e.g. a CJK char), where the
                // `&input[current_pos..]` slice above would panic.
                current_pos += pos + 1;
                while current_pos < total_len && !input.is_char_boundary(current_pos) {
                    current_pos += 1;
                }
            }
        }
    }

    lex_errors
}


/// Map a pest grammar rule (plus its matched text) to our `TokenKind`.
///
/// Keywords are lexed as `IDENT` by the grammar and promoted to their
/// dedicated token kinds here based on the lexeme.
fn rule_to_kind(rule: Rule, lexeme: &str) -> TokenKind {
    match rule {
        // Identifiers need a second look: reserved words become keywords.
        Rule::IDENT => keyword_or_ident(lexeme),

        Rule::INTEGER_CONST => TokenKind::INTEGER_CONST,
        Rule::COMMENT => TokenKind::COMMENT,

        // Operators.
        Rule::LE => TokenKind::LE,
        Rule::GE => TokenKind::GE,
        Rule::LT => TokenKind::LT,
        Rule::GT => TokenKind::GT,
        Rule::PLUS => TokenKind::PLUS,
        Rule::MINUS => TokenKind::MINUS,
        Rule::MUL => TokenKind::MUL,
        Rule::DIV => TokenKind::DIV,
        Rule::MOD => TokenKind::MOD,
        Rule::EQ => TokenKind::EQ,
        Rule::ASSIGN => TokenKind::ASSIGN,
        Rule::NEQ => TokenKind::NEQ,
        Rule::NOT => TokenKind::NOT,
        Rule::AND => TokenKind::AND,
        Rule::OR => TokenKind::OR,

        // Punctuation.
        Rule::L_PAREN => TokenKind::L_PAREN,
        Rule::R_PAREN => TokenKind::R_PAREN,
        Rule::L_BRACE => TokenKind::L_BRACE,
        Rule::R_BRACE => TokenKind::R_BRACE,
        Rule::L_BRACKT => TokenKind::L_BRACKT,
        Rule::R_BRACKT => TokenKind::R_BRACKT,
        Rule::COMMA => TokenKind::COMMA,
        Rule::SEMICOLON => TokenKind::SEMICOLON,

        Rule::EOI => TokenKind::_EOI,

        // Any other rule reaching here is a grammar/lexer mismatch.
        other => {
            eprintln!("{:?}", other);
            unreachable!()
        }
    }
}

/// Promote reserved words to their keyword tokens; anything else stays IDENT.
fn keyword_or_ident(lexeme: &str) -> TokenKind {
    match lexeme {
        "const"    => TokenKind::CONST,
        "int"      => TokenKind::INT,
        "void"     => TokenKind::VOID,
        "if"       => TokenKind::IF,
        "else"     => TokenKind::ELSE,
        "while"    => TokenKind::WHILE,
        "break"    => TokenKind::BREAK,
        "continue" => TokenKind::CONTINUE,
        "return"   => TokenKind::RETURN,
        _          => TokenKind::IDENT,
    }
}