use pest:: Parser;
use pest_derive::Parser;

/// pest-generated lexer for SysY; rule definitions live in `lexer.pest`.
#[derive(Parser)]
#[grammar = "lexer.pest"] // points to your .pest grammar file
struct SysYLexer;

/// A lexical token of the SysY language.
///
/// Payload-carrying variants (`IntConst`, `Ident`, `Error`) store the value
/// or source text of the lexeme; all other variants are fixed spellings.
/// `Clone`/`PartialEq`/`Eq` are derived so tokens can be duplicated and
/// compared by a downstream parser or by tests.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Token {
    // Literals
    IntConst(i32),
    Ident(String),

    // Operators
    Plus,
    Minus,
    Mul,
    Div,
    Mod,
    Assign,
    Eq,
    Neq,
    Lt,
    Gt,
    Le,
    Ge,
    And,
    Or,
    Not,

    // Delimiters
    LParen,
    RParen,
    LBrace,
    RBrace,
    LBracket,
    RBracket,
    Semicolon,
    Comma,

    // Keywords
    Const,
    Int,
    Void,
    If,
    Else,
    While,
    Break,
    Continue,
    Return,

    // Lexical error; carries the offending source text.
    Error(String),
}

impl Token {
    fn name(&self) -> &str {
        match self {
            Token::IntConst(_) => "INTEGER_CONST",
            Token::Ident(_) => "IDENT",

            Token::Plus => "PLUS",
            Token::Minus => "MINUS",
            Token::Mul => "MUL",
            Token::Div => "DIV",
            Token::Mod => "MOD",
            Token::Assign => "ASSIGN",
            Token::Eq => "EQ",
            Token::Neq => "NEQ",
            Token::Lt => "LT",
            Token::Gt => "GT",
            Token::Le => "LE",
            Token::Ge => "GE",
            Token::And => "AND",
            Token::Or => "OR",
            Token::Not => "NOT",

            Token::LParen => "L_PAREN",
            Token::RParen => "R_PAREN",
            Token::LBrace => "L_BRACE",
            Token::RBrace => "R_BRACE",
            Token::LBracket => "L_BRACKT",
            Token::RBracket => "R_BRACKT",
            Token::Semicolon => "SEMICOLON",
            Token::Comma => "COMMA",

            Token::Const => "CONST",
            Token::Int => "INT",
            Token::Void => "VOID",
            Token::If => "IF",
            Token::Else => "ELSE",
            Token::While => "WHILE",
            Token::Break => "BREAK",
            Token::Continue => "CONTINUE",
            Token::Return => "RETURN",
            Token::Error(_) => "ERROR"
        }
    }
    fn content(&self) -> String {
        match self {
            Token::IntConst(i) => i.to_string(), 
            Token::Ident(i) => i.to_string(),

            Token::Plus => "+".to_string(),
            Token::Minus => "-".to_string(),
            Token::Mul => "*".to_string(),
            Token::Div => "/".to_string(),
            Token::Mod => "%".to_string(),
            Token::Assign => "=".to_string(),
            Token::Eq => "==".to_string(),
            Token::Neq => "!=".to_string(),
            Token::Lt => "<".to_string(),
            Token::Gt => ">".to_string(),
            Token::Le => "<=".to_string(),
            Token::Ge => ">=".to_string(),
            Token::And => "&&".to_string(),
            Token::Or => "||".to_string(),
            Token::Not => "!".to_string(),

            Token::LParen => "(".to_string(),
            Token::RParen => ")".to_string(),
            Token::LBrace => "{".to_string(),
            Token::RBrace => "}".to_string(),
            Token::LBracket => "[".to_string(),
            Token::RBracket => "]".to_string(),
            Token::Semicolon => ";".to_string(),
            Token::Comma => ",".to_string(),

            Token::Const => "const".to_string(),
            Token::Int => "int".to_string(),
            Token::Void => "void".to_string(),
            Token::If => "if".to_string(),
            Token::Else => "else".to_string(),
            Token::While => "while".to_string(),
            Token::Break => "break".to_string(),
            Token::Continue => "continue".to_string(),
            Token::Return => "return".to_string(),

            Token::Error(e) => e.to_string()
        }
    }
}


pub fn tokenize(input: &str) -> Result<Vec<(usize, Token)>, Vec<(usize, Token)>> {
    let pairs = SysYLexer::parse(Rule::program, input)
        .unwrap_or_else(|e| panic!("Parse error: {}", e))
        .next()
        .unwrap();
    // println!("{pairs:?}");

    let mut line = 1;
    let mut tokens = Vec::new();
    let mut err = Vec::new();
    for pair in pairs.into_inner() {
        // println!("{pair}");

        if let Rule::WS | Rule::comments = pair.as_rule() {
            line += pair.as_str().chars().filter(|&c| c == '\n').count();
            continue;
        }
        if let Rule::EOI | Rule::program = pair.as_rule() { continue; }

        // Our tokens and comments one more layer than WS
        // So we need go into the inner part of the pairs again
        let pair = pair.into_inner().next().unwrap();
        
        // println!("{pair:?}");
        
        // println!("pair = {pair:?}, pair.as_rule() = {:?}", pair.as_rule());
        match pair.as_rule() {
            Rule::INTEGER_CONST => {
                let s = pair.as_str();
                    let value = if s.starts_with("0x") || s.starts_with("0X") {
                        i32::from_str_radix(&s[2..], 16).unwrap()
                    } else if s.starts_with('0') && s.len() > 1 {
                        i32::from_str_radix(&s[1..], 8).unwrap()
                    } else {
                        s.parse().unwrap()
                    };
                tokens.push((line, Token::IntConst(value)));
            }
            Rule::IDENT => tokens.push((line, Token::Ident(pair.as_str().to_string()))),
            Rule::PLUS => tokens.push((line, Token::Plus)),
            Rule::MINUS => tokens.push((line, Token::Minus)),
            Rule::MUL => tokens.push((line, Token::Mul)),
            Rule::DIV => tokens.push((line, Token::Div)),
            Rule::MOD => tokens.push((line, Token::Mod)),
            Rule::ASSIGN => tokens.push((line, Token::Assign)),
            Rule::EQ => tokens.push((line, Token::Eq)),
            Rule::NEQ => tokens.push((line, Token::Neq)),
            Rule::LT => tokens.push((line, Token::Lt)),
            Rule::GT => tokens.push((line, Token::Gt)),
            Rule::LE => tokens.push((line, Token::Le)),
            Rule::GE => tokens.push((line, Token::Ge)),
            Rule::AND => tokens.push((line, Token::And)),
            Rule::OR => tokens.push((line, Token::Or)),
            Rule::NOT => tokens.push((line, Token::Not)),
            Rule::L_PAREN => tokens.push((line, Token::LParen)),
            Rule::R_PAREN => tokens.push((line, Token::RParen)),
            Rule::L_BRACE => tokens.push((line, Token::LBrace)),
            Rule::R_BRACE => tokens.push((line, Token::RBrace)),
            Rule::L_BRACKT => tokens.push((line, Token::LBracket)),
            Rule::R_BRACKT => tokens.push((line, Token::RBracket)),
            Rule::SEMICOLON => tokens.push((line, Token::Semicolon)),
            Rule::COMMA => tokens.push((line, Token::Comma)),
            Rule::CONST => tokens.push((line, Token::Const)),
            Rule::INT => tokens.push((line, Token::Int)),
            Rule::VOID => tokens.push((line, Token::Void)),
            Rule::IF => tokens.push((line, Token::If)),
            Rule::ELSE => tokens.push((line, Token::Else)),
            Rule::WHILE => tokens.push((line, Token::While)),
            Rule::BREAK => tokens.push((line, Token::Break)),
            Rule::CONTINUE => tokens.push((line, Token::Continue)),
            Rule::RETURN => tokens.push((line, Token::Return)),
            Rule::ERROR => err.push((line, Token::Error(pair.to_string()))),
            _ => ()
        }
    }
    
    if err.is_empty() {
        Ok(tokens)
    }
    else {
        Err(err)
    }
}
