use pest::Parser;
use pest_derive::Parser;

/// Pest-generated lexer for the SysY language.
///
/// The token rules live in `lexer.pest`; `pest_derive` generates the
/// `Rule` enum and the `Parser::parse` implementation at compile time.
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct SysYLexer;

/// A lexical token of the SysY language.
///
/// All payloads (`String`, `i64`) are totally equatable, so `Eq` is
/// derived alongside `PartialEq` to allow use in hash-based collections
/// and exact comparisons.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Token {
    // Keywords
    Const,
    Int,
    Void,
    If,
    Else,
    While,
    Break,
    Continue,
    Return,

    // Operators
    Plus,
    Minus,
    Mul,
    Div,
    Mod,
    Assign,
    Eq,
    Neq,
    Lt,
    Gt,
    Le,
    Ge,
    Not,
    And,
    Or,

    // Delimiters
    LParen,
    RParen,
    LBrace,
    RBrace,
    LBracket,
    RBracket,
    Comma,
    Semicolon,

    // Literals and Identifiers
    /// An identifier, carrying its source text.
    Identifier(String),
    /// An integer literal, already converted from decimal/octal/hex text.
    IntegerConstant(i64),

    // Comments (lexed but filtered out of the token stream)
    LineComment(String),
    MultilineComment(String),
}

/// A token paired with its position in the source text.
#[derive(Debug, Clone)]
pub struct TokenWithLocation {
    /// The lexed token.
    pub token: Token,
    /// 1-based source line of the token's first character.
    pub line: usize,
    /// 1-based source column of the token's first character.
    pub column: usize,
}

/// Outcome of running the lexer over a complete input.
///
/// Derives `Debug`/`Clone` for consistency with the other public types
/// in this module (`Token`, `TokenWithLocation`), which already do.
#[derive(Debug, Clone)]
pub enum LexResult {
    /// All input was consumed; tokens are in source order, comments omitted.
    Success(Vec<TokenWithLocation>),
    /// Lexing failed; `line` is the 1-based line of the first error.
    Error { line: usize, message: String },
}

/// Tokenizes `input`, returning the full token stream on success or the
/// 1-based line of the first lexical error on failure.
///
/// Comments are lexed but excluded from the returned stream (see
/// `collect_tokens`).
pub fn tokenize(input: &str) -> LexResult {
    match SysYLexer::parse(Rule::program, input) {
        Ok(pairs) => {
            let mut tokens = Vec::new();

            for pair in pairs {
                collect_tokens(pair, &mut tokens);
            }

            LexResult::Success(tokens)
        }
        Err(e) => {
            // pest reports either a point (Pos) or a span; in both cases we
            // only need the starting line for the error report.
            let line = match e.line_col {
                pest::error::LineColLocation::Pos((line, _)) => line,
                pest::error::LineColLocation::Span((line, _), _) => line,
            };
            LexResult::Error {
                line,
                // Plain conversion instead of `format!` on a bare literal
                // (clippy::useless_format) — same string, no formatter pass.
                message: "Unexpected character".to_string(),
            }
        }
    }
}

/// Recursively walks a pest parse tree, appending every concrete token
/// (with its 1-based line/column) to `tokens`.
///
/// Container rules (`program`, `token`) are flattened, the end-of-input
/// marker is skipped, and comment tokens are dropped.
fn collect_tokens(pair: pest::iterators::Pair<Rule>, tokens: &mut Vec<TokenWithLocation>) {
    match pair.as_rule() {
        // `program` and `token` are pure containers: the two original arms
        // were identical, so they are merged into one or-pattern.
        Rule::program | Rule::token => {
            for inner_pair in pair.into_inner() {
                collect_tokens(inner_pair, tokens);
            }
        }
        // End of input carries no token.
        Rule::EOI => {}
        _ => {
            // Leaf rule: capture the location before `pair` is consumed.
            let (line, column) = pair.line_col();
            if let Some(token) = pair_to_token(pair) {
                // Comments are lexed but not emitted into the stream.
                if !matches!(token, Token::LineComment(_) | Token::MultilineComment(_)) {
                    tokens.push(TokenWithLocation {
                        token,
                        line,
                        column,
                    });
                }
            }
        }
    }
}

/// Maps a leaf pest pair to its `Token`.
///
/// Returns `None` for rules that produce no token (whitespace and other
/// ignored rules). Integer literals that fail to parse — e.g. a value
/// overflowing `i64` — also yield `None`, so such tokens are silently
/// dropped by the caller; callers relying on completeness should be
/// aware of this.
fn pair_to_token(pair: pest::iterators::Pair<Rule>) -> Option<Token> {
    match pair.as_rule() {
        Rule::CONST => Some(Token::Const),
        Rule::INT => Some(Token::Int),
        Rule::VOID => Some(Token::Void),
        Rule::IF => Some(Token::If),
        Rule::ELSE => Some(Token::Else),
        Rule::WHILE => Some(Token::While),
        Rule::BREAK => Some(Token::Break),
        Rule::CONTINUE => Some(Token::Continue),
        Rule::RETURN => Some(Token::Return),

        Rule::PLUS => Some(Token::Plus),
        Rule::MINUS => Some(Token::Minus),
        Rule::MUL => Some(Token::Mul),
        Rule::DIV => Some(Token::Div),
        Rule::MOD => Some(Token::Mod),
        Rule::ASSIGN => Some(Token::Assign),
        Rule::EQ => Some(Token::Eq),
        Rule::NEQ => Some(Token::Neq),
        Rule::LT => Some(Token::Lt),
        Rule::GT => Some(Token::Gt),
        Rule::LE => Some(Token::Le),
        Rule::GE => Some(Token::Ge),
        Rule::NOT => Some(Token::Not),
        Rule::AND => Some(Token::And),
        Rule::OR => Some(Token::Or),

        Rule::L_PAREN => Some(Token::LParen),
        Rule::R_PAREN => Some(Token::RParen),
        Rule::L_BRACE => Some(Token::LBrace),
        Rule::R_BRACE => Some(Token::RBrace),
        Rule::L_BRACKT => Some(Token::LBracket),
        Rule::R_BRACKT => Some(Token::RBracket),
        Rule::COMMA => Some(Token::Comma),
        Rule::SEMICOLON => Some(Token::Semicolon),

        Rule::IDENT => Some(Token::Identifier(pair.as_str().to_string())),
        Rule::INTEGER_CONST => {
            let text = pair.as_str();
            // `strip_prefix` replaces the starts_with + manual slice idiom;
            // behavior is identical but no panic-capable indexing remains.
            let value = if let Some(hex) =
                text.strip_prefix("0x").or_else(|| text.strip_prefix("0X"))
            {
                // Hexadecimal literal.
                i64::from_str_radix(hex, 16).ok()?
            } else if text.starts_with('0') && text.len() > 1 {
                // Octal literal; the leading zero is harmless to from_str_radix.
                i64::from_str_radix(text, 8).ok()?
            } else {
                // Decimal literal (including the bare "0").
                text.parse().ok()?
            };
            Some(Token::IntegerConstant(value))
        }

        Rule::LINE_COMMENT => Some(Token::LineComment(pair.as_str().to_string())),
        Rule::MULTILINE_COMMENT => Some(Token::MultilineComment(pair.as_str().to_string())),

        _ => None, // Whitespace and other ignored rules produce no token.
    }
}
