//! 词法分析器实现 - 基于Pest优化版本

use pest::Parser;
use pest_derive::Parser;
use crate::token::{Token, TokenWithPos};
use crate::utils::convert_to_decimal;

/// Pest-generated lexer for the SysY language.
///
/// The grammar is loaded at compile time from `lexer.pest` by the
/// `pest_derive::Parser` macro, which also generates the `Rule` enum
/// used throughout this module.
#[derive(Parser)]
#[grammar = "lexer.pest"] 
pub struct SysYLexer;

/// 主要的词法分析函数 - 优化版本
/// Tokenize a SysY source string into positioned tokens.
///
/// Returns every recognized token in order on success. If any unrecognized
/// characters were captured by the `error_recovery` rule, returns all
/// collected error messages joined by newlines instead.
pub fn tokenize(input: &str) -> Result<Vec<TokenWithPos>, String> {
    // The grammar's error_recovery rule should make parsing infallible in
    // practice; a top-level parse failure is reported as-is.
    let pairs = SysYLexer::parse(Rule::program, input)
        .map_err(|_| "Unexpected parsing failure".to_string())?;

    let mut tokens = Vec::new();
    let mut errors: Vec<String> = Vec::new();

    // Walk every item nested directly inside each `program` pair.
    for item in pairs.flat_map(|program| program.into_inner()) {
        let rule = item.as_rule();
        if is_token_rule(rule) {
            // A recognized lexical token; record it with its position.
            if let Some(tok) = create_token(item) {
                tokens.push(tok);
            }
        } else if rule == Rule::error_recovery {
            // An unrecognized character captured by the recovery rule.
            let (line, _) = item.line_col();
            errors.push(format!(
                "Error type A at Line {}: Mysterious character \"{}\".",
                line,
                item.as_str()
            ));
        }
        // Everything else (SOI/EOI markers) is silently skipped.
    }

    if errors.is_empty() {
        Ok(tokens)
    } else {
        Err(errors.join("\n"))
    }
}

/// 判断是否为token规则
/// Returns `true` for grammar rules that produce an output token:
/// keywords, operators, delimiters, identifiers, and integer literals.
fn is_token_rule(rule: Rule) -> bool {
    let is_keyword = matches!(
        rule,
        Rule::CONST | Rule::INT | Rule::VOID | Rule::IF | Rule::ELSE
            | Rule::WHILE | Rule::BREAK | Rule::CONTINUE | Rule::RETURN
    );
    let is_operator = matches!(
        rule,
        Rule::PLUS | Rule::MINUS | Rule::MUL | Rule::DIV | Rule::MOD
            | Rule::ASSIGN | Rule::EQ | Rule::NEQ | Rule::LT | Rule::GT
            | Rule::LE | Rule::GE | Rule::NOT | Rule::AND | Rule::OR
    );
    let is_delimiter = matches!(
        rule,
        Rule::L_PAREN | Rule::R_PAREN | Rule::L_BRACE | Rule::R_BRACE
            | Rule::L_BRACKT | Rule::R_BRACKT | Rule::COMMA | Rule::SEMICOLON
    );
    let is_literal = matches!(rule, Rule::IDENT | Rule::INTEGER_CONST);

    is_keyword || is_operator || is_delimiter || is_literal
}

/// 创建Token对象
/// Convert a single grammar pair into a [`TokenWithPos`], recording the
/// matched text together with its source line and column.
///
/// Returns `None` for rules that do not map to a token (e.g. SOI/EOI),
/// mirroring the filtering done by `is_token_rule`.
fn create_token(pair: pest::iterators::Pair<Rule>) -> Option<TokenWithPos> {
    let (line, column) = pair.line_col();
    let text = pair.as_str();
    let span = text.to_string();

    let token = match pair.as_rule() {
        // Keywords
        Rule::CONST => Token::Const,
        Rule::INT => Token::Int,
        Rule::VOID => Token::Void,
        Rule::IF => Token::If,
        Rule::ELSE => Token::Else,
        Rule::WHILE => Token::While,
        Rule::BREAK => Token::Break,
        Rule::CONTINUE => Token::Continue,
        Rule::RETURN => Token::Return,

        // Operators
        Rule::PLUS => Token::Plus,
        Rule::MINUS => Token::Minus,
        Rule::MUL => Token::Mul,
        Rule::DIV => Token::Div,
        Rule::MOD => Token::Mod,
        Rule::ASSIGN => Token::Assign,
        Rule::EQ => Token::Eq,
        Rule::NEQ => Token::Neq,
        Rule::LT => Token::Lt,
        Rule::GT => Token::Gt,
        Rule::LE => Token::Le,
        Rule::GE => Token::Ge,
        Rule::NOT => Token::Not,
        Rule::AND => Token::And,
        Rule::OR => Token::Or,

        // Delimiters
        Rule::L_PAREN => Token::LParen,
        Rule::R_PAREN => Token::RParen,
        Rule::L_BRACE => Token::LBrace,
        Rule::R_BRACE => Token::RBrace,
        Rule::L_BRACKT => Token::LBrackt,
        Rule::R_BRACKT => Token::RBrackt,
        Rule::COMMA => Token::Comma,
        Rule::SEMICOLON => Token::Semicolon,

        // Identifiers and integer literals carry their source text/value.
        Rule::IDENT => Token::Ident(text.to_string()),
        // Integer literals are normalized (hex/octal/decimal) by the
        // shared conversion helper before being stored.
        Rule::INTEGER_CONST => Token::IntegerConst(convert_to_decimal(text)),

        _ => return None,
    };

    Some(TokenWithPos {
        token,
        line,
        column,
        span,
    })
}

