//! # Lexical Analysis Module
//!
//! This module provides lexical analysis functionality for the SysY language,
//! parsing input source code strings into token sequences.
//! Lexical analysis rules are implemented using the Pest (2.7) parser generator.
//!
//! # Main Features
//!
//! - Recognizes SysY language keywords, identifiers, constants and other lexical units
//! - Handles decimal, octal, and hexadecimal integer constants
//! - Detects and reports lexical errors (such as invalid characters)
//! - Provides detailed line number information for error location
//! 

use crate::token::Token;
use log::{debug, error, warn};
use pest::Parser;
use pest::iterators::Pairs;
use pest_derive::Parser;

/// Lexical analyzer for the SysY language.
///
/// Implemented on top of the Pest parser generator: the lexical grammar is
/// defined in the `lexer.pest` file and compiled in by `#[derive(Parser)]`,
/// which also generates the `Rule` enum used throughout this module and
/// implements the `pest::Parser` trait for this struct.
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct SysYLexer;

/// Performs lexical analysis on the input string
///
/// Parses the input SysY source code string into a token sequence while
/// detecting and collecting lexical errors.
///
/// # Arguments
///
/// * `input`: The SysY source code string to be analyzed
///
/// # Returns
///
/// Returns a tuple `(tokens, errors)`:
/// - `tokens`: List of successfully recognized tokens, each element contains `(Token, line_number, original_text)`
/// - `errors`: List of lexical error information, containing error descriptions and position information
///
/// # Examples
///
/// ```rust
/// use compiler::lexer::tokenize;
/// use compiler::token::Token;
///
/// let input = "int main() { return 0; }";
/// let (tokens, errors) = tokenize(input);
///
/// // Verify that there are no lexical errors.
/// assert!(errors.is_empty());
///
/// // Verify the specific token results parsed.
/// assert_eq!(tokens.len(), 9);
/// assert_eq!(tokens[0].0, Token::Int);
/// assert_eq!(tokens[1].0, Token::Ident("main".to_string()));
/// assert_eq!(tokens[2].0, Token::LParen);
/// assert_eq!(tokens[3].0, Token::RParen);
/// assert_eq!(tokens[4].0, Token::LBrace);
/// assert_eq!(tokens[5].0, Token::Return);
/// assert_eq!(tokens[6].0, Token::Integer(0));
/// assert_eq!(tokens[7].0, Token::Semicolon);
/// assert_eq!(tokens[8].0, Token::RBrace);
/// ```
///
/// # Panics
/// If one of these panics occurs, locate the debug output from the preceding step via the terminal messages and logs, then add the missing handling logic:
/// - May panic when the parsed inner Pair's top-level rule is empty.
/// - May panic when the parsed inner Pair's inner token rule is empty.
/// ```txt
/// Pairs:[
///   Pair {
///       rule: program,    
///       span: Span {
///           str: "int  main(){}\r\n",
///           start: 0,
///           end: 15,
///       },
///       inner: [
///           Pair {
///               rule: token,   #### Top-level rule
///               span: Span {
///                   str: "int",
///                   start: 0,
///                   end: 3,
///               },
///               inner: [
///                   Pair {
///                       rule: INT,   #### Inner token rule
///                       span: Span {
///                           str: "int",
///                           start: 0,
///                           end: 3,
///                       },
///                       inner: [],
///                   },
///               ],
///           },
///           Pair{...},
///           Pair{...},
///           Pair{...},
///           Pair{...},
///           Pair{...},
///         ]
///     }
/// ]
/// ```
///
/// # Program Termination (exit(1))
/// When this happens, add match arms for the rules reported in the error-level log output so the unknown case is handled:
///
/// - When Pest parser encounters serious syntax errors
/// - When matching Pairs-Pair-inner-Pair-rule patterns (SOI(implicit), token, MYSTERIOUS), encounters unknown rules
/// - When matching Pairs-Pair-inner-Pair-inner-Pair-rule patterns (token series), encounters unknown rules
///
pub fn tokenize(input: &str) -> (Vec<(Token, usize, String)>, Vec<String>) {
    debug!("Starting lexical analysis");

    // A hard Pest failure is unrecoverable for this tool: log it and terminate.
    let parsed = match SysYLexer::parse(Rule::program, input) {
        Ok(pairs) => pairs,
        Err(e) => {
            error!("Pest parse error: {:?}", e);
            std::process::exit(1);
        }
    };

    let mut tokens: Vec<(Token, usize, String)> = Vec::new();
    let mut errors: Vec<String> = Vec::new();

    debug!("Pairs:{:#?}", parsed);

    // The single top-level `program` pair wraps everything; walk its children
    // (the `token` / `MYSTERIOUS` / `EOI` pairs) directly.
    let program_children = parsed
        .into_iter()
        .next()
        .expect("The Pest parser failed to parse the top-level program.")
        .into_inner();
    debug!("inner_pairs:{:#?}", program_children);

    for child in program_children {
        debug!("pair:{:#?}", child);

        // End of input: nothing can follow, so stop scanning early.
        if child.as_rule() == Rule::EOI {
            debug!("Reached end of input (EOI)");
            break;
        }

        match child.as_rule() {
            // A `token` rule wraps exactly one concrete lexical unit;
            // hand its inner pair off for conversion into a `Token`.
            Rule::token => process_pairs(child.into_inner(), &mut tokens),
            // An unrecognized character: record it as a type-A lexical error.
            Rule::MYSTERIOUS => {
                let line = child.as_span().start_pos().line_col().0;
                let text = child.as_str().to_string();
                debug!("Found mysterious character at line {}: '{}'", line, text);
                errors.push(format!(
                    "Error type A at Line {}: Mysterious character \"{}\".",
                    line, text
                ));
            }
            // Any other rule at this level means the grammar and this code
            // are out of sync; abort so the mismatch is fixed, not masked.
            _ => {
                error!(
                    "Program rule appears in position affecting parsing {:#?}",
                    child
                );
                std::process::exit(1);
            }
        }
    }

    (tokens, errors)
}

/// Converts the single inner pair of a `token` rule into a [`Token`] and
/// appends it to `tokens` as `(Token, line_number, original_text)`.
///
/// Invalid integer constants (error type B) are logged and skipped; as a
/// special case, an unparsable octal constant such as `08` is re-emitted
/// digit-by-digit, each digit becoming its own decimal `Token::Integer`.
///
/// # Panics
/// Panics if `pairs` is empty — the grammar guarantees every `token` rule
/// wraps exactly one concrete lexical rule.
///
/// # Program Termination (exit(1))
/// Terminates the process when an unknown inner rule is encountered; extend
/// the `match` below when the grammar gains new token rules.
fn process_pairs(pairs: Pairs<Rule>, tokens: &mut Vec<(Token, usize, String)>) {
    debug!("Pairs {:#?}", pairs);
    // Directly process a single pair, as the inner of token rule contains only one element
    let pair = pairs
        .into_iter()
        .next()
        .expect("The inner field of a token rule must contain at least one element.");
    // Token position: 1-based line number of the span's start.
    let span = pair.as_span();
    let line = span.start_pos().line_col().0;
    let text = pair.as_str().to_string();
    let rule = pair.as_rule();

    debug!("Processing rule {:?} at line {}: '{}'", rule, line, text);

    match rule {
        // Keywords
        Rule::CONST => tokens.push((Token::Const, line, text)),
        Rule::INT => tokens.push((Token::Int, line, text)),
        Rule::VOID => tokens.push((Token::Void, line, text)),
        Rule::IF => tokens.push((Token::If, line, text)),
        Rule::ELSE => tokens.push((Token::Else, line, text)),
        Rule::WHILE => tokens.push((Token::While, line, text)),
        Rule::BREAK => tokens.push((Token::Break, line, text)),
        Rule::CONTINUE => tokens.push((Token::Continue, line, text)),
        Rule::RETURN => tokens.push((Token::Return, line, text)),
        // Operators
        Rule::PLUS => tokens.push((Token::Plus, line, text)),
        Rule::MINUS => tokens.push((Token::Minus, line, text)),
        Rule::MUL => tokens.push((Token::Mul, line, text)),
        Rule::DIV => tokens.push((Token::Div, line, text)),
        Rule::MOD => tokens.push((Token::Mod, line, text)),
        Rule::ASSIGN => tokens.push((Token::Assign, line, text)),
        Rule::EQ => tokens.push((Token::Eq, line, text)),
        Rule::NEQ => tokens.push((Token::Neq, line, text)),
        Rule::LT => tokens.push((Token::Lt, line, text)),
        Rule::GT => tokens.push((Token::Gt, line, text)),
        Rule::LE => tokens.push((Token::Le, line, text)),
        Rule::GE => tokens.push((Token::Ge, line, text)),
        Rule::NOT => tokens.push((Token::Not, line, text)),
        Rule::AND => tokens.push((Token::And, line, text)),
        Rule::OR => tokens.push((Token::Or, line, text)),
        // Delimiters
        Rule::L_PAREN => tokens.push((Token::LParen, line, text)),
        Rule::R_PAREN => tokens.push((Token::RParen, line, text)),
        Rule::L_BRACE => tokens.push((Token::LBrace, line, text)),
        Rule::R_BRACE => tokens.push((Token::RBrace, line, text)),
        Rule::L_BRACKT => tokens.push((Token::LBrackt, line, text)),
        Rule::R_BRACKT => tokens.push((Token::RBrackt, line, text)),
        Rule::COMMA => tokens.push((Token::Comma, line, text)),
        Rule::SEMICOLON => tokens.push((Token::Semicolon, line, text)),
        Rule::IDENT => tokens.push((Token::Ident(text.clone()), line, text)),
        Rule::INTEGER_CONST => {
            // Decode the constant according to its radix prefix.
            let val = if text.starts_with("0x") || text.starts_with("0X") {
                match i64::from_str_radix(&text[2..], 16) {
                    Ok(hex_val) => hex_val,
                    Err(_) => {
                        error!(
                            "Error type B at Line {}: Invalid hexadecimal constant \"{}\".",
                            line, text
                        );
                        return;
                    }
                }
            } else if text.starts_with('0') && text.len() > 1 {
                match i64::from_str_radix(&text[1..], 8) {
                    Ok(oct_val) => oct_val,
                    // Per current requirements a malformed octal constant such
                    // as `08` is re-emitted digit-by-digit as decimal integer
                    // tokens. TODO(review): consider routing this through an
                    // errors list returned to the caller instead (see the TODO
                    // history); until then the behavior below is intentional.
                    Err(_) => {
                        error!(
                            "Error type B at Line {}: Invalid octal constant \"{}\".",
                            line, text
                        );
                        for ch in text.chars() {
                            // `to_digit(10)` is `Some` exactly for ASCII digits,
                            // so non-digit characters are silently skipped.
                            if let Some(digit) = ch.to_digit(10) {
                                tokens.push((
                                    Token::Integer(i64::from(digit)),
                                    line,
                                    ch.to_string(),
                                ));
                            }
                        }
                        return;
                    }
                }
            } else {
                match text.parse::<i64>() {
                    Ok(dec_val) => dec_val,
                    Err(_) => {
                        // `error!` (not `warn!`) for consistency with the
                        // hex/octal branches: all type-B cases are errors.
                        error!(
                            "Error type B at Line {}: Invalid decimal constant \"{}\".",
                            line, text
                        );
                        return;
                    }
                }
            };
            // Keep the original lexeme (e.g. "0x10", not "16") to honor the
            // documented (Token, line_number, original_text) contract.
            tokens.push((Token::Integer(val), line, text));
        }
        _ => {
            error!("Token appears in position affecting parsing {:#?}", pair);
            std::process::exit(1);
        }
    }
}
