mod lexer;

use std::{env, fs::File, io::Read};

use lexer::tokenize;

/// Returns the character at the position a pest error points to, as a
/// one-character `String`, or `"?"` when the position cannot be resolved
/// against `content` (out-of-range line/column, empty line, etc.).
///
/// `content` must be the same source text the error was produced from.
fn extract_error_char(content: &str, error: &pest::error::Error<lexer::Rule>) -> String {
    // Both error shapes carry a 1-based (line, col) start position.
    let (line_num, col_num) = match error.line_col {
        pest::error::LineColLocation::Pos((line, col)) => (line, col),
        pest::error::LineColLocation::Span((line, col), _) => (line, col),
    };

    // pest positions are 1-based; 0 would underflow the index below.
    if line_num == 0 || col_num == 0 {
        return "?".to_string();
    }

    // Index by *characters*, not bytes: pest's column is a character
    // position, so comparing it against `line.len()` (a byte count) would
    // be misleading on multi-byte UTF-8 lines. `nth` naturally yields
    // `None` for out-of-range lines or columns, which maps to "?".
    content
        .lines()
        .nth(line_num - 1)
        .and_then(|line| line.chars().nth(col_num - 1))
        .map(|c| c.to_string())
        .unwrap_or_else(|| "?".to_string())
}

/// Entry point: tokenize the file named on the command line and print the
/// resulting token stream, or report a lexical error with its line number
/// and the offending character.
fn main() -> std::io::Result<()> {
    let args: Vec<String> = env::args().collect();

    // Require exactly one path argument; bail out with usage otherwise.
    let path = match args.get(1) {
        Some(p) => p,
        None => {
            eprintln!("Usage: {} <filename>", args[0]);
            std::process::exit(1);
        }
    };

    // Read the whole source file into memory before lexing.
    let mut content = String::new();
    File::open(path)?.read_to_string(&mut content)?;

    match tokenize(&content) {
        Ok(tokens) => {
            for token in tokens {
                token.print();
            }
        }
        Err(err) => {
            // Both position- and span-style pest errors carry a 1-based
            // (line, col) start; only the line number is needed here.
            let err_line = match err.line_col {
                pest::error::LineColLocation::Pos((line, _)) => line,
                pest::error::LineColLocation::Span((line, _), _) => line,
            };
            // Recover the character the error points at from the source text.
            let error_char = extract_error_char(&content, &err);
            // NOTE: this message format is the tool's expected output — keep
            // it stable.
            println!("Error type A at Line {}: Mysterious character \"{}\".", err_line, error_char);
        }
    }
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::lexer::*;

    /// Lexing a small but complete C-like program must succeed and the
    /// token stream must contain every keyword the source uses.
    #[test]
    fn test_complete_program() {
        let input = r#"
            int main() {
                const int x = 42;
                if (x != 0) {
                    return x * 2;
                }
                return 0;
            }
        "#;

        let tokens = tokenize(input).unwrap();
        assert!(!tokens.is_empty());

        // True when any token in the stream satisfies the predicate.
        let has = |pred: fn(&TokenType) -> bool| tokens.iter().any(|t| pred(&t.token_type));

        // Each keyword present in the source must appear at least once.
        assert!(
            has(|t| matches!(t, TokenType::Int))
                && has(|t| matches!(t, TokenType::Const))
                && has(|t| matches!(t, TokenType::If))
                && has(|t| matches!(t, TokenType::Return))
        );
    }
}