use std::{env, fs};
use pest::Parser;
use pest_derive::Parser;
use pest::error::LineColLocation; 

/// Lexer generated by `pest_derive` from the grammar file below.
#[derive(Parser)]
#[grammar = "lexer.pest"] // path to the .pest grammar file (relative to src/)
struct ExpressionParser;

 #[derive(Debug, PartialEq)]
pub enum Token {
    Integer(i64,String,usize),
    Others(String,String,usize),
}
/// Parse an integer literal written in decimal, octal (leading `0`),
/// or hexadecimal (`0x` / `0X`) notation.
///
/// Malformed or out-of-range literals yield `0` instead of an error —
/// the grammar is expected to have rejected truly invalid text before
/// this function runs, so the fallback is a deliberate lenience.
fn parse_integer(text: &str) -> i64 {
    if let Some(hex) = text.strip_prefix("0x").or_else(|| text.strip_prefix("0X")) {
        // Hexadecimal: 0x1A, 0XFF, ...
        i64::from_str_radix(hex, 16).unwrap_or(0)
    } else if text.len() > 1 && text.starts_with('0') {
        // Octal: 0123, 0777, ... (the leading zero is harmless in radix 8)
        i64::from_str_radix(text, 8).unwrap_or(0)
    } else {
        // Decimal: 123, 456, ... (plain "0" lands here as well)
        text.parse().unwrap_or(0)
    }
}
/// Lexically analyze `input` into a stream of [`Token`]s.
///
/// Comments are skipped. On a grammar-level parse failure, or when an
/// `UNKNOWN` character is matched, an "Error type A" diagnostic is
/// printed to stderr and the process exits with status 1.
pub fn tokenize(input: &str) -> Vec<Token> {
    let pairs = ExpressionParser::parse(Rule::tokens, input)
        .unwrap_or_else(|e| {
            // Report the first failing position; a span error is
            // reported at its starting coordinate.
            let (line, col) = match e.line_col {
                LineColLocation::Pos(pos) => pos,
                LineColLocation::Span(start, _) => start,
            };
            eprintln!("Error type A at Line {}:Col {}.", line, col);
            std::process::exit(1);
        })
        .next() // the single top-level `tokens` match
        .expect("grammar yields exactly one `tokens` pair");

    let mut tokens = Vec::new();
    for p in pairs.into_inner() {
        let rule = p.as_rule();
        let text = p.as_str();
        let (line, _col) = p.line_col();

        match rule {
            // Comments are not tokens; drop them silently.
            Rule::LINE_COMMENT | Rule::MULTILINE_COMMENT => continue,
            Rule::UNKNOWN => {
                eprintln!("Error type A at Line {}: Mysterious character \"{}\".", line, text);
                std::process::exit(1);
            }
            Rule::INTEGER_CONST => {
                // `{:?}` on a derived enum prints only the variant name
                // (e.g. "INTEGER_CONST"); no "Rule::" prefix ever appears,
                // so the old `.replace("Rule::", "")` was a no-op.
                tokens.push(Token::Integer(parse_integer(text), format!("{:?}", rule), line));
            }
            _ => {
                tokens.push(Token::Others(text.to_string(), format!("{:?}", rule), line));
            }
        }
    }
    tokens
}

fn main() {
    // Collect command-line arguments.
    let args: Vec<String> = env::args().collect();

    // Require an input filename.
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    // Read the source file, exiting cleanly (no panic backtrace) on
    // failure — unreadable user input is not a program bug.
    let input = fs::read_to_string(&args[1]).unwrap_or_else(|e| {
        eprintln!("Failed to read {}: {}", args[1], e);
        std::process::exit(1);
    });

    // Lex and print one line per token. Output deliberately goes to
    // stderr, the same channel `tokenize` uses for its diagnostics.
    for token in tokenize(&input) {
        match token {
            Token::Integer(value, rule_name, line) => {
                eprintln!("{} {} at Line {}.", rule_name, value, line);
            }
            Token::Others(text, rule_name, line) => {
                eprintln!("{} {} at Line {}.", rule_name, text, line);
            }
        }
    }
}