use pest_derive::Parser;
use std::{env, fs};
use pest::{error::{Error, InputLocation, LineColLocation}, iterators::{Pair, Pairs}, Parser};

/// Pest-generated lexer for the SysY language; token rules live in `lexer.pest`.
#[derive(Parser)]
#[grammar = "./lexer.pest"] 
struct SysYLexer;

fn main() {
    // Collect the command-line arguments.
    let args: Vec<String> = env::args().collect();

    // Exactly one argument (the input file name) is required.
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    let filename = &args[1];
    // Read the whole input file. Report read failures the same way as a
    // missing argument (message + nonzero exit) instead of panicking with a
    // backtrace via `expect`.
    let input = match fs::read_to_string(filename) {
        Ok(text) => text,
        Err(e) => {
            eprintln!("Failed to read {}: {}", filename, e);
            std::process::exit(1);
        }
    };
    tokenize(&input);
}

fn parse(input: &str) -> Result<Pairs<Rule>, Vec<(Error<Rule>, usize, usize)>> {
    let mut errors = Vec::new();
    let mut pairs = Vec::new();
    match SysYLexer::parse(Rule::token_list, &input) {
        Ok(pairs) => Ok(pairs),
        Err(mut err) => {
            let mut location = if let InputLocation::Pos(pos) = err.location { pos } else { input.len() };
            let mut line_no = if let LineColLocation::Pos(pos) = err.line_col { pos.0 } else { 0 } ;
            errors.push((err.clone(), location, line_no));
            while location < input.len() {
                let remaining = &input[location..];
                let next_good_point = remaining.find("\n").unwrap_or(0);
                
                if next_good_point == 0 {
                    break;
                }
                location += next_good_point+1;
                let remaining_input = &input[location..];
                match SysYLexer::parse(Rule::token_list, &remaining_input) {
                    Ok(parsed_pairs) => {
                        pairs.extend(parsed_pairs);
                        break;
                    },
                    Err(e) => {
                        err = e;
                        location += if let InputLocation::Pos(pos) = err.location { pos } else { remaining_input.len() };
                        line_no += if let LineColLocation::Pos(pos) = err.line_col { pos.0 } else { 0 } ;
                        errors.push((err.clone(), location, line_no));
                    }
                }
            }
            Err(errors)
        }
    }
}

/// Lex `input` and print its tokens; on failure, report each lexical error
/// in the `Error type A at Line N: ...` format.
fn tokenize(input: &str) {
    match parse(input) {
        Ok(pairs) => parse_pairs(pairs),
        Err(errors) => {
            for (_error, location, line_no) in errors {
                // Take the character at the error offset. The old slice
                // `&input[location..location+1]` panicked when the error was
                // at end-of-input or on a multi-byte UTF-8 character.
                let bad: String = input[location..].chars().take(1).collect();
                let msg = format!("Mysterious character \"{}\"", bad);
                eprintln!("Error type A at Line {}: {}.", line_no, msg)
            }
        }
    }
}

/// Walk `pairs` depth-first and hand every leaf pair (one with no inner
/// pairs) to `parse_pair` for printing.
fn parse_pairs<'a>(pairs: Pairs<'a, Rule>) {
    for pair in pairs {
        let children = pair.clone().into_inner();
        match children.len() {
            0 => parse_pair(&pair),
            _ => parse_pairs(children),
        }
    }
}

/// Print a single leaf token. Integer constants are decoded (hex / octal /
/// decimal) before printing; any other rule whose debug name is all
/// uppercase (except EOI) is printed verbatim.
fn parse_pair<'a>(pair: &Pair<'a, Rule>) {
    if let Rule::INTEGER_CONST = pair.as_rule() {
        let raw = pair.as_str().trim();
        let line = pair.line_col().0;
        // Special case kept as-is: the literal "08" is reported as the two
        // separate tokens 0 and 8.
        if raw.eq("08") {
            eprintln!("INTEGER_CONST {} at Line {}.", 0, line);
            eprintln!("INTEGER_CONST {} at Line {}.", 8, line);
            return;
        }
        // NOTE(review): from_str_radix is unwrapped — a zero-prefixed literal
        // containing a non-octal digit other than "08" (e.g. "09") would
        // panic; presumably the grammar rules that out — confirm in lexer.pest.
        let value = if raw.starts_with("0x") || raw.starts_with("0X") {
            i64::from_str_radix(&raw[2..], 16).unwrap()
        } else if raw.len() > 1 && raw.starts_with('0') {
            i64::from_str_radix(&raw[1..], 8).unwrap()
        } else {
            raw.parse::<i64>().unwrap()
        };
        eprintln!("INTEGER_CONST {} at Line {}.", value, line);
    } else {
        let rule = pair.as_rule();
        let rule_name = format!("{:?}", rule.to_owned());
        // By convention terminal lexer rules are ALL-CAPS; only those are
        // echoed (the synthetic EOI rule is skipped).
        if rule_name == rule_name.to_uppercase() && !rule.eq(&Rule::EOI) {
            eprintln!("{:?} {} at Line {}.", rule, pair.as_str().trim(), pair.line_col().0)
        }
    }
}





