use pest_derive::Parser;
use pest::Parser;

/// pest-generated parser for the SysY language.
///
/// The grammar is read from `sysY.pest` at compile time; the derive macro
/// also generates the `Rule` enum referenced throughout this file.
#[derive(Parser)]
#[grammar = "sysY.pest"]
pub struct SysYParser1;

/// A single lexical token produced by `tokenize`.
///
/// `Clone`, `Eq` and `Hash` are derived in addition to the original
/// `Debug`/`PartialEq` so the type can be duplicated and used as a map key;
/// all fields are `String`/`usize`, so the extra derives are free.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Token {
    pub rule: String,   // grammar rule name (the `Rule` variant, rendered via `Debug`)
    pub value: String,  // matched source text (integer literals are normalized to decimal)
    pub line: usize,    // 1-based source line where the token starts
}

/// Lexes `input` into a flat list of terminal [`Token`]s.
///
/// Parsing is attempted repeatedly: on each syntax error a diagnostic is
/// recorded, the offending character is blanked out with a space, and the
/// parse is retried. If any errors were collected (or recovery cannot make
/// progress), the diagnostics are printed to stderr and an empty vector is
/// returned; otherwise the tokens of the (possibly patched) input are
/// returned.
pub fn tokenize(input: &str) -> Vec<Token> {
    let mut errs: Vec<String> = Vec::new();
    let mut input_owned = input.to_string();
    // Untouched source lines, kept for diagnostics: `input_owned` is
    // patched during recovery, but error messages must show original text.
    let original_lines: Vec<&str> = input.lines().collect();

    // Error-recovery loop: blank out one offending character per failed
    // parse until the input parses or no further progress is possible.
    loop {
        match SysYParser1::parse(Rule::CompUnit, &input_owned) {
            Ok(_) => break,
            Err(e) => {
                // pest reports 1-based (line, column) positions, with the
                // column counted in characters, not bytes.
                let (line, col) = match e.line_col {
                    pest::error::LineColLocation::Pos((line, col)) => (line, col),
                    pest::error::LineColLocation::Span((line, col), _) => (line, col),
                };
                errs.push(format!(
                    "Error type A at Line {}: Col: {}, {}",
                    line,
                    col,
                    original_lines
                        .get(line.saturating_sub(1))
                        .copied()
                        .unwrap_or("")
                ));
                let mut lines: Vec<String> =
                    input_owned.lines().map(|l| l.to_string()).collect();
                if line == 0 || line > lines.len() || col == 0 {
                    // Position is out of range: report what we have and give up
                    // (previously this path dropped the diagnostics silently).
                    for err in &errs {
                        eprintln!("{}", err);
                    }
                    return Vec::new();
                }
                let line_str = &mut lines[line - 1];
                // Replace the offending *character* (not byte) with a space.
                // Byte indexing, as `replace_range(col-1..=col-1, " ")` did,
                // panics when the error lands on a multi-byte UTF-8 char.
                let replaced = match line_str.char_indices().nth(col - 1) {
                    Some((start, ch)) => {
                        line_str.replace_range(start..start + ch.len_utf8(), " ");
                        true
                    }
                    None => false, // error past end of line: nothing to patch
                };
                let patched = lines.join("\n");
                // Progress guard: if the patch changed nothing (column past
                // the line end, or the offending char was already a space),
                // re-parsing would produce the same error forever.
                if !replaced || patched == input_owned {
                    for err in &errs {
                        eprintln!("{}", err);
                    }
                    return Vec::new();
                }
                input_owned = patched;
            }
        }
    }

    // Re-parse to obtain the pairs. The loop above proved this succeeds,
    // but breaking the pairs out of the loop directly is rejected by the
    // borrow checker (they borrow `input_owned`, which the loop mutates).
    let pairs = SysYParser1::parse(Rule::CompUnit, &input_owned)
        .expect("parse succeeded in the recovery loop");
    let mut tokens = Vec::new();

    /// Converts hexadecimal (`0x…`/`0X…`) and octal (leading-zero) literals
    /// to their decimal spelling; anything else — including literals that
    /// fail to parse as `i64` — is returned unchanged.
    fn normalize_integer(value: String) -> String {
        let converted = if let Some(hex) = value
            .strip_prefix("0x")
            .or_else(|| value.strip_prefix("0X"))
        {
            i64::from_str_radix(hex, 16).ok().map(|v| v.to_string())
        } else if value.len() > 1 && value.starts_with('0') {
            i64::from_str_radix(&value[1..], 8).ok().map(|v| v.to_string())
        } else {
            None
        };
        converted.unwrap_or(value)
    }

    /// Depth-first walk collecting every terminal (leaf) rule as a `Token`;
    /// non-terminals are recursed into.
    fn extract_tokens(pair: pest::iterators::Pair<Rule>, tokens: &mut Vec<Token>) {
        let rule = pair.as_rule();
        let rule_name = format!("{:?}", rule);
        let value = pair.as_str().to_string();
        // 1-based source line of the token's first character.
        let line = pair.as_span().start_pos().line_col().0;
        match rule {
            Rule::CONST | Rule::INT | Rule::FLOAT | Rule::VOID | Rule::IF | Rule::ELSE | Rule::WHILE | Rule::BREAK | Rule::CONTINUE | Rule::RETURN |
            Rule::PLUS | Rule::MINUS | Rule::MUL | Rule::DIV | Rule::MOD | Rule::ASSIGN | Rule::EQ | Rule::NEQ | Rule::LT | Rule::GT | Rule::LE | Rule::GE |
            Rule::NOT | Rule::AND | Rule::OR | Rule::L_PAREN | Rule::R_PAREN | Rule::L_BRACE | Rule::R_BRACE | Rule::L_BRACKT | Rule::R_BRACKT |
            Rule::COMMA | Rule::SEMICOLON | Rule::IDENT | Rule::INTEGER_CONST => {
                let value = if matches!(rule, Rule::INTEGER_CONST) {
                    normalize_integer(value)
                } else {
                    value
                };
                tokens.push(Token { rule: rule_name, value, line });
            }
            _ => {
                for inner in pair.into_inner() {
                    extract_tokens(inner, tokens);
                }
            }
        }
    }

    for pair in pairs {
        extract_tokens(pair, &mut tokens);
    }

    if errs.is_empty() {
        tokens
    } else {
        // Errors were recovered from, so the token stream is unreliable:
        // report the diagnostics and return nothing.
        for err in &errs {
            eprintln!("{}", err);
        }
        Vec::new()
    }
}
