use std::{env, fs};

use pest::Parser;
use pest_derive::Parser;

#[derive(Parser)]
#[grammar = "lexer.pest"] // 指向你的 .pest 文件
struct SysYLexer;

/// Print every token in `token_stream` to stderr as
/// `RULE value at Line N.`.
///
/// `token_stream` is expected to be the top-level `TOKEN_STREAM` pair;
/// each direct child wraps one concrete token. Integer constants are
/// normalized to their decimal representation before printing.
fn print_token_info(token_stream: pest::iterators::Pair<Rule>) {
    for pair in token_stream.into_inner() {
        // Each child wraps the concrete token; skip children with no
        // inner pair (e.g. an EOI marker).
        let token = match pair.into_inner().peek() {
            Some(inner) => inner,
            None => continue,
        };
        let value = match token.as_rule() {
            Rule::INTEGER_CONST => normalize_int_literal(token.as_str()),
            _ => token.as_str().to_string(),
        };
        eprintln!(
            "{:?} {} at Line {}.",
            token.as_rule(),
            value,
            token.as_span().start_pos().line_col().0
        );
    }
}

/// Convert a SysY integer literal — decimal, octal (`0…`), or hex
/// (`0x…` / `0X…`) — to its decimal string form.
///
/// A lone `"0"` falls through to the decimal branch (the octal branch
/// requires at least one digit after the leading `0`).
fn normalize_int_literal(s: &str) -> String {
    if let Some(hex) = s.strip_prefix("0x").or_else(|| s.strip_prefix("0X")) {
        i64::from_str_radix(hex, 16)
            .expect("lexer guarantees INTEGER_CONST hex digits are valid")
            .to_string()
    } else if s.len() > 1 && s.starts_with('0') {
        i64::from_str_radix(&s[1..], 8)
            .expect("lexer guarantees INTEGER_CONST octal digits are valid")
            .to_string()
    } else {
        s.to_string()
    }
}

/// Report every `ERROR_TOKEN` among the direct children of
/// `token_stream` to stderr, one line per offending token.
fn print_error_token_info(token_stream: pest::iterators::Pair<Rule>) {
    let error_tokens = token_stream
        .into_inner()
        .filter(|p| p.as_rule() == Rule::ERROR_TOKEN);
    for err in error_tokens {
        let line = err.as_span().start_pos().line_col().0;
        eprintln!(
            "Error type A at Line {}: invalid token {}",
            line,
            err.as_str()
        );
    }
}

/// Return `true` if any direct child of `token_stream` is an
/// `ERROR_TOKEN`.
fn contain_error_token(token_stream: pest::iterators::Pair<Rule>) -> bool {
    // `any` short-circuits on the first match, same as the manual
    // loop-with-early-return it replaces.
    token_stream
        .into_inner()
        .any(|pair| pair.as_rule() == Rule::ERROR_TOKEN)
}

/// Entry point: read the source file named by the first CLI argument,
/// run the SysY lexer over it, and print either the invalid tokens
/// (if any `ERROR_TOKEN` was produced) or the full token stream to
/// stderr.
pub(crate) fn lexer_parse() {
    // First CLI argument is the input path. Fail with a usage message
    // instead of the cryptic index-out-of-bounds panic `&args[1]` gave
    // when no argument was supplied.
    let filename = env::args()
        .nth(1)
        .expect("usage: <program> <input-file>");

    // Read the whole input; include the path and the underlying I/O
    // error in the failure message.
    let input = fs::read_to_string(&filename)
        .unwrap_or_else(|e| panic!("Failed to read file {}: {}", filename, e));

    // Lexing: a parse error here means the grammar itself rejected the
    // input before any ERROR_TOKEN could be emitted.
    let pairs = SysYLexer::parse(Rule::TOKEN_STREAM, &input).unwrap_or_else(|e| panic!("{}", e));

    if let Some(token_stream) = pairs.peek() {
        // `contain_error_token` consumes its pair, so clone for the
        // check; the final use can move the pair directly.
        if contain_error_token(token_stream.clone()) {
            print_error_token_info(token_stream);
        } else {
            print_token_info(token_stream);
        }
    }
}
