use pest::iterators::Pairs;
use pest::{Parser, Token};
use pest_derive::Parser;
use std::{env, fs};

/// Lexer entry point: `pest_derive` generates the `Rule` enum and the
/// `parse` implementation from the grammar in `lexer.pest` at build time.
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct ExpressionParser;

/// Recursively walks the parse tree, printing one diagnostic line per leaf
/// (terminal) pair to stderr: the rule, the matched text, and the line number.
///
/// A pair whose token stream is exactly two tokens (its own `Start` and `End`)
/// contains no nested pairs and is treated as a leaf; anything longer is
/// descended into. Whitespace pairs are skipped entirely.
fn visit_pairs(pairs: Pairs<'_, Rule>) {
    for pair in pairs {
        // Whitespace carries no lexical information for this listing.
        if let Rule::WHITESPACE = pair.as_rule() {
            continue;
        }
        // Non-leaf pairs (token stream longer than Start + End) are recursed
        // into instead of being printed. Inverting the original condition
        // removes the empty `if` branch that existed only to fall through.
        if pair.clone().tokens().len() != 2 {
            visit_pairs(pair.into_inner());
            continue;
        }
        // A pair's token stream always begins with its own Start token.
        let (rule, pos) = match pair.clone().tokens().next().expect("leaf pair has tokens") {
            Token::Start { rule, pos } => (rule, pos),
            Token::End { .. } => unreachable!("pair token stream cannot begin with End"),
        };
        // The original matched on the rule to build `token`, but every arm
        // (including INTEGER_CONST) produced the same `pair.as_str()` text,
        // so the match was dead differentiation and is folded away here.
        eprintln!("{:?} {} at Line {}.", rule, pair.as_str(), pos.line_col().0);
    }
}

/// Runs the lexer over `input`, starting from the `FUNC_DEF` rule, and prints
/// every recognized token to stderr via [`visit_pairs`].
///
/// # Panics
/// Panics with the pest parse error if `input` does not match `FUNC_DEF`.
pub fn tokenize(input: &str) {
    match ExpressionParser::parse(Rule::FUNC_DEF, input) {
        Ok(pairs) => visit_pairs(pairs),
        // The original printed the error twice ("{} {:?}" on the same value);
        // pest's Display output already includes the location and expected-rule
        // context, so one rendering is sufficient.
        Err(e) => panic!("Parse error: {}", e),
    }
}

/// CLI entry point: reads the source file named by the first argument and
/// runs the lexer over it, printing the token stream to stderr.
fn main() {
    // Collect command-line arguments.
    let args: Vec<String> = env::args().collect();

    // Require a source-file argument.
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    let file_name = &args[1];

    // Read the input file. An unreadable file is an ordinary user error, so
    // report it the same way as the usage error above instead of panicking
    // with a backtrace (the original used `expect`).
    let input = match fs::read_to_string(file_name) {
        Ok(contents) => contents,
        Err(e) => {
            eprintln!("Failed to read {}: {}", file_name, e);
            std::process::exit(1);
        }
    };

    // Lexical analysis.
    tokenize(&input);
}
