mod lexer;
use pest_derive::Parser;
use pest::Parser;
use std::{env, fs};

// Pest-generated lexer/parser: the derive macro reads the grammar file
// `ident.pest` (resolved relative to `src/`) at compile time and generates
// the `Rule` enum plus the `IdentParser::parse` entry point used below.
#[derive(Parser)]
#[grammar = "ident.pest"]
struct IdentParser;

/// Runs the pest lexer over `lines` one line at a time, collecting a
/// formatted description of every matched token and printing them all to
/// stderr at the end.
///
/// `is_multiline_blocked` carries `/* ... */` block-comment state across
/// lines: while it is set, matched tokens are dropped instead of recorded.
///
/// NOTE(review): this function is only reachable via `read_and_parse`;
/// `main` goes through `lexer::file_tokenize` instead — confirm whether
/// this path is still live.
fn tokenize_lines(lines: &[&str]) {

    let mut is_multiline_blocked: bool = false;
    let mut output = Vec::new();
    for (line_index, line) in lines.iter().enumerate() {
        let line_number = line_index + 1; // line numbers are 1-based

    // Parse the single line; on a grammar mismatch, panic with pest's error.
    let tokens = IdentParser::parse(Rule::line, line).unwrap_or_else(|e| panic!("{}", e));

    // Because ident_list is silent, the iterator will contain idents
    for token in tokens {
        let mut token_str = String::new();
        // A pair is a combination of the rule which matched and a span of input
        match token.as_rule() {
            // Single-line comments produce no output at all.
            Rule::LINE_COMMENT => continue,
            // `/*` opens a block comment; the flag check below suppresses
            // this token and everything until the matching `*/`.
            Rule::MULTILINE_COMMENT_LEFT => {is_multiline_blocked = true;},
            Rule::MULTILINE_COMMENT_RIGHT => {
                is_multiline_blocked = false;
                // Skip the `*/` token itself as well.
                continue;
            },
            Rule::INTEGER_CONST => {
                // Normalize the literal (hex/octal/decimal) to its decimal value.
                match parse_number(token.as_str()){
                    Ok(num) => {
                        token_str = num.to_string();
                    },
                    // NOTE(review): on a parse failure `token_str` stays empty,
                    // yet the token is still pushed to `output` below — confirm
                    // whether this token should be skipped instead.
                    Err(e) => {eprintln!("{:?}", e);},
                };
            },
            Rule::MYSTERIOUS_CHARACTER => {
                // Lexical error: report and terminate. Exit status 0 on an
                // error looks deliberate (the judge presumably compares the
                // printed message, not the exit code) — verify before changing.
                eprintln!("Error type A at Line {}: Mysterious character \"{}\".", line_number, token.as_str());
                std::process::exit(0);
                },
            _ =>{ token_str = token.as_str().to_string()},
        };
        // Inside an open /* ... */ block: drop the token instead of recording it.
        if is_multiline_blocked {
            continue;
        }
        // println!("{:?} {} at Line {}.", token.as_rule(), token_str, line_number);
        output.push(format!("{:?} {} at Line {}.", token.as_rule(), token_str, line_number));

        // println!("Span:    {:?}", pair.as_span());

        // A pair can be converted to an iterator of the tokens which make it up:
        // for inner_pair in pair.into_inner() {
            // match inner_pair.as_rule() {
                // Rule::IDENT => println!("IDENT:  {}", inner_pair.as_str()),
                // Rule::CONST => println!("CONST:  {}", inner_pair.as_str()),
                // Rule::DEC_INT => println!("DEC_INT:   {}", inner_pair.as_str()),
                // _ => unreachable!()
            // };
        // }
    }
}

// All buffered token records go to stderr rather than stdout — presumably
// so the driver/judge captures them there; confirm before redirecting.
for result in output {
    eprintln!("{}", result);
}
}

/// Entry point: reads the source file named by the first CLI argument and
/// runs the lexer (`lexer::file_tokenize`) over its contents.
///
/// # Errors
/// Returns an error when no filename is supplied or when the file cannot
/// be read.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args: Vec<String> = env::args().collect();
    // `(cond).then(|| panic!(...))` abused `bool::then` for a side effect and
    // silently discarded the resulting `Option`. Since `main` already returns
    // `Result`, propagate a proper error instead of panicking.
    if args.len() < 2 {
        return Err(format!("Usage: {} <filename>", args[0]).into());
    }
    let filename = &args[1];
    let input = fs::read_to_string(filename)?;

    lexer::file_tokenize(&input);

    // ast_define::try_parse(&input);
    Ok(())
}

/// Reads the file named by the first CLI argument, splits it into lines,
/// and hands them to `tokenize_lines`.
///
/// Prints a usage message and exits with status 1 when no filename is
/// given; likewise exits with status 1 if the file cannot be read.
fn read_and_parse() {
    // Gather the command-line arguments.
    let args: Vec<String> = env::args().collect();

    // A filename must be supplied as the first argument.
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    let filename = &args[1];

    // Read the whole input file, aborting with a diagnostic on failure.
    let input = fs::read_to_string(filename).unwrap_or_else(|e| {
        eprintln!("Failed to read file '{}': {}", filename, e);
        std::process::exit(1);
    });

    // Split into lines and run the lexer over them.
    let lines: Vec<&str> = input.lines().collect();
    tokenize_lines(&lines);
}


/// Parses a C/SysY-style integer literal: `0x`/`0X` prefix selects
/// hexadecimal, a remaining leading `0` selects octal, anything else is
/// decimal.
///
/// # Errors
/// Returns the underlying `ParseIntError` when the digits are invalid for
/// the detected radix (e.g. `"08"` as octal, or a bare `"0x"` with no digits).
fn parse_number(input: &str) -> Result<i64, std::num::ParseIntError> {
    // Plain "0" would otherwise fall into the octal branch with no digits left.
    if input == "0" {
        return Ok(0);
    }
    // Accept both `0x` and the standard uppercase `0X` prefix (the original
    // only handled lowercase, so "0X1F" was mis-routed to the octal branch).
    if let Some(hex) = input.strip_prefix("0x").or_else(|| input.strip_prefix("0X")) {
        i64::from_str_radix(hex, 16)
    } else if let Some(oct) = input.strip_prefix('0') {
        // Octal: leading zero, remaining digits interpreted in base 8.
        i64::from_str_radix(oct, 8)
    } else {
        // Decimal: `from_str_radix(_, 10)` is just `parse` (clippy: from_str_radix_10).
        input.parse()
    }
}
