use std::{env, fs};

use pest::Parser;
use pest_derive::Parser;

/// Pest-generated lexer for SysY; the token rules are defined in `lexer.pest`.
#[derive(Parser)]
#[grammar = "lexer.pest"] // points to your .pest grammar file
struct SysYLexer;

/// Source location of a token.
///
/// A trivially copyable single-`usize` struct, so the full set of
/// standard derives is provided (`Copy` makes `Clone` free, and
/// `PartialEq`/`Eq`/`Hash` allow spans to be compared and used as keys).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Span {
    // 1-based line number in the original input (pest's `line_col` is
    // 1-based and `tokenize` adds the lines consumed by earlier chunks).
    line: usize,
}

/// A single lexed token: the grammar rule it matched, where it starts,
/// and the exact source text it covers.
#[derive(Debug)]
struct Token {
    // The pest grammar rule this token matched (e.g. `Rule::INTEGER_CONST`).
    kind: Rule,
    // Starting line of the token in the original input.
    span: Span,
    // The matched source text, owned so it outlives the parse.
    str: String,
}

/// Entry point: lexes the file named by the first CLI argument and prints
/// one line per token to stderr. Integer constants are printed as their
/// numeric value; all other tokens as their source text.
fn main() {
    // Collect command-line arguments.
    let args: Vec<String> = env::args().collect();

    // Require an input file name.
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    // Read the whole input file into memory.
    let filename = &args[1];
    let input = fs::read_to_string(filename).expect("Failed to read file");

    // If any mysterious character was reported, print nothing further.
    let (meet_err, tokens) = tokenize(&input);
    if meet_err {
        return;
    }

    for token in tokens {
        if let Rule::INTEGER_CONST = token.kind {
            // Integer literals are echoed as their decoded numeric value.
            let val = parse_int_const(&token.str);
            eprintln!("{:?} {} at Line {}.", token.kind, val, token.span.line);
        } else {
            eprintln!(
                "{:?} {} at Line {}.",
                token.kind, token.str, token.span.line
            );
        }
    }
}

/// Decodes a SysY integer literal: `0x`/`0X` prefix is hexadecimal, a
/// leading `0` is octal (a lone `"0"` also parses here, to 0), anything
/// else is decimal.
///
/// The lexer grammar only produces well-formed literals, so a parse
/// failure indicates a lexer bug and panics with the violated invariant.
fn parse_int_const(s: &str) -> i64 {
    // Accept both `0x` and `0X` (the original only handled lowercase).
    if let Some(hex) = s.strip_prefix("0x").or_else(|| s.strip_prefix("0X")) {
        i64::from_str_radix(hex, 16).expect("grammar guarantees a valid hex literal")
    } else if s.starts_with('0') {
        i64::from_str_radix(s, 8).expect("grammar guarantees a valid octal literal")
    } else {
        s.parse::<i64>().expect("grammar guarantees a valid decimal literal")
    }
}

/// Splits `input` into SysY tokens, reporting every character the grammar
/// cannot match as an `Error type A` diagnostic on stderr.
///
/// Returns `(meet_err, tokens)`: `meet_err` is true if at least one
/// mysterious character was seen; `tokens` holds everything that lexed
/// successfully (lexing recovers by skipping one character and retrying).
fn tokenize(input: &str) -> (bool, Vec<Token>) {
    let mut meet_err = false;
    // Byte offset into the original `input` where the next lexing chunk starts.
    let mut end_pos = 0;
    // Full lines consumed by previous chunks; added to pest's 1-based
    // chunk-local line numbers to get absolute line numbers.
    let mut end_line = 0;

    let mut res = Vec::new();
    while end_pos < input.len() {
        // Shadowing is intentional: re-lex from the current offset.
        let input = &input[end_pos..];

        // Parse failed on the very first character: report it, skip exactly
        // one char, and retry from the next position.
        let Ok(mut tokens) = SysYLexer::parse(Rule::TOKENS, input) else {
            let chr = input.chars().next().unwrap();
            end_pos += chr.len_utf8();
            // NOTE(review): if the skipped character were a newline,
            // `end_line` would not advance and later line numbers would
            // drift — confirm the grammar never rejects '\n'.
            eprintln!(
                r#"Error type A at Line {}: Mysterious character "{}"."#,
                end_line + 1,
                chr,
            );
            meet_err = true;
            continue;
        };

        // TOKENS is the single top-level pair; its span records how far the
        // parser got before stopping (possibly short of the chunk's end).
        let tokens = tokens.next().expect("TOKENS must have at least one token");
        let cur_end = tokens.as_span().end();
        let cur_line = tokens.as_span().end_pos().line_col().0;

        // Each inner pair wraps one concrete token; pairs with no inner
        // content are dropped by `filter_map`.
        res.extend(
            tokens
                .into_inner()
                .filter_map(|token| {
                    // println!("{token:?}");
                    token.into_inner().next().map(|token| {
                        let span = token.as_span().start_pos().line_col();
                        // Convert the chunk-relative line to an absolute one.
                        let span = Span {
                            line: end_line + span.0,
                        };
                        Token {
                            kind: token.as_rule(),
                            span,
                            str: token.as_str().to_string(),
                        }
                    })
                })
                .collect::<Vec<_>>(),
        );

        end_pos += cur_end;
        // `cur_line` is 1-based, so subtract 1 to count fully consumed lines.
        end_line += cur_line - 1;

        // The parser stopped before the end of the chunk: the character at
        // `cur_end` is unexpected.
        if cur_end < input.len() {
            let chr = input[cur_end..].chars().next().unwrap();
            end_pos += chr.len_utf8();
            eprintln!(
                r#"Error type A at Line {}: Mysterious character "{}"."#,
                end_line + 1,
                chr,
            );
            meet_err = true;

            // Skip the unexpected character and keep lexing.
            continue;
        }
    }

    (meet_err, res)
}
