use std::env;

use lexer::Rule;
use pest::{Parser, iterators::Pair};
mod lexer;

/// Entry point: lex the SysY source file named on the command line.
///
/// If the lexer produced any `error_token`s, only the error diagnostics are
/// printed; otherwise every recognized token is dumped, one per line.
fn main() {
    let args: Vec<String> = env::args().collect();

    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    let filename = &args[1];
    let input = std::fs::read_to_string(filename).expect("Failed to read file");

    // The grammar's top-level `tokens` rule wraps the whole token stream in a
    // single pair; its children are the individual token pairs.
    let pairs =
        lexer::SysYLexer::parse(lexer::Rule::tokens, &input).expect("Failed to parse input");
    let pair = pairs.peek().expect("Failed to get tokens");
    let tokens: Vec<_> = pair.into_inner().collect();

    // Each `valid_token` wraps exactly one concrete token rule; unwrap it.
    let valid_tokens: Vec<Pair<Rule>> = tokens
        .iter()
        .filter(|t| t.as_rule() == lexer::Rule::valid_token)
        .map(|t| {
            t.clone()
                .into_inner()
                .peek()
                .expect("Failed to get valid token")
        })
        .collect();
    let error_tokens: Vec<Pair<Rule>> = tokens
        .iter()
        .filter(|t| t.as_rule() == lexer::Rule::error_token)
        .cloned()
        .collect();

    // Any lexical error suppresses the normal token dump.
    if error_tokens.is_empty() {
        show_valid_tokens(&valid_tokens);
    } else {
        show_error_tokens(&error_tokens);
    }
}

fn show_valid_tokens(tokens: &Vec<Pair<Rule>>) {
    for token in tokens {
        let span = token.as_span();
        let (line, _) = span.start_pos().line_col();
        match token.as_rule() {
            Rule::CONST => eprintln!("CONST {} at Line {}.", token.as_str(), line),
            Rule::INT => eprintln!("INT {} at Line {}.", token.as_str(), line),
            Rule::VOID => eprintln!("VOID {} at Line {}.", token.as_str(), line),
            Rule::IF => eprintln!("IF {} at Line {}.", token.as_str(), line),
            Rule::ELSE => eprintln!("ELSE {} at Line {}.", token.as_str(), line),
            Rule::WHILE => eprintln!("WHILE {} at Line {}.", token.as_str(), line),
            Rule::BREAK => eprintln!("BREAK {} at Line {}.", token.as_str(), line),
            Rule::CONTINUE => eprintln!("CONTINUE {} at Line {}.", token.as_str(), line),
            Rule::RETURN => eprintln!("RETURN {} at Line {}.", token.as_str(), line),
            Rule::PLUS => eprintln!("PLUS {} at Line {}.", token.as_str(), line),
            Rule::MINUS => eprintln!("MINUS {} at Line {}.", token.as_str(), line),
            Rule::MUL => eprintln!("MUL {} at Line {}.", token.as_str(), line),
            Rule::DIV => eprintln!("DIV {} at Line {}.", token.as_str(), line),
            Rule::MOD => eprintln!("MOD {} at Line {}.", token.as_str(), line),
            Rule::ASSIGN => eprintln!("ASSIGN {} at Line {}.", token.as_str(), line),
            Rule::EQ => eprintln!("EQ {} at Line {}.", token.as_str(), line),
            Rule::NEQ => eprintln!("NEQ {} at Line {}.", token.as_str(), line),
            Rule::LT => eprintln!("LT {} at Line {}.", token.as_str(), line),
            Rule::GT => eprintln!("GT {} at Line {}.", token.as_str(), line),
            Rule::LE => eprintln!("LE {} at Line {}.", token.as_str(), line),
            Rule::GE => eprintln!("GE {} at Line {}.", token.as_str(), line),
            Rule::NOT => eprintln!("NOT {} at Line {}.", token.as_str(), line),
            Rule::AND => eprintln!("AND {} at Line {}.", token.as_str(), line),
            Rule::OR => eprintln!("OR {} at Line {}.", token.as_str(), line),
            Rule::L_PAREN => eprintln!("L_PAREN {} at Line {}.", token.as_str(), line),
            Rule::R_PAREN => eprintln!("R_PAREN {} at Line {}.", token.as_str(), line),
            Rule::L_BRACE => eprintln!("L_BRACE {} at Line {}.", token.as_str(), line),
            Rule::R_BRACE => eprintln!("R_BRACE {} at Line {}.", token.as_str(), line),
            Rule::L_BRACKT => eprintln!("L_BRACKT {} at Line {}.", token.as_str(), line),
            Rule::R_BRACKT => eprintln!("R_BRACKT {} at Line {}.", token.as_str(), line),
            Rule::COMMA => eprintln!("COMMA {} at Line {}.", token.as_str(), line),
            Rule::SEMICOLON => eprintln!("SEMICOLON {} at Line {}.", token.as_str(), line),
            Rule::IDENT => eprintln!("IDENT {} at Line {}.", token.as_str(), line),
            // Rule::INTEGER_CONST => eprintln!("INTEGER_CONST {} at Line {}.", token.as_str(), line),
            Rule::INTEGER_CONST => {
                let value =
                    parse_integer_const(token.as_str()).expect("Failed to parse integer constant");
                eprintln!("INTEGER_CONST {} at Line {}.", value, line);
            }
            _ => (),
        }
    }
}

/// Parse a SysY integer literal into its numeric value.
///
/// Supported forms:
/// - hexadecimal with a `0x` / `0X` prefix,
/// - octal with a leading `0` followed by at least one digit,
/// - decimal otherwise (a lone `"0"` is decimal zero, not octal).
///
/// # Errors
/// Returns the underlying [`std::num::ParseIntError`] when the digits are
/// invalid for the detected radix (e.g. `"08"`) or the literal overflows `u64`.
fn parse_integer_const(s: &str) -> Result<u64, std::num::ParseIntError> {
    if let Some(hex) = s.strip_prefix("0x").or_else(|| s.strip_prefix("0X")) {
        u64::from_str_radix(hex, 16)
    } else if let Some(oct) = s.strip_prefix('0').filter(|rest| !rest.is_empty()) {
        // Leading zero plus more digits means octal; a bare "0" falls through
        // to the decimal branch below.
        u64::from_str_radix(oct, 8)
    } else {
        s.parse()
    }
}

/// Print one `Error type A` diagnostic per unrecognized token, including the
/// source line it starts on and the offending text.
///
/// Takes a slice so callers may pass `&Vec<_>` (deref coercion) or any slice.
fn show_error_tokens(tokens: &[Pair<Rule>]) {
    for token in tokens {
        let (line, _) = token.as_span().start_pos().line_col();
        eprintln!("Error type A at Line {}:'{}'", line, token.as_str());
    }
}
