use std::{env, fs};

use pest::{Parser, iterators::Pair, error::InputLocation};
use pest_derive::Parser;

/// pest-generated lexer for SysY; the token rules are defined in the
/// `lexer.pest` grammar file (path resolved by pest_derive, conventionally
/// relative to the crate's `src/` directory).
#[derive(Parser)]
#[grammar = "lexer.pest"]
struct SysYLexer;

/// One unit of lexer output: either a recognized token or a lexical error
/// ("mysterious character"), each tagged with its 1-based source line.
enum TokenInfo {
    // A recognized token: its line, printable token class (e.g. "IDENT"),
    // and its text (integer literals are normalized to decimal).
    Token { line: usize, token_type: String, token_text: String },
    // A lexical error: the line and the offending character (as a String).
    Error { line: usize, char: String },
}

/// Map a grammar rule to the token-class name emitted in the tool's output.
///
/// All integer-literal rule variants collapse to "INTEGER_CONST"; any rule
/// without an entry maps to "UNKNOWN", which callers use to suppress it.
///
/// NOTE(review): "L_BRACKT"/"R_BRACKT" look misspelled but are emitted
/// verbatim in the output — presumably the expected output format of the
/// assignment/grader. Confirm before "fixing" the spelling.
fn token_name(rule: &Rule) -> &'static str {
    match *rule {
        Rule::CONST_KW => "CONST",
        Rule::INT_KW => "INT", 
        Rule::VOID_KW => "VOID",
        Rule::IF_KW => "IF",
        Rule::ELSE_KW => "ELSE",
        Rule::WHILE_KW => "WHILE",
        Rule::BREAK_KW => "BREAK",
        Rule::CONTINUE_KW => "CONTINUE", 
        Rule::RETURN_KW => "RETURN",
        Rule::PLUS => "PLUS",
        Rule::MINUS => "MINUS",
        Rule::MUL => "MUL",
        Rule::DIV => "DIV",
        Rule::MOD => "MOD",
        Rule::ASSIGN => "ASSIGN",
        Rule::EQ => "EQ",
        Rule::NEQ => "NEQ",
        Rule::LT => "LT",
        Rule::GT => "GT",
        Rule::LE => "LE",
        Rule::GE => "GE",
        Rule::NOT => "NOT",
        Rule::AND => "AND",
        Rule::OR => "OR",
        Rule::L_PAREN => "L_PAREN",
        Rule::R_PAREN => "R_PAREN",
        Rule::L_BRACE => "L_BRACE",
        Rule::R_BRACE => "R_BRACE",
        Rule::L_BRACKET => "L_BRACKT",
        Rule::R_BRACKET => "R_BRACKT",
        Rule::COMMA => "COMMA",
        Rule::SEMICOLON => "SEMICOLON",
        Rule::IDENT => "IDENT",
        Rule::INTEGER_CONST | Rule::HEX_CONST | Rule::OCTAL_CONST | Rule::DECIMAL_CONST => "INTEGER_CONST",
        _ => "UNKNOWN",
    }
}

/// Normalize a SysY integer literal to its decimal string form.
///
/// Hexadecimal literals (`0x`/`0X` prefix) and octal literals (leading
/// `0`, more than one char, all ASCII digits) are re-rendered in base 10.
/// Anything else — plain decimals, or literals that fail to parse — is
/// returned unchanged.
fn convert_integer(text: &str) -> String {
    let parsed = if let Some(hex_digits) = text
        .strip_prefix("0x")
        .or_else(|| text.strip_prefix("0X"))
    {
        i64::from_str_radix(hex_digits, 16).ok()
    } else if text.len() > 1
        && text.starts_with('0')
        && text.bytes().all(|b| b.is_ascii_digit())
    {
        // from_str_radix tolerates the leading '0' in radix 8.
        i64::from_str_radix(text, 8).ok()
    } else {
        None
    };
    parsed.map_or_else(|| text.to_string(), |value| value.to_string())
}

/// Return the 1-based line number containing byte offset `pos` in `input`.
///
/// A position on line k is preceded by exactly k-1 newlines. `'\n'` only
/// occurs as a standalone byte in UTF-8, so counting bytes in the prefix
/// is equivalent to counting chars. The slice preserves the original's
/// requirement that `pos` be a valid char boundary within `input`.
fn get_line_number(input: &str, pos: usize) -> usize {
    1 + input[..pos].bytes().filter(|&b| b == b'\n').count()
}

/// Lex `input` into a flat list of tokens, or a single lexical-error
/// record if pest rejects the input entirely.
fn tokenize(input: &str) -> Vec<TokenInfo> {
    match SysYLexer::parse(Rule::PROGRAM, input) {
        Ok(pairs) => pairs
            .flat_map(|pair| process_pair(&pair, input).unwrap_or_default())
            .collect(),
        Err(e) => {
            // Both variants carry BYTE offsets into `input`; a Span's
            // start points at the first offending byte.
            let pos = match e.location {
                InputLocation::Pos(pos) => pos,
                InputLocation::Span((start, _)) => start,
            };
            let line_no = get_line_number(input, pos);
            // BUG FIX: `pos` is a byte offset, but the original used
            // `input.chars().nth(pos)` — a CHARACTER index — which reports
            // the wrong character whenever any multi-byte UTF-8 precedes
            // the error. Slice at the byte offset and take the next char.
            let char_at_pos = input[pos..].chars().next().unwrap_or('\0');
            vec![TokenInfo::Error { line: line_no, char: char_at_pos.to_string() }]
        }
    }
}

/// Recursively flatten one pest parse-tree node into `TokenInfo` records.
///
/// Returns `None` when the subtree contributes nothing: the EOI marker,
/// leaves whose rule maps to "UNKNOWN", or interior nodes whose children
/// all yielded nothing. Otherwise returns the tokens/errors in source
/// order.
fn process_pair(pair: &Pair<Rule>, input: &str) -> Option<Vec<TokenInfo>> {
    match pair.as_rule() {
        Rule::PROGRAM => {
            // Root node: concatenate whatever the children produce.
            let mut results = Vec::new();
            // clone() is needed because into_inner() consumes the Pair
            // and we only hold a reference.
            for inner_pair in pair.clone().into_inner() {
                if let Some(mut inner_results) = process_pair(&inner_pair, input) {
                    results.append(&mut inner_results);
                }
            }
            if results.is_empty() {
                None
            } else {
                Some(results)
            }
        }
        Rule::UNKNOWN_CHAR => {
            // Character the grammar matched but the language rejects:
            // surfaced as a lexical error at its source line.
            let line_no = get_line_number(input, pair.as_span().start());
            let char = pair.as_str();
            Some(vec![TokenInfo::Error { line: line_no, char: char.to_string() }])
        }
        Rule::EOI => {
            // End-of-input marker: produces no token.
            None
        }
        _ => {
            let inner_pairs: Vec<_> = pair.clone().into_inner().collect();
            if !inner_pairs.is_empty() {
                // Interior node: recurse and concatenate, same as PROGRAM.
                let mut results = Vec::new();
                for inner_pair in inner_pairs {
                    if let Some(mut inner_results) = process_pair(&inner_pair, input) {
                        results.append(&mut inner_results);
                    }
                }
                if results.is_empty() {
                    None
                } else {
                    Some(results)
                }
            } else {
                // Leaf node: emit a single token. Integer literals are
                // normalized to decimal; rules without a known token name
                // are dropped.
                let line_no = get_line_number(input, pair.as_span().start());
                let token_type = token_name(&pair.as_rule());
                let token_text = if matches!(pair.as_rule(), Rule::INTEGER_CONST | Rule::HEX_CONST | Rule::OCTAL_CONST | Rule::DECIMAL_CONST) {
                    convert_integer(pair.as_str())
                } else {
                    pair.as_str().to_string()
                };

                if token_type != "UNKNOWN" {
                    Some(vec![TokenInfo::Token { line: line_no, token_type: token_type.to_string(), token_text }])
                } else {
                    None
                }
            }
        }
    }
}

fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }
    let filename = &args[1];
    let input = fs::read_to_string(filename).expect("Failed to read file");

    let token_strs: Vec<TokenInfo> = tokenize(&input);
    let has_error = token_strs.iter().any(|t| matches!(t, TokenInfo::Error { .. }));
    for token_str in token_strs {
        match token_str {
            TokenInfo::Token { line, token_type, token_text } => {
                if !has_error {
                    println!("{} {} at Line {}.", token_type, token_text, line);
                }
            }
            TokenInfo::Error { line, char } => {
                eprintln!("Error type A at Line {}: Mysterious character \"{}\".", line, char);
            }
            
        }
    }
}