use pest::Parser;
use pest_derive::Parser;
use std::{env, fs};

/// Pest-generated lexer for SysY; all token rules live in the grammar file.
#[derive(Parser)]
#[grammar = "lexer.pest"] // points at src/lexer.pest
struct SysYLexer;

/// Every terminal token the lexer can produce.
///
/// The variant names deliberately use SCREAMING_SNAKE_CASE so they match the
/// grammar rule names in `lexer.pest` one-to-one and appear verbatim in the
/// printed token listing. Renaming them to UpperCamelCase would break that
/// correspondence (and every `match` in this file), so suppress the naming
/// lint instead of renaming.
#[allow(non_camel_case_types)]
#[derive(Debug, Clone, PartialEq, Eq)]
enum TokenKind {
    // Keywords
    CONST, INT, VOID, IF, ELSE, WHILE, BREAK, CONTINUE, RETURN,
    // Operators
    PLUS, MINUS, MUL, DIV, MOD, ASSIGN, EQ, NEQ, LT, GT, LE, GE, NOT, AND, OR,
    // Delimiters
    L_PAREN, R_PAREN, L_BRACE, R_BRACE, L_BRACKT, R_BRACKT, COMMA, SEMICOLON,
    // Identifiers and integer literals
    IDENT, INTEGER_CONST,
}

/// A single lexed token.
#[derive(Debug, Clone, PartialEq, Eq)]
struct Token {
    kind: TokenKind,
    text: String, // for INTEGER_CONST: the value as a decimal string; otherwise the raw lexeme
    line: usize,  // line number of the token's first character
}

/// A lexical error: input text that no token rule matched.
#[derive(Debug, Clone)]
struct LexError {
    line: usize, // line number where the offending text starts
    ch: String,  // the offending text, as captured by the grammar's `error` rule
}

/// Map a pest grammar `Rule` to its corresponding `TokenKind`.
///
/// Returns `None` for rules that are not tokens themselves (structural
/// rules such as `file`, `token`, `skip`, and `error` — see `tokenize`).
fn rule_to_kind(rule: Rule) -> Option<TokenKind> {
    Some(match rule {
        Rule::CONST => TokenKind::CONST,
        Rule::INT => TokenKind::INT,
        Rule::VOID => TokenKind::VOID,
        Rule::IF => TokenKind::IF,
        Rule::ELSE => TokenKind::ELSE,
        Rule::WHILE => TokenKind::WHILE,
        Rule::BREAK => TokenKind::BREAK,
        Rule::CONTINUE => TokenKind::CONTINUE,
        Rule::RETURN => TokenKind::RETURN,

        Rule::PLUS => TokenKind::PLUS,
        Rule::MINUS => TokenKind::MINUS,
        Rule::MUL => TokenKind::MUL,
        Rule::DIV => TokenKind::DIV,
        Rule::MOD => TokenKind::MOD,
        Rule::ASSIGN => TokenKind::ASSIGN,
        Rule::EQ => TokenKind::EQ,
        Rule::NEQ => TokenKind::NEQ,
        Rule::LT => TokenKind::LT,
        Rule::GT => TokenKind::GT,
        Rule::LE => TokenKind::LE,
        Rule::GE => TokenKind::GE,
        Rule::NOT => TokenKind::NOT,
        Rule::AND => TokenKind::AND,
        Rule::OR => TokenKind::OR,

        Rule::L_PAREN => TokenKind::L_PAREN,
        Rule::R_PAREN => TokenKind::R_PAREN,
        Rule::L_BRACE => TokenKind::L_BRACE,
        Rule::R_BRACE => TokenKind::R_BRACE,
        Rule::L_BRACKT => TokenKind::L_BRACKT,
        Rule::R_BRACKT => TokenKind::R_BRACKT,
        Rule::COMMA => TokenKind::COMMA,
        Rule::SEMICOLON => TokenKind::SEMICOLON,

        Rule::IDENT => TokenKind::IDENT,
        Rule::INTEGER_CONST => TokenKind::INTEGER_CONST,
        _ => return None,
    })
}

/// Normalize an integer lexeme to its decimal string representation.
///
/// Supports `0x`/`0X` hexadecimal, `0`-prefixed octal (including forms like
/// `"0002"`), and plain decimal; a lone `"0"` passes through unchanged.
/// Values that fail to parse (or overflow u128) fall back to `"0"`.
fn int_lexeme_to_decimal_text(s: &str) -> String {
    let (digits, radix) = if let Some(hex) = s.strip_prefix("0x").or_else(|| s.strip_prefix("0X")) {
        (hex, 16)
    } else if s.len() > 1 && s.starts_with('0') {
        // Leading zero with more digits behind it: octal.
        (&s[1..], 8)
    } else {
        // Plain decimal (or the single digit "0") is already canonical.
        return s.to_string();
    };
    u128::from_str_radix(digits, radix).unwrap_or(0).to_string()
}

/// Tokenize `input`: on success return the full token stream, on failure
/// return the complete list of lexical errors (lexing does not stop at the
/// first bad character — every error is collected).
fn tokenize(input: &str) -> Result<Vec<Token>, Vec<LexError>> {
    let mut tokens = Vec::<Token>::new();
    let mut errors = Vec::<LexError>::new();

    // The top-level `file` rule is expected to always match, because the
    // grammar's `error` rule acts as a catch-all for unmatched characters.
    let pairs = SysYLexer::parse(Rule::file, input).expect("parser should not fail");

    for pair in pairs {
        match pair.as_rule() {
            Rule::file => {
                for item in pair.into_inner() {
                    match item.as_rule() {
                        Rule::skip => { /* whitespace and comments: ignored */ }
                        Rule::token => {
                            // `token` is a wrapper rule; the concrete token rule
                            // (keyword / operator / ident / literal) is inside it.
                            let inner = item.into_inner().next().unwrap();
                            let rule = inner.as_rule();
                            let (line, _col) = inner.as_span().start_pos().line_col();

                            if let Some(kind) = rule_to_kind(rule) {
                                let raw = inner.as_str().to_string();
                                // Integer literals are normalized to decimal text;
                                // every other token keeps its raw lexeme.
                                let text = match kind {
                                    TokenKind::INTEGER_CONST => int_lexeme_to_decimal_text(&raw),
                                    _ => raw,
                                };
                                tokens.push(Token { kind, text, line });
                            }
                        }
                        Rule::error => {
                            // Catch-all: record the offending text and its line.
                            let (line, _col) = item.as_span().start_pos().line_col();
                            let ch = item.as_str().to_string();
                            errors.push(LexError { line, ch });
                        }
                        _ => {}
                    }
                }
            }
            _ => {}
        }
    }

    if errors.is_empty() {
        Ok(tokens)
    } else {
        Err(errors)
    }
}

/// Human-readable name of a token kind. The strings mirror the variant
/// (and grammar rule) names and appear verbatim in the printed listing.
fn kind_name(kind: &TokenKind) -> &'static str {
    match kind {
        TokenKind::CONST => "CONST",
        TokenKind::INT => "INT",
        TokenKind::VOID => "VOID",
        TokenKind::IF => "IF",
        TokenKind::ELSE => "ELSE",
        TokenKind::WHILE => "WHILE",
        TokenKind::BREAK => "BREAK",
        TokenKind::CONTINUE => "CONTINUE",
        TokenKind::RETURN => "RETURN",

        TokenKind::PLUS => "PLUS",
        TokenKind::MINUS => "MINUS",
        TokenKind::MUL => "MUL",
        TokenKind::DIV => "DIV",
        TokenKind::MOD => "MOD",
        TokenKind::ASSIGN => "ASSIGN",
        TokenKind::EQ => "EQ",
        TokenKind::NEQ => "NEQ",
        TokenKind::LT => "LT",
        TokenKind::GT => "GT",
        TokenKind::LE => "LE",
        TokenKind::GE => "GE",
        TokenKind::NOT => "NOT",
        TokenKind::AND => "AND",
        TokenKind::OR => "OR",

        TokenKind::L_PAREN => "L_PAREN",
        TokenKind::R_PAREN => "R_PAREN",
        TokenKind::L_BRACE => "L_BRACE",
        TokenKind::R_BRACE => "R_BRACE",
        TokenKind::L_BRACKT => "L_BRACKT",
        TokenKind::R_BRACKT => "R_BRACKT",
        TokenKind::COMMA => "COMMA",
        TokenKind::SEMICOLON => "SEMICOLON",

        TokenKind::IDENT => "IDENT",
        TokenKind::INTEGER_CONST => "INTEGER_CONST",
    }
}

/// Entry point: read the file named by the first CLI argument, lex it,
/// and print either the token stream or every lexical error to stderr.
fn main() {
    let args: Vec<String> = env::args().collect();

    // Require exactly one user-supplied argument: the input path.
    if args.len() < 2 {
        eprintln!("Usage: {} <filename>", args[0]);
        std::process::exit(1);
    }

    let filename = &args[1];
    let input = fs::read_to_string(filename).unwrap_or_else(|e| {
        eprintln!("Failed to read file {}: {}", filename, e);
        std::process::exit(1);
    });

    match tokenize(&input) {
        Ok(tokens) => {
            tokens
                .iter()
                .for_each(|t| eprintln!("{} {} at Line {}.", kind_name(&t.kind), t.text, t.line));
        }
        Err(errors) => {
            for e in errors {
                eprintln!(
                    "Error type A at Line {}: Mysterious character \"{}\".",
                    e.line, e.ch
                );
            }
        }
    }
}
