use pest::iterators::Pair;
use pest::Parser;
use pest_derive::Parser;

/// Pest-generated lexer for SysY; the token rules are defined in `lexer.pest`.
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct SysYLexer;

/// The kinds of tokens the SysY lexer can emit.
#[derive(Debug, Clone)]
pub enum TokKind {
    // Keywords
    CONST,
    INT,
    VOID,
    IF,
    ELSE,
    WHILE,
    BREAK,
    CONTINUE,
    RETURN,
    // Operators and logic
    PLUS,
    MINUS,
    MUL,
    DIV,
    MOD,
    ASSIGN,
    EQ,
    NEQ,
    LT,
    GT,
    LE,
    GE,
    NOT,
    AND,
    OR,
    // Delimiters
    L_PAREN,
    R_PAREN,
    L_BRACE,
    R_BRACE,
    L_BRACKT,
    R_BRACKT,
    COMMA,
    SEMICOLON,
    // Identifier: payload is the source text.
    IDENT(String),
    // Integer constant: payload is decimal text, ready for printing.
    INTEGER_CONST(String),
    // Trivia recognized by the lexer but filtered out before printing.
    WS,
    LINE_COMMENT,
    MULTILINE_COMMENT,
}

impl TokKind {
    /// Token-class name used when printing, e.g. `"IDENT"` or `"PLUS"`.
    /// All trivia variants (whitespace, comments) map to `"<IGNORED>"`.
    pub fn name_for_print(&self) -> &'static str {
        use TokKind::*;
        match self {
            CONST => "CONST",
            INT => "INT",
            VOID => "VOID",
            IF => "IF",
            ELSE => "ELSE",
            WHILE => "WHILE",
            BREAK => "BREAK",
            CONTINUE => "CONTINUE",
            RETURN => "RETURN",
            PLUS => "PLUS",
            MINUS => "MINUS",
            MUL => "MUL",
            DIV => "DIV",
            MOD => "MOD",
            ASSIGN => "ASSIGN",
            EQ => "EQ",
            NEQ => "NEQ",
            LT => "LT",
            GT => "GT",
            LE => "LE",
            GE => "GE",
            NOT => "NOT",
            AND => "AND",
            OR => "OR",
            L_PAREN => "L_PAREN",
            R_PAREN => "R_PAREN",
            L_BRACE => "L_BRACE",
            R_BRACE => "R_BRACE",
            L_BRACKT => "L_BRACKT",
            R_BRACKT => "R_BRACKT",
            COMMA => "COMMA",
            SEMICOLON => "SEMICOLON",
            IDENT(_) => "IDENT",
            INTEGER_CONST(_) => "INTEGER_CONST",
            WS | LINE_COMMENT | MULTILINE_COMMENT => "<IGNORED>",
        }
    }

    /// The concrete lexeme text for this token. Payload-carrying variants
    /// return their stored text; trivia returns the empty string.
    pub fn lexeme_for_print(&self) -> String {
        use TokKind::*;
        // All fixed-spelling tokens share one `&'static str` table; the two
        // payload variants return early with their owned text.
        let fixed = match self {
            IDENT(s) | INTEGER_CONST(s) => return s.clone(),
            CONST => "const",
            INT => "int",
            VOID => "void",
            IF => "if",
            ELSE => "else",
            WHILE => "while",
            BREAK => "break",
            CONTINUE => "continue",
            RETURN => "return",
            PLUS => "+",
            MINUS => "-",
            MUL => "*",
            DIV => "/",
            MOD => "%",
            ASSIGN => "=",
            EQ => "==",
            NEQ => "!=",
            LT => "<",
            GT => ">",
            LE => "<=",
            GE => ">=",
            NOT => "!",
            AND => "&&",
            OR => "||",
            L_PAREN => "(",
            R_PAREN => ")",
            L_BRACE => "{",
            R_BRACE => "}",
            L_BRACKT => "[",
            R_BRACKT => "]",
            COMMA => ",",
            SEMICOLON => ";",
            WS | LINE_COMMENT | MULTILINE_COMMENT => "",
        };
        fixed.to_string()
    }
}

/// A recognized token together with its source location.
#[derive(Debug, Clone)]
pub struct TokenInfo {
    pub kind: TokKind,
    pub line: usize, // 1-based line of the token's first character
}

/// An unrecognized character reported as a lexical error.
#[derive(Debug, Clone)]
pub struct ErrorInfo {
    pub ch: String,  // the offending character's source text
    pub line: usize, // 1-based line where it occurred
}

// Converts one concrete token pair (keyword, operator, identifier, constant,
// trivia, or error character) into a `TokenInfo` / `ErrorInfo`.
//
// Wrapper rules (`Rule::file`, `Rule::token`, `Rule::EOI`) and the private
// literal sub-rules are never valid inputs here; passing one is a caller bug.
fn make_tok(pair: Pair<'_, Rule>) -> Option<Result<TokenInfo, ErrorInfo>> {
    use TokKind::*;
    let span = pair.as_span();
    // pest reports 1-based line/column positions.
    let (line, _col) = span.start_pos().line_col();
    let rule = pair.as_rule();
    let text = pair.as_str();

    let kind = match rule {
        // Ignorable trivia (still reported; the caller filters it out).
        Rule::WS => WS,
        Rule::LINE_COMMENT => LINE_COMMENT,
        Rule::MULTILINE_COMMENT => MULTILINE_COMMENT,

        // Keywords
        Rule::CONST => CONST,
        Rule::INT => INT,
        Rule::VOID => VOID,
        Rule::IF => IF,
        Rule::ELSE => ELSE,
        Rule::WHILE => WHILE,
        Rule::BREAK => BREAK,
        Rule::CONTINUE => CONTINUE,
        Rule::RETURN => RETURN,

        // Operators / delimiters
        Rule::PLUS => PLUS,
        Rule::MINUS => MINUS,
        Rule::MUL => MUL,
        Rule::DIV => DIV,
        Rule::MOD => MOD,
        Rule::ASSIGN => ASSIGN,
        Rule::EQ => EQ,
        Rule::NEQ => NEQ,
        Rule::LT => LT,
        Rule::GT => GT,
        Rule::LE => LE,
        Rule::GE => GE,
        Rule::NOT => NOT,
        Rule::AND => AND,
        Rule::OR => OR,
        Rule::L_PAREN => L_PAREN,
        Rule::R_PAREN => R_PAREN,
        Rule::L_BRACE => L_BRACE,
        Rule::R_BRACE => R_BRACE,
        Rule::L_BRACKT => L_BRACKT,
        Rule::R_BRACKT => R_BRACKT,
        Rule::COMMA => COMMA,
        Rule::SEMICOLON => SEMICOLON,

        // Identifier: keep the source text verbatim.
        Rule::IDENT => IDENT(text.to_string()),

        // Integer constant: normalized to decimal text for printing.
        Rule::INTEGER_CONST => INTEGER_CONST(normalize_int_text(text)),

        // Unrecognized character: surfaced as a lexical error.
        Rule::ERROR_CHAR => {
            return Some(Err(ErrorInfo { ch: text.to_string(), line }));
        }

        // Wrapper / internal rules must never reach this function.
        Rule::file | Rule::token | Rule::EOI
        | Rule::_HEX_LITERAL | Rule::_OCT_LITERAL | Rule::_DEC_LITERAL => {
            unreachable!("Unexpected rule {:?} passed to make_tok at line {}: '{}'", rule, line, text)
        }
    };

    Some(Ok(TokenInfo { kind, line }))
}

// Rewrites an integer literal as decimal text.
//
// - Hex (`0x…` / `0X…`) and octal (a leading `0` followed by at least one more
//   digit — a lone "0" stays decimal) are converted via `from_str_radix`.
// - Plain decimal text is passed through unchanged.
// - BUG FIX: a literal whose value overflows `i64` now falls back to 0 on both
//   radix paths. Previously the hex path used `unwrap()` and panicked, while
//   the octal path already used `unwrap_or(0)` — the behavior was inconsistent.
fn normalize_int_text(text: &str) -> String {
    if let Some(hex) = text.strip_prefix("0x").or_else(|| text.strip_prefix("0X")) {
        i64::from_str_radix(hex, 16).unwrap_or(0).to_string()
    } else if text.len() > 1 && text.starts_with('0') {
        i64::from_str_radix(&text[1..], 8).unwrap_or(0).to_string()
    } else {
        text.to_string()
    }
}

/// Tokenizes `input`, returning the accepted tokens (trivia filtered out)
/// and the lexical errors (unrecognized characters), each with its line.
pub fn lex(input: &str) -> (Vec<TokenInfo>, Vec<ErrorInfo>) {
    let mut tokens = Vec::new();
    let mut errors = Vec::new();

    // Parse the whole input as one `Rule::file`. The grammar's ERROR_CHAR
    // catch-all absorbs bad characters, so a failure here is an internal bug.
    let parsed = SysYLexer::parse(Rule::file, input)
        .unwrap_or_else(|e| panic!("Internal parse error: {}", e));

    // `parsed` yields the single top-level `Rule::file` pair.
    for file_pair in parsed {
        // Its children are `Rule::token`, `Rule::ERROR_CHAR`, or `Rule::EOI`.
        for top in file_pair.into_inner() {
            match top.as_rule() {
                Rule::token => {
                    // `token` wraps exactly one concrete token rule
                    // (e.g. Rule::INT, Rule::IDENT), so descend one level.
                    for concrete in top.into_inner() {
                        match make_tok(concrete) {
                            Some(Ok(tok)) => {
                                // Drop whitespace and comments; keep real tokens.
                                let ignored = matches!(
                                    tok.kind,
                                    TokKind::WS
                                        | TokKind::LINE_COMMENT
                                        | TokKind::MULTILINE_COMMENT
                                );
                                if !ignored {
                                    tokens.push(tok);
                                }
                            }
                            Some(Err(err)) => errors.push(err),
                            None => {}
                        }
                    }
                }
                Rule::ERROR_CHAR => {
                    // make_tok always yields Some(Err(..)) for ERROR_CHAR.
                    match make_tok(top) {
                        Some(Err(err)) => errors.push(err),
                        _ => unreachable!("make_tok must yield Err for ERROR_CHAR"),
                    }
                }
                // End-of-input marker: nothing to record.
                Rule::EOI => {}
                // No other rule may appear at the top level of `file`.
                other => unreachable!("Unexpected top-level rule: {:?}", other),
            }
        }
    }

    (tokens, errors)
}
