use pest::Parser;
use pest_derive::Parser;
use std::fmt;

/// Pest-generated lexer for SysY; token rules are defined in `lexer.pest`
/// (the `Rule` enum referenced throughout this file is generated from it).
#[derive(Parser)]
#[grammar = "lexer.pest"]
pub struct SysYLexer;

pub fn tokenize(input: &str) -> Result<Vec<Token>, String> {
    match SysYLexer::parse(Rule::PROGRAM, input) {
        Ok(pairs) => {
            let mut tokens = Vec::new();
            let mut errors = Vec::new();

            for pair in pairs {
                for inner_pair in pair.into_inner() {
                    if inner_pair.as_rule() == Rule::EOI {
                        continue;
                    }

                    if inner_pair.as_rule() == Rule::ERROR_TOKEN {
                        let line_num = inner_pair.as_span().start_pos().line_col().0;
                        let error_char = inner_pair.as_str();
                        errors.push(format!(
                            "Error type A at Line {}: Mysterious character \"{}\".",
                            line_num, error_char
                        ));
                        continue;
                    }

                    if inner_pair.as_rule() == Rule::TOKEN {
                        for token_pair in inner_pair.into_inner() {
                            let token = Token::from_pair(token_pair)?;
                            tokens.push(token);
                        }
                    }
                }
            }
            if !errors.is_empty() {
                return Err(errors.join("\n"));
            }

            Ok(tokens)
        }
        Err(e) => Err(format!("Parse error: {}", e)),
    }
}

/// A single lexed token: its category, normalized text, and source line.
#[derive(Debug, Clone)]
pub struct Token {
    // Category of the token (keyword, operator, literal, …).
    pub token_type: TokenType,
    // Canonical text: fixed spelling for keywords/operators, the source
    // spelling for identifiers, and the decimal value for integer literals.
    pub token_text: String,
    // 1-based source line where the token starts.
    pub line_num: usize,
}

/// Token categories of the SysY language.
///
/// Variant names intentionally use SCREAMING_SNAKE_CASE so that their
/// `Debug`/`Display` text matches the lexer's required output format.
#[allow(non_camel_case_types)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TokenType {
    // Keywords
    CONST,
    INT,
    VOID,
    IF,
    ELSE,
    WHILE,
    BREAK,
    CONTINUE,
    RETURN,
    // Two-character operators
    EQ,
    NEQ,
    LE,
    GE,
    AND,
    OR,
    // Single-character operators
    PLUS,
    MINUS,
    MUL,
    DIV,
    MOD,
    ASSIGN,
    LT,
    GT,
    NOT,
    // Delimiters
    L_PAREN,
    R_PAREN,
    L_BRACE,
    R_BRACE,
    L_BRACKT,
    R_BRACKT,
    COMMA,
    SEMICOLON,
    // Tokens whose text comes from the source
    IDENT,
    INVALID_OCTAL,
    INTEGER_CONST,
}

impl fmt::Display for TokenType {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            TokenType::CONST => "CONST",
            TokenType::INT => "INT",
            TokenType::VOID => "VOID",
            TokenType::IF => "IF",
            TokenType::ELSE => "ELSE",
            TokenType::WHILE => "WHILE",
            TokenType::BREAK => "BREAK",
            TokenType::CONTINUE => "CONTINUE",
            TokenType::RETURN => "RETURN",
            TokenType::EQ => "EQ",
            TokenType::NEQ => "NEQ",
            TokenType::LE => "LE",
            TokenType::GE => "GE",
            TokenType::AND => "AND",
            TokenType::OR => "OR",
            TokenType::PLUS => "PLUS",
            TokenType::MINUS => "MINUS",
            TokenType::MUL => "MUL",
            TokenType::DIV => "DIV",
            TokenType::MOD => "MOD",
            TokenType::ASSIGN => "ASSIGN",
            TokenType::LT => "LT",
            TokenType::GT => "GT",
            TokenType::NOT => "NOT",
            TokenType::L_PAREN => "L_PAREN",
            TokenType::R_PAREN => "R_PAREN",
            TokenType::L_BRACE => "L_BRACE",
            TokenType::R_BRACE => "R_BRACE",
            TokenType::L_BRACKT => "L_BRACKT",
            TokenType::R_BRACKT => "R_BRACKT",
            TokenType::COMMA => "COMMA",
            TokenType::SEMICOLON => "SEMICOLON",
            TokenType::IDENT => "IDENT",
            TokenType::INVALID_OCTAL => "INVALID_OCTAL",
            TokenType::INTEGER_CONST => "INTEGER_CONST",
        };
        write!(f, "{}", name)
    }
}

impl TokenType {
    pub fn get_fixed_text(&self) -> Option<&'static str> {
        match self {
            // 关键字
            TokenType::CONST => Some("const"),
            TokenType::INT => Some("int"),
            TokenType::VOID => Some("void"),
            TokenType::IF => Some("if"),
            TokenType::ELSE => Some("else"),
            TokenType::WHILE => Some("while"),
            TokenType::BREAK => Some("break"),
            TokenType::CONTINUE => Some("continue"),
            TokenType::RETURN => Some("return"),

            // 运算符
            TokenType::EQ => Some("=="),
            TokenType::NEQ => Some("!="),
            TokenType::LE => Some("<="),
            TokenType::GE => Some(">="),
            TokenType::AND => Some("&&"),
            TokenType::OR => Some("||"),
            TokenType::PLUS => Some("+"),
            TokenType::MINUS => Some("-"),
            TokenType::MUL => Some("*"),
            TokenType::DIV => Some("/"),
            TokenType::MOD => Some("%"),
            TokenType::ASSIGN => Some("="),
            TokenType::LT => Some("<"),
            TokenType::GT => Some(">"),
            TokenType::NOT => Some("!"),

            // 分隔符
            TokenType::L_PAREN => Some("("),
            TokenType::R_PAREN => Some(")"),
            TokenType::L_BRACE => Some("{"),
            TokenType::R_BRACE => Some("}"),
            TokenType::L_BRACKT => Some("["),
            TokenType::R_BRACKT => Some("]"),
            TokenType::COMMA => Some(","),
            TokenType::SEMICOLON => Some(";"),

            // 使用实际的token文本
            TokenType::IDENT | TokenType::INTEGER_CONST | TokenType::INVALID_OCTAL => None,
        }
    }

    fn from_rule(rule: Rule) -> Option<Self> {
        match rule {
            Rule::CONST => Some(TokenType::CONST),
            Rule::INT => Some(TokenType::INT),
            Rule::VOID => Some(TokenType::VOID),
            Rule::IF => Some(TokenType::IF),
            Rule::ELSE => Some(TokenType::ELSE),
            Rule::WHILE => Some(TokenType::WHILE),
            Rule::BREAK => Some(TokenType::BREAK),
            Rule::CONTINUE => Some(TokenType::CONTINUE),
            Rule::RETURN => Some(TokenType::RETURN),
            Rule::EQ => Some(TokenType::EQ),
            Rule::NEQ => Some(TokenType::NEQ),
            Rule::LE => Some(TokenType::LE),
            Rule::GE => Some(TokenType::GE),
            Rule::AND => Some(TokenType::AND),
            Rule::OR => Some(TokenType::OR),
            Rule::PLUS => Some(TokenType::PLUS),
            Rule::MINUS => Some(TokenType::MINUS),
            Rule::MUL => Some(TokenType::MUL),
            Rule::DIV => Some(TokenType::DIV),
            Rule::MOD => Some(TokenType::MOD),
            Rule::ASSIGN => Some(TokenType::ASSIGN),
            Rule::LT => Some(TokenType::LT),
            Rule::GT => Some(TokenType::GT),
            Rule::NOT => Some(TokenType::NOT),
            Rule::L_PAREN => Some(TokenType::L_PAREN),
            Rule::R_PAREN => Some(TokenType::R_PAREN),
            Rule::L_BRACE => Some(TokenType::L_BRACE),
            Rule::R_BRACE => Some(TokenType::R_BRACE),
            Rule::L_BRACKT => Some(TokenType::L_BRACKT),
            Rule::R_BRACKT => Some(TokenType::R_BRACKT),
            Rule::COMMA => Some(TokenType::COMMA),
            Rule::SEMICOLON => Some(TokenType::SEMICOLON),
            Rule::IDENT => Some(TokenType::IDENT),
            Rule::INTEGER_CONST => Some(TokenType::INTEGER_CONST),
            Rule::INVALID_OCTAL => Some(TokenType::INVALID_OCTAL),
            _ => None,
        }
    }
}

impl Token {
    /// Assembles a token record from its parts.
    fn new(token_type: TokenType, token_text: String, line_num: usize) -> Self {
        Token {
            token_type,
            token_text,
            line_num,
        }
    }

    /// Builds a `Token` from a pest pair emitted by the lexer.
    ///
    /// Integer literals are normalized to their decimal value; identifiers
    /// and invalid octal literals keep their exact source spelling; every
    /// other kind uses its canonical fixed text.
    fn from_pair(pair: pest::iterators::Pair<Rule>) -> Result<Self, String> {
        let rule = pair.as_rule();
        let source_text = pair.as_str();
        let (line, _col) = pair.as_span().start_pos().line_col();

        let token_type = match TokenType::from_rule(rule) {
            Some(kind) => kind,
            None => return Err(format!("Unexpected token rule: {:?}", rule)),
        };

        let token_text = match token_type {
            TokenType::INTEGER_CONST => Self::normalize_integer(source_text)?,
            // Spelling comes straight from the source for these kinds.
            TokenType::IDENT | TokenType::INVALID_OCTAL => source_text.to_string(),
            other => other.get_fixed_text().unwrap_or(source_text).to_string(),
        };

        Ok(Token::new(token_type, token_text, line))
    }

    /// Converts a SysY integer literal (decimal, `0x`/`0X` hex, or
    /// leading-zero octal) into its decimal string representation.
    fn normalize_integer(text: &str) -> Result<String, String> {
        let value = if let Some(hex_digits) = text
            .strip_prefix("0x")
            .or_else(|| text.strip_prefix("0X"))
        {
            i64::from_str_radix(hex_digits, 16)
                .map_err(|_| format!("Invalid hex number: {}", text))?
        } else if text.len() > 1 && text.starts_with('0') {
            // A lone "0" falls through to the decimal branch below.
            i64::from_str_radix(&text[1..], 8)
                .map_err(|_| format!("Invalid octal number: {}", text))?
        } else {
            text.parse::<i64>()
                .map_err(|_| format!("Invalid decimal number: {}", text))?
        };
        Ok(value.to_string())
    }

    /// Renders the token in the required report format,
    /// e.g. `IDENT main at Line 1.`.
    pub fn format_output(&self) -> String {
        format!(
            "{} {} at Line {}.",
            self.token_type, self.token_text, self.line_num
        )
    }
}
