use std::str::FromStr as _;

// 语法中合法的token如下：
/*
CONST -> 'const';

INT -> 'int';

VOID -> 'void';

IF -> 'if';

ELSE -> 'else';

WHILE -> 'while';

BREAK -> 'break';

CONTINUE -> 'continue';

RETURN -> 'return';

PLUS -> '+';

MINUS -> '-';

MUL -> '*';

DIV -> '/';

MOD -> '%';

ASSIGN -> '=';

EQ -> '==';

NEQ -> '!=';

LT -> '<';

GT -> '>';

LE -> '<=';

GE -> '>=';

NOT -> '!';

AND -> '&&';

OR -> '||';

L_PAREN -> '(';

R_PAREN -> ')';

L_BRACE -> '{';

R_BRACE -> '}';

L_BRACKT -> '[';

R_BRACKT -> ']';

COMMA -> ',';

SEMICOLON -> ';';

IDENT : 以下划线或字母开头，仅包含下划线、英文字母大小写、阿拉伯数字
   ;

INTEGER_CONST : 数字常量，包含十进制数，0开头的八进制数，0x或0X开头的十六进制数
   ;

WS
   -> [ \r\n\t]+
   ;

LINE_COMMENT
   -> '//' .*? '\n'
   ;

MULTILINE_COMMENT
   -> '/*' .*? '*/'
   ;
*/
use deftoken_macro::SysYToken;

// Token-related error and Result types.

/// Errors produced while converting raw text into [`Token`]s.
#[derive(Debug, thiserror::Error)]
pub enum TokenizeError {
    /// The input string matched no known token class.
    #[error("遇到无法识别的Token: {0}")]
    InvalidToken(String),
}

/// Convenience alias for results returned by the tokenizer.
pub type TokenizeResult<T> = Result<T, TokenizeError>;

/// All lexical tokens of the SysY language (see the grammar comment at the
/// top of this file).
///
/// The `SysYToken` derive macro (from `deftoken_macro`) consumes the
/// `#[deftoken]` attributes; presumably it generates the `from_str`
/// conversion used by [`Token::parse`], mapping each `typevalue` literal to
/// its variant. `typevalue = "$0"` appears to mark variable-content tokens
/// whose text is carried in the variant's `String` field — TODO confirm
/// against the macro's definition.
#[derive(Debug, Clone, PartialEq, Eq, SysYToken)]
pub enum Token {
    // --- Keywords ---
    #[deftoken(typename = "CONST", typevalue = "const")]
    Const,
    #[deftoken(typename = "INT", typevalue = "int")]
    Int,
    #[deftoken(typename = "VOID", typevalue = "void")]
    Void,
    #[deftoken(typename = "IF", typevalue = "if")]
    If,
    #[deftoken(typename = "ELSE", typevalue = "else")]
    Else,
    #[deftoken(typename = "WHILE", typevalue = "while")]
    While,
    #[deftoken(typename = "BREAK", typevalue = "break")]
    Break,
    #[deftoken(typename = "CONTINUE", typevalue = "continue")]
    Continue,
    #[deftoken(typename = "RETURN", typevalue = "return")]
    Return,
    // --- Arithmetic operators ---
    #[deftoken(typename = "PLUS", typevalue = "+")]
    Plus,
    #[deftoken(typename = "MINUS", typevalue = "-")]
    Minus,
    #[deftoken(typename = "MUL", typevalue = "*")]
    Mul,
    #[deftoken(typename = "DIV", typevalue = "/")]
    Div,
    #[deftoken(typename = "MOD", typevalue = "%")]
    Mod,
    // --- Assignment, comparison and logical operators ---
    #[deftoken(typename = "ASSIGN", typevalue = "=")]
    Assign,
    #[deftoken(typename = "EQ", typevalue = "==")]
    Eq,
    #[deftoken(typename = "NEQ", typevalue = "!=")]
    Neq,
    #[deftoken(typename = "LT", typevalue = "<")]
    Lt,
    #[deftoken(typename = "GT", typevalue = ">")]
    Gt,
    #[deftoken(typename = "LE", typevalue = "<=")]
    Le,
    #[deftoken(typename = "GE", typevalue = ">=")]
    Ge,
    #[deftoken(typename = "NOT", typevalue = "!")]
    Not,
    #[deftoken(typename = "AND", typevalue = "&&")]
    And,
    #[deftoken(typename = "OR", typevalue = "||")]
    Or,
    // --- Delimiters ---
    #[deftoken(typename = "L_PAREN", typevalue = "(")]
    LParen,
    #[deftoken(typename = "R_PAREN", typevalue = ")")]
    RParen,
    #[deftoken(typename = "L_BRACE", typevalue = "{")]
    LBrace,
    #[deftoken(typename = "R_BRACE", typevalue = "}")]
    RBrace,
    #[deftoken(typename = "L_BRACKT", typevalue = "[")]
    LBrackt,
    #[deftoken(typename = "R_BRACKT", typevalue = "]")]
    RBrackt,
    #[deftoken(typename = "COMMA", typevalue = ",")]
    Comma,
    #[deftoken(typename = "SEMICOLON", typevalue = ";")]
    Semicolon,
    // --- Variable-content tokens (text stored in the String field) ---
    #[deftoken(typename = "IDENT", typevalue = "$0")]
    Ident(String),
    #[deftoken(typename = "INTEGER_CONST", typevalue = "$0")]
    IntegerConst(String),
    #[deftoken(typename = "WS", typevalue = "$0")]
    Ws(String),
    #[deftoken(typename = "LINE_COMMENT", typevalue = "$0")]
    LineComment(String),
    #[deftoken(typename = "MULTILINE_COMMENT", typevalue = "$0")]
    MultilineComment(String),
}

impl Token {
    /// Parses a single token string into a [`Token`].
    ///
    /// First tries the fixed keyword/operator tokens via the macro-generated
    /// `from_str`; if none matches, falls back to the variable-content token
    /// classes (identifier, integer constant, whitespace, comments) in
    /// declaration order.
    ///
    /// # Errors
    /// Returns [`TokenizeError::InvalidToken`] if the input matches no token
    /// class.
    pub fn parse<T: AsRef<str>>(token_str: T) -> TokenizeResult<Self> {
        let token_str = token_str.as_ref();
        if let Ok(token) = Self::from_str(token_str) {
            return Ok(token);
        };
        // Handle the variable-content tokens not covered by the macro.
        if Self::is_ident(token_str) {
            return Ok(Token::Ident(token_str.to_string()));
        };
        if Self::is_integer_const(token_str) {
            return Ok(Token::IntegerConst(token_str.to_string()));
        }
        if Self::is_ws(token_str) {
            return Ok(Token::Ws(token_str.to_string()));
        }
        if Self::is_line_comment(token_str) {
            return Ok(Token::LineComment(token_str.to_string()));
        }
        if Self::is_multiline_comment(token_str) {
            return Ok(Token::MultilineComment(token_str.to_string()));
        }
        Err(TokenizeError::InvalidToken(token_str.to_string()))
    }

    /// IDENT: starts with an ASCII letter or underscore, followed only by
    /// ASCII letters, digits, or underscores.
    fn is_ident(s: &str) -> bool {
        let mut chars = s.chars();
        match chars.next() {
            Some(c) if c.is_ascii_alphabetic() || c == '_' => (),
            _ => return false,
        }
        chars.all(|c| c.is_ascii_alphanumeric() || c == '_')
    }

    /// INTEGER_CONST: decimal, `0`-prefixed octal, or `0x`/`0X`-prefixed hex.
    fn is_integer_const(s: &str) -> bool {
        // Fix: the empty string used to pass the decimal check below
        // (`all()` is vacuously true on an empty iterator), so `parse("")`
        // wrongly produced `IntegerConst("")`.
        if s.is_empty() {
            return false;
        }
        // Decimal: all digits; a multi-digit number must not start with 0
        // (a leading 0 means octal).
        if s.chars().all(|c| c.is_ascii_digit()) && !(s.starts_with('0') && s.len() > 1) {
            return true;
        }
        // Octal: leading 0 followed by one or more digits in 0..=7.
        if s.starts_with('0') && s.len() > 1 && s.chars().skip(1).all(|c| ('0'..='7').contains(&c))
        {
            return true;
        }
        // Hexadecimal: 0x/0X followed by one or more hex digits.
        if (s.starts_with("0x") || s.starts_with("0X"))
            && s.len() > 2
            && s.chars().skip(2).all(|c| c.is_ascii_hexdigit())
        {
            return true;
        }
        false
    }

    /// WS: one or more of space, tab, CR, LF (grammar rule `[ \r\n\t]+`).
    fn is_ws(s: &str) -> bool {
        // Fix: the empty string used to count as whitespace (vacuous
        // `all()`), and `char::is_whitespace` accepted Unicode whitespace
        // outside the grammar's `[ \r\n\t]` set.
        !s.is_empty() && s.chars().all(|c| matches!(c, ' ' | '\t' | '\r' | '\n'))
    }

    /// LINE_COMMENT: `//` up to and including the first newline.
    fn is_line_comment(s: &str) -> bool {
        // The grammar's non-greedy `.*?` stops at the first '\n', so a
        // single LINE_COMMENT token contains exactly one newline — the
        // trailing one. Fix: "//a\nb\n" used to be accepted.
        // Slicing off the last byte is safe: it is the ASCII '\n'.
        s.starts_with("//") && s.ends_with('\n') && !s[..s.len() - 1].contains('\n')
    }

    /// MULTILINE_COMMENT: `/*` up to and including the first `*/`.
    fn is_multiline_comment(s: &str) -> bool {
        // Fix: "/*/" used to pass because the "/*" prefix and "*/" suffix
        // overlapped; require at least "/**/" (len >= 4). Also reject an
        // embedded "*/", since the grammar's `.*?` is non-greedy and would
        // have ended the token there. Byte slicing is safe: the trimmed
        // delimiters are ASCII.
        s.len() >= 4
            && s.starts_with("/*")
            && s.ends_with("*/")
            && !s[2..s.len() - 2].contains("*/")
    }
}

#[cfg(test)]
mod test {
    use super::*;

    /// Valid inputs paired with the token each one must parse to.
    fn valid_cases() -> Vec<(&'static str, Token)> {
        vec![
            // Keywords.
            ("const", Token::Const),
            ("int", Token::Int),
            ("void", Token::Void),
            ("if", Token::If),
            ("else", Token::Else),
            ("while", Token::While),
            ("break", Token::Break),
            ("continue", Token::Continue),
            ("return", Token::Return),
            // Operators.
            ("+", Token::Plus),
            ("-", Token::Minus),
            ("*", Token::Mul),
            ("/", Token::Div),
            ("%", Token::Mod),
            ("=", Token::Assign),
            ("==", Token::Eq),
            ("!=", Token::Neq),
            ("<", Token::Lt),
            (">", Token::Gt),
            ("<=", Token::Le),
            (">=", Token::Ge),
            ("!", Token::Not),
            ("&&", Token::And),
            ("||", Token::Or),
            // Delimiters.
            ("(", Token::LParen),
            (")", Token::RParen),
            ("{", Token::LBrace),
            ("}", Token::RBrace),
            ("[", Token::LBrackt),
            ("]", Token::RBrackt),
            (",", Token::Comma),
            (";", Token::Semicolon),
            // Variable-content tokens.
            ("identifier_123", Token::Ident("identifier_123".to_string())),
            ("12345", Token::IntegerConst("12345".to_string())),
            ("0x1A3F", Token::IntegerConst("0x1A3F".to_string())),
            ("0XABCDEF", Token::IntegerConst("0XABCDEF".to_string())),
            ("01234567", Token::IntegerConst("01234567".to_string())),
            ("   \t\n", Token::Ws("   \t\n".to_string())),
            (
                "// This is a line comment\n",
                Token::LineComment("// This is a line comment\n".to_string()),
            ),
            (
                "/* This is a \n multiline comment */",
                Token::MultilineComment("/* This is a \n multiline comment */".to_string()),
            ),
        ]
    }

    /// Inputs that `Token::parse` must reject.
    fn invalid_cases() -> Vec<&'static str> {
        vec![
            "123abc",                   // identifier starting with a digit
            "0xGHIJK",                  // invalid hexadecimal digits
            "0x",                       // incomplete hexadecimal literal
            "08",                       // invalid octal digit
            "/* unclosed comment",      // unterminated multiline comment
            "// unclosed line comment", // line comment missing its newline
        ]
    }

    #[test]
    fn test_token_parse() {
        for (input, expected) in valid_cases() {
            assert_eq!(Token::parse(input).unwrap(), expected);
        }
    }

    #[test]
    fn test_token_parse_invalid() {
        for input in invalid_cases() {
            let result = Token::parse(input);
            assert!(
                result.is_err(),
                "Input '{input}' should be invalid, but parsed: {result:?}"
            );
        }
    }
}
