from lexer.token.token import Token
from lexer.token.token_type import TokenType

class Identifier(Token):
    """Token representing an identifier.

    Also owns the keyword table used by the lexer to decide whether a
    scanned identifier-shaped string is actually a reserved keyword.
    """

    # Maps keyword spellings to their TokenType members. Any identifier
    # not present here is an ordinary user-defined name.
    keywordDic = {
        "int": TokenType.KW_INT,
        "char": TokenType.KW_CHAR,
        "void": TokenType.KW_VOID,
        "extern": TokenType.KW_EXTERN,
        "if": TokenType.KW_IF,
        "else": TokenType.KW_ELSE,
        "switch": TokenType.KW_SWITCH,
        "case": TokenType.KW_CASE,
        "default": TokenType.KW_DEFAULT,
        "while": TokenType.KW_WHILE,
        "for": TokenType.KW_FOR,
        "break": TokenType.KW_BREAK,
        "continue": TokenType.KW_CONTINUE,
        "return": TokenType.KW_RETURN,
    }

    def __init__(self, name: str):
        """Create an identifier token carrying its source spelling *name*."""
        # Python 3 zero-argument super() (was the legacy two-argument form).
        super().__init__(TokenType.IDENTIFIER)
        self.name = name

    def __str__(self) -> str:
        return f"identifier: {self.name}"

    def __repr__(self) -> str:
        return self.__str__()

    @classmethod
    def isKeyword(cls, string: str) -> bool:
        """Return True if *string* is a reserved keyword."""
        return string in cls.keywordDic

    @classmethod
    def keywordType(cls, string: str):
        """Return the TokenType for the keyword *string*.

        Raises KeyError if *string* is not a keyword; callers should
        check with isKeyword() first.
        """
        return cls.keywordDic[string]
