import ply.lex as lex

# Token list: every token type this lexer can emit must be declared here.
# Reserved words carry no t_* rule of their own — t_IDENTIFIER promotes them
# via the `reserved` mapping below — but their type names still have to be
# declared, or PLY raises a LexError ("unknown token type") the first time
# one is matched. 'FOR' was missing even though `reserved` maps 'for' to it,
# so any input containing the keyword `for` crashed the lexer.
tokens = (
    'NUMBER', 'FLOAT', 'STRING',
    'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'EQUALS', 'BITAND', 'BITOR', 'MOD',
    'GT', 'LT', 'GE', 'LE', 'EQ', 'NEQ',
    'OR', 'AND', 'NOT',
    'LPAREN', 'RPAREN',
    'LBRACE', 'RBRACE',
    'LBRACKET', 'RBRACKET',  # []
    'SEMI', 'COMMA',
    'VAR', 'FUNCTION', 'PROTOTYPE', 'IF', 'ELSE', 'WHILE', 'FOR', 'TRY', 'CATCH',
    'IDENTIFIER', 'ARROW', 'RETURN', 'DOLLAR'
)

# Characters silently skipped between tokens.
t_ignore = ' \t'

# Simple tokens, given as raw regex strings. PLY sorts string rules by
# decreasing regex length, so two-character operators ('>=', '==', '->', ...)
# are always tried before their one-character prefixes ('>', '=', '-'),
# regardless of the order they appear in this file.

# Arithmetic / bitwise operators
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_MOD = r'%'
t_BITAND = r'&'
t_BITOR = r'\|'

# Comparison operators
t_EQ = r'=='
t_NEQ = r'!='
t_GE = r'>='
t_LE = r'<='
t_GT = r'>'
t_LT = r'<'

# Assignment
t_EQUALS = r'='

# Delimiters and punctuation
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACE = r'\{'
t_RBRACE = r'\}'
t_LBRACKET = r'\['
t_RBRACKET = r'\]'
t_SEMI = r';'
t_COMMA = r','
t_ARROW = r'->'
t_DOLLAR = r'\$'

# Reserved words. t_IDENTIFIER matches these first and then promotes the
# token to the keyword's own type; each type name is simply the keyword in
# uppercase, so the mapping is generated rather than spelled out.
reserved = {kw: kw.upper() for kw in (
    'var', 'function', 'prototype',
    'if', 'else', 'while', 'for',
    'try', 'catch', 'return',
    'or', 'and', 'not',
)}


def t_COMMENT_SINGLELINE(t):
    r'//.*'
    # Returning None makes PLY discard the matched text, so `//` comments
    # never reach the parser. ('.' does not match '\n', so the comment ends
    # at the line break.)
    return None


# Identifiers and reserved words: a name that appears in `reserved` becomes
# its keyword token; everything else stays a plain IDENTIFIER.
def t_IDENTIFIER(t):
    r'[a-zA-Z_][a-zA-Z_0-9]*'
    if t.value in reserved:
        t.type = reserved[t.value]
    else:
        t.type = 'IDENTIFIER'
    return t


# Integer literal. The lookbehind refuses to start a (possibly signed)
# number right after a digit, '.', ')', '$' or an identifier character, so
# in `1+1` the '+' stays a PLUS token instead of being eaten as a sign.
# The trailing lookahead leaves anything like `1.5` for t_FLOAT.
def t_NUMBER(t):
    r'(?<![\.\d\)a-zA-Z_0-9$])[+-]?\d+\b(?!\.)'
    t.value = int(t.value, 10)
    return t


def t_FLOAT(t):
    r'(?<![\.\d\)a-zA-Z_0-9$])[+-]?(\d+\.\d+)'
    # Float literal. The lookbehind now mirrors t_NUMBER's exactly: the
    # original class was `[\d\d\)...]` — a duplicated `\d` where `\.` was
    # clearly intended — which let a float start immediately after a
    # literal '.'. With the fix, a sign is not consumed after an operand
    # (e.g. in `1.5+2.5` the '+' stays a PLUS token).
    t.value = float(t.value)
    return t


# String literal
def t_STRING(t):
    r'"[^"\n]*"'
    # The previous pattern `".*"` was greedy: a line such as `f("a", "b")`
    # lexed as ONE string token with value `a", "b`. Matching a run of
    # non-quote, non-newline characters keeps each literal separate.
    # NOTE(review): escape sequences (\") are still not supported.
    t.value = t.value[1:-1]  # strip the surrounding quotes
    return t


# Line-number tracking: PLY does not count lines itself, so bump lineno by
# one per '\n' in the matched run of newlines.
def t_newline(t):
    r'\n+'
    t.lexer.lineno += t.value.count('\n')


# Error handling rule: report the offending character, then resynchronize
# by skipping exactly one character and letting the lexer continue.
def t_error(t):
    bad = t.value[0]
    print(f"Illegal character '{bad}'")
    t.lexer.skip(1)


# Build the lexer. lex.lex() introspects this module's namespace for the
# `tokens` tuple and every t_* rule, so it must run after all of them are
# defined.
lexer = lex.lex()

if __name__ == "__main__":
    # Test it out
    data = """
    var b = ["asdasdasda"];
        print(b);
        b = 2;
        1+1;
    """

    # Give the lexer some input
    lexer.input(data)

    # Tokenize
    while True:
        tok = lexer.token()
        if not tok:
            break
        print(tok)
