import sys
import tokenize as tk



class Lexer(object):
    """One-token-lookahead lexer built on the stdlib ``tokenize`` module.

    Opens *fname* and keeps the current (not yet consumed) token in
    ``self.cur``; ``match`` checks and consumes it, advancing the stream.
    """

    def __init__(self, fname):
        # Handle is kept open because generate_tokens reads lazily from
        # f.readline; call close() when parsing is finished.
        self.f = open(fname)
        self.lex = tk.generate_tokens(self.f.readline)
        self.cur = next(self.lex)

    def close(self):
        """Release the underlying file handle (previously leaked)."""
        self.f.close()

    def get_next(self):
        """Return the current token without consuming it."""
        if self.cur is None:
            self.cur = next(self.lex)

        return self.cur

    def report_error(self, msg):
        """Print a parse error to stderr and abort with a failure status."""
        # Errors go to stderr and exit with a nonzero code so callers/shells
        # can distinguish failure from a successful run.
        print(msg, file=sys.stderr)
        sys.exit(1)

    def match(self, ttype, tstring=None):
        """Consume the current token after verifying its type.

        If *tstring* is given, the token's exact text is verified too —
        needed to distinguish e.g. ')' from other operator tokens, which
        all share the same OP type.
        """
        if self.cur is None:
            self.report_error("unexpected end of input")

        if self.cur.type != ttype:
            self.report_error("expected %s, but got %s" % (ttype, self.cur.type))

        if tstring is not None and self.cur.string != tstring:
            self.report_error("expected %r, but got %r" % (tstring, self.cur.string))

        self.cur = next(self.lex)



# Module-level lexer over the input file; the parser functions below all
# read tokens from this shared instance.
lexer = Lexer("expr.txt")

# Grammar:
#   expression = term (('+' | '-') term)*
#   term       = factor (('*' | '/') factor)*
#   factor     = NUMBER | '(' expression ')'

# expression = term (+ term)*
# expression = term (('+' | '-') term)*
def expression():
    """Parse and evaluate an additive expression from the shared lexer.

    Returns the integer value of the expression.
    """
    a = term()
    t = lexer.get_next()

    # Fold further terms into the accumulator while the lookahead is an
    # additive operator.  tk.OP replaces the hard-coded 54: the numeric
    # value of the OP token type is not stable across Python versions.
    while t.type == tk.OP and t.string in ("+", "-"):
        lexer.match(t.type)
        b = term()
        if t.string == "+":
            a += b
        else:
            a -= b

        t = lexer.get_next()

    return a

# term = factor * factor
# term = factor (('*' | '/') factor)*
def term():
    """Parse and evaluate a multiplicative term from the shared lexer.

    Division is integer (floor) division, matching the original //=.
    Returns the integer value of the term.
    """
    a = factor()
    t = lexer.get_next()

    # tk.OP replaces the hard-coded 54: the numeric value of the OP token
    # type is not stable across Python versions.
    while t.type == tk.OP and t.string in ("*", "/"):
        lexer.match(t.type)
        b = factor()
        if t.string == "*":
            a *= b
        else:
            a //= b

        t = lexer.get_next()

    return a

# factor = - factor
# factor = NUMBER | '(' expression ')'
# factor = '-' factor | NUMBER | '(' expression ')'
def factor():
    """Parse and evaluate a factor: a number, a parenthesised expression,
    or a unary minus applied to a factor.

    Reports an error (and exits) on any other token.
    """
    v = lexer.get_next()
    if v.type == tk.OP and v.string == "-":
        # Unary minus: documented in the grammar comment but previously
        # unimplemented.
        lexer.match(v.type)
        return -factor()
    elif v.type == tk.NUMBER:
        lexer.match(v.type)
        # BUG FIX: the original returned ConstValueNode(int(v.string)),
        # but ConstValueNode is undefined anywhere in the file, so every
        # number raised NameError.  Return the integer value directly,
        # which is what expression()/term() arithmetic expects.
        return int(v.string)
    elif v.type == tk.OP and v.string == "(":
        lexer.match(v.type)
        t = expression()
        # Verify the closing token really is ')' before consuming it;
        # the old lexer.match(54) accepted ANY operator here.
        closing = lexer.get_next()
        if closing.type != tk.OP or closing.string != ")":
            lexer.report_error("expected ')', but got %s" % closing.string)
        lexer.match(closing.type)
        return t
    else:
        # Previously this fell off the end and silently returned None,
        # which then crashed the caller's arithmetic.
        lexer.report_error("unexpected token %s" % v.string)

# Parse the whole input file and print the evaluated result.
result = expression()
print(result)