# proevolytx@gmail.com
# 2012/10/3

from lexer import *
from preparser import *
from time import *
from os import *

# Since the Python grammar is not LL(1), a top-down parser
# sometimes needs to re-read tokens it has already passed.
# The token stream below offers a Prev() method for stepping back.
class TokenFlow:
    """Token stream with single-token lookahead and backtracking.

    Tokens are dicts such as {"type":"keyword", "content":"if", "line":3}.
    Reading past either end yields a synthetic {"type":"none"} token.
    """
    def __init__(s, tokens):
        s.tokens = tokens
        s.index = 0

    def Peek(s):
        # Current token without consuming it.
        if s.index < 0 or len(s.tokens) == 0:
            return {"type":"none", "line":0}
        if s.index >= len(s.tokens):
            # past the end: report the last token's line for diagnostics
            return {"type":"none", "line":s.tokens[-1]["line"]}
        return s.tokens[s.index]

    def Next(s):
        # Advance the cursor one token.
        s.index += 1

    def Prev(s):
        # Rewind the cursor one token (backtracking support).
        s.index -= 1

    def PeekIs(s, token):
        # True when every field given in `token` matches the current token.
        current = s.Peek()
        return all(key in current and current[key] == token[key]
                   for key in token)

    def PeekIsAny(s, tokens):
        # True when the current token matches any of the given patterns.
        return any(s.PeekIs(token) for token in tokens)

    def Match(s, token):
        # Consume and return the current token; raise on a mismatch.
        if not s.PeekIs(token):
            raise Exception(str(token) + " expected, but " +
                            str(s.Peek()) + " instead")
        matched = s.Peek()
        s.Next()
        return matched

#---------------------------------------------------------------------
# parsers
#---------------------------------------------------------------------

class MonoParser:
    """Base class for parsers that delegate to one sub-parser."""
    def __init__(s, parser):
        s.parser = parser

class BinaryParser:
    """Base class for parsers that delegate to two sub-parsers."""
    def __init__(s, lparser, rparser):
        s.lparser = lparser
        s.rparser = rparser

# parser for operators
class OpParser(MonoParser):
    """Base class for parsers that recognize operator tokens.

    Each entry of `opers` is either a single keyword string (e.g. "+")
    or a list of keywords forming one compound operator
    (e.g. ["not", "in"], emitted with "content" field "notin").
    """
    def __init__(s, parser, opers):
        MonoParser.__init__(s, parser)
        s.opers = opers

    def MatchOp(s, lex):
        """Try to consume one operator from `lex`.

        Returns a keyword lexeme for the longest matching operator, or
        None when nothing matches (the stream is left untouched then).
        Longest-match ensures "not in" wins over a lone "not".
        """
        maxlen = 0
        result = None
        for op in s.opers:
            if type(op) is str:
                # single-keyword operator (most operators).
                # BUG FIX: strings must never fall into the multi-keyword
                # branch below, where they were iterated char-by-char
                # (e.g. "<=" scanned as keywords "<" then "=").
                if maxlen == 0 and \
                   lex.PeekIs({"type":"keyword", "content":op}):
                    maxlen = 1
                    result = deepcopy(lex.Peek())

            elif maxlen < len(op):
                # compound operator, a sequence of keywords like
                # ["not", "in"]; output "content" becomes "notin"
                accept = True
                forward = 0
                for oper in op:
                    if lex.PeekIs({"type":"keyword", "content":oper}):
                        forward += 1
                        lex.Next()
                    else:
                        accept = False
                        break
                # rewind; the winning match is re-consumed below
                for i in range(forward):
                    lex.Prev()

                if accept:
                    maxlen = len(op)
                    result = deepcopy(lex.Peek())
                    result["content"] = "".join(op)

        if maxlen > 0:
            # consume the matched operator's tokens
            for i in range(maxlen):
                lex.Next()
            return result

class BinaryOpParser(OpParser):
    """Parses chains of binary operators.

    associate is "L" or "R":
      "L": a op b op c === (a op b) op c
      "R": a op b op c === a op (b op c)
    """
    def __init__(s, parser, opers, associate):
        OpParser.__init__(s, parser, opers)
        s.associate = associate

    # this -> parser (op parser)*
    # returns {type:binary-op, operator(lexeme), left, right}
    def Parse(s, lex):
        operands = [s.parser.Parse(lex)]
        operators = []
        while True:
            oper = s.MatchOp(lex)
            if oper is None:
                break
            operators.append(oper)
            operands.append(s.parser.Parse(lex))

        # right associativity folds over the reversed sequences,
        # swapping operand order back when building each node
        if s.associate == "R":
            operands = operands[::-1]
            operators = operators[::-1]

        tree = operands[0]
        for i, oper in enumerate(operators):
            left, right = tree, operands[i + 1]
            if s.associate == "R":
                left, right = right, left
            tree = {"type" : "binary-op",
                    "operator" : oper,
                    "left" : left,
                    "right" : right}
        return tree

# parser for "not", "+x", "-x"
class MonoOpParser(OpParser):
    """Parses prefix (unary) operators such as "not", "+x", "-x"."""

    # this -> (op)* parser
    # returns {type:mono-op, operator(lexeme), right}
    def Parse(s, lex):
        oper = s.MatchOp(lex)
        if oper is None:
            return s.parser.Parse(lex)
        # recurse so stacked prefixes ("not not x") nest naturally
        return {"type" : "mono-op",
                "operator" : oper,
                "right" : s.Parse(lex)}

# parser for comparison operators like "<", ">" to support chained
# grammar such as "a < b < c":
# a op b op c op ... y op z === (a op b) and (b op c) and ... and (y op z)
class ComparisonOpParser(OpParser):
    """Parses chained comparisons, expanding them into "and" nodes."""

    # this -> parser (op parser)*
    # returns {type:binary-op, operator(lexeme), left, right}
    def Parse(s, lex):
        operands = [s.parser.Parse(lex)]
        operators = []
        while True:
            oper = s.MatchOp(lex)
            if oper is None:
                break
            operators.append(oper)
            operands.append(s.parser.Parse(lex))

        if not operators:
            return operands[0]

        # one binary-op node per adjacent operand pair
        comparisons = [{"operator" : oper,
                        "type" : "binary-op",
                        "left" : operands[i],
                        "right" : operands[i + 1]}
                       for i, oper in enumerate(operators)]

        # left-fold the pairs together with synthesized "and" keywords
        linenum = operators[0]["line"]
        tree = comparisons[0]
        for comp in comparisons[1:]:
            andkw = {"type" : "keyword",
                     "content" : "and",
                     "line" : linenum}
            tree = {"operator" : andkw,
                    "type" : "binary-op",
                    "left" : tree,
                    "right" : comp}
        return tree

# parser for packed exprs
class PackedExprParser(MonoParser):
    """Parses comma-separated expression lists ("packed" expressions)."""

    # this -> parser, parser, ..., parser(,)?
    # returns {type:"packed", exprs:[expr], commas}
    # the returned record is a staging value, not a syntax-tree node
    def Parse(s, lex, ends):
        exprs = []
        commas = 0
        while not lex.PeekIsAny(ends):
            exprs.append(s.parser.Parse(lex))
            # continue only across an explicit comma separator
            if not lex.PeekIs({"type":"keyword", "content":","}):
                break
            lex.Next()
            commas += 1
        return {"type":"packed", "exprs":exprs, "commas":commas}

# parser for x.attr, x[index], x[begin:end], x(args)
class AttrIndexOrCallParser:
    """Parses postfix chains: x.attr, x[index], x[begin:end], x(args)."""
    def __init__(s, firstParser, indexParser, packedExprParser):
        s.firstParser = firstParser
        s.indexParser = indexParser
        s.packedExprParser = packedExprParser

    # this -> firstParser(postfix)?
    # postfix -> .id(postfix)?                      <1>
    # postfix -> [index](postfix)?
    # postfix -> (packedExprParser)(postfix)?       <4>
    # index -> indexParser                          <2>
    # index -> indexParser :                        <3>
    # index -> indexParser : indexParser            <3>
    # index -> :                                    <3>
    # index -> : indexParser                        <3>
    #
    # returns
    # <1> {type:binary-op, operator:lexeme("."), left, right(lexeme)}
    # <2> {type:index, left, index}
    # <3> {type:slice, left, beg, end}
    # <4> {type:call, left, args}
    def Parse(s, lex):
        node = s.firstParser.Parse(lex)
        while True:
            # case <1>: attribute access
            if lex.PeekIs({"type":"keyword", "content":"."}):
                dot = lex.Peek()
                lex.Next()
                attr = lex.Peek()
                if attr["type"] != "id":
                    raise Exception("Id expected, " +
                                    str(attr) + " instead")
                lex.Next()
                node = {"type" : "binary-op",
                        "operator" : dot,
                        "left" : node,
                        "right" : attr}

            # case <2> or <3>: subscript or slice
            elif lex.PeekIs({"type":"keyword", "content":"["}):
                lex.Next()
                lower = None
                upper = None
                sliced = False  # becomes True once a ":" is seen

                if not lex.PeekIs({"type":"keyword", "content":":"}):
                    lower = s.indexParser.Parse(lex)

                if lex.PeekIs({"type":"keyword", "content":":"}):
                    lex.Next()
                    sliced = True
                    if lex.PeekIs({"type":"keyword", "content":"]"}):
                        lex.Next()  # open-ended slice like x[a:]
                    else:
                        upper = s.indexParser.Parse(lex)
                        lex.Match({"type":"keyword", "content":"]"})
                else:
                    lex.Match({"type":"keyword", "content":"]"})

                if sliced:
                    node = {"type" : "slice",
                            "left" : node,
                            "beg" : lower,
                            "end" : upper}
                else:
                    node = {"type" : "index",
                            "left" : node,
                            "index" : lower}

            # case <4>: call
            elif lex.PeekIs({"type":"keyword", "content":"("}):
                lex.Next()
                closer = {"type":"keyword", "content":")"}
                packed = s.packedExprParser.Parse(lex, [closer])
                lex.Match(closer)
                node = {"type" : "call",
                        "left" : node,
                        "args" : packed["exprs"]}

            else:
                return node

# parser for factor
class FactorParser:
    """Parses atomic factors: literals, groups, tuples, lists, dicts."""
    def __init__(s, packedExprParser, exprParser):
        s.packedExprParser = packedExprParser
        s.exprParser = exprParser

    # this -> int | float | string | id (direct output) <1>
    # this -> (exprParser) <2>
    # this -> (packedExprParser) (comma >= 1) <3>
    # this -> [packedExprParser] <4>
    # this -> {exprParser : exprParser, ..., exprParser : exprParser (,)?} <5>
    #
    # returns
    # <1> lexemes(int, float, string, id)
    # <2> exprParser
    # <3> {type:tuple, tuple:[expr]}
    # <4> {type:list, list:[expr]}
    # <5> {type:dict, dict:[[key, value]]}
    def Parse(s, lex):
        peek = lex.Peek()

        # case <1>: literal or identifier lexeme
        if peek["type"] in ["int", "id", "string", "float"]:
            lex.Next()
            return deepcopy(peek)

        # case <2> or <3>: parenthesized grouping or tuple
        elif peek["type"] == "keyword" and peek["content"] == "(":
            lex.Next()
            end = {"type":"keyword", "content":")"}
            packed = s.packedExprParser.Parse(lex, [end])
            lex.Match(end)
            # a lone expression without commas is just grouping;
            # commas (or an empty pair of parens) form a tuple
            if packed["commas"] == 0 and packed["exprs"]:
                result = packed["exprs"][0]
            else:
                result = {"type" : "tuple",
                          "tuple" : packed["exprs"]}
            return result

        # case <4>: list literal
        elif peek["type"] == "keyword" and peek["content"] == "[":
            lex.Next()
            end = {"type":"keyword", "content":"]"}
            packed = s.packedExprParser.Parse(lex, [end])
            lex.Match(end)
            result = {"type" : "list",
                      "list" : packed["exprs"]}
            return result

        # case <5>: dict literal
        elif peek["type"] == "keyword" and peek["content"] == "{":
            lex.Next()
            kvpairs = []
            while True:
                peek = lex.Peek()
                if peek["type"] == "keyword" and peek["content"] == "}":
                    lex.Next()
                    break

                key = s.exprParser.Parse(lex)
                lex.Match({"type" : "keyword", "content" : ":"})
                value = s.exprParser.Parse(lex)
                kvpairs.append([key, value])

                if lex.PeekIs({"type":"keyword", "content":","}):
                    lex.Next()
                elif not lex.PeekIs({"type":"keyword", "content":"}"}):
                    # bug fix: report the token actually found here, not
                    # the stale `peek` captured before parsing the pair
                    raise Exception("} expected " + str(lex.Peek()) +
                                    " instead")

            result = {"type" : "dict",
                      "dict" : kvpairs}
            return result

        else:
            raise Exception("factor expected, " + str(peek) + " instead")

# parser for expression statements or assignment statements
class AssignOrExprStmtParser(MonoParser):
    """Parses an expression statement or an assignment statement.

    parser: should be a PackedExprParser
    opers: augmented-assign operators such as ["+=", "-=", ...]
    """
    def __init__(s, parser, opers):
        MonoParser.__init__(s, parser)
        s.opers = opers

    # this-> parser (exprs == 1, commas == 0) <1>
    # this-> parser = parser = ... = parser (exprs >= 1) <2>
    # this-> parser += parser (exprs == 1, commas == 0) <3>
    #
    # returns
    # <1> parser
    # <2> {type:assign, left:parsers[except last], right:parsers[last]}
    # <3> {type:assign, left:[first parser], right:second parser}
    # the parsers in left and right fields are wrapped in tuple nodes
    def Parse(s, lex):
        endTokens = [{"type":"newline"},
                     {"type":"keyword", "content":"="}]
        expr = s.parser.Parse(lex, endTokens)
        if not expr["exprs"]:
            raise Exception("At least one expression expected")

        # case <1>: plain expression statement
        if lex.PeekIs({"type":"newline"}):
            if len(expr["exprs"]) != 1:
                raise Exception("Packed expression not allowed")
            return expr["exprs"][0]

        # case <2>: (possibly chained) assignment a = b = ... = v
        elif lex.PeekIs({"type":"keyword", "content":"="}):
            lex.Next()
            packs = [expr]
            while True:
                expr = s.parser.Parse(lex, endTokens)
                if not expr["exprs"]:
                    raise Exception("At least one expression expected")
                packs.append(expr)
                if lex.PeekIs({"type":"keyword", "content":"="}):
                    lex.Next()
                else:
                    break

            # wrap each packed expression list into a tuple node
            tuples = [{"type":"tuple", "tuple":pack["exprs"]}
                      for pack in packs]
            return {"type" : "assign",
                    "left" : tuples[:-1],
                    "right" : tuples[-1]}

        # case <3>: augmented assignment, rewritten as "a = a op b"
        elif lex.Peek()["type"] == "keyword" and \
             lex.Peek()["content"] in s.opers:
            p = lex.Peek()
            opname = p["content"]
            op = {"type" : "keyword",
                  "line" : p["line"],
                  "content" : opname[:-1]}  # strip the trailing "="
            lex.Next()

            right = s.parser.Parse(lex, endTokens)
            if len(expr["exprs"]) != 1 or len(right["exprs"]) != 1:
                raise Exception("Packed expression not allowed")
            left = expr["exprs"][0]

            opnode = {"type" : "binary-op",
                      "operator" : op,
                      "left" : left,
                      "right" : right["exprs"][0]}
            return {"type" : "assign",
                    "left" : [{"type" : "tuple", "tuple" : [left]}],
                    "right" : {"type" : "tuple", "tuple" : [opnode]}}

        else:
            raise Exception("newline or assign-ops expected, " +
                            str(lex.Peek()) + " instead")

class SimpleStmtParser:
    """Parses one-line statements (everything terminated by a newline)."""
    def __init__(s, assignParser, exprParser, packedExprParser):
        s.assignParser = assignParser
        s.exprParser = exprParser
        s.packedExprParser = packedExprParser

    # this -> break newline <1>
    # this -> continue newline <2>
    # this -> assert exprParser newline <3>
    # this -> return exprParser? newline <4>
    # this -> raise Exception ( exprParser ) newline <5>
    # this -> print packedExprParser (exprs >= 1) newline <6>
    # this -> del packedExprParser (exprs >= 1) newline <7>
    # this -> pass newline <8>
    # this -> exec exprParser newline <9>
    # this -> exprParser newline <10>
    #
    # returns
    # <1> {type:break}  <2> {type:continue}
    # <3> {type:assert, expr}
    # <4> {type:return, expr} (a bare return yields id:None)
    # <5> {type:raise, expr}
    # <6> {type:print, exprs:[expr], newline(bool)}
    # <7> {type:del, exprs:[expr]}
    # <8> {type:pass}  <9> {type:exec, expr}
    # <10> exprParser
    def Parse(s, lex):
        tok = lex.Peek()
        kw = tok["content"] if tok["type"] == "keyword" else None

        # cases <1>/<2>: bare loop-control keywords
        if kw in ("break", "continue"):
            lex.Next()
            lex.Match({"type":"newline"})
            return {"type" : kw}

        # case <3>: assert
        if kw == "assert":
            lex.Next()
            cond = s.exprParser.Parse(lex)
            lex.Match({"type":"newline"})
            return {"type" : kw, "expr" : cond}

        # case <4>: return; a bare return becomes "return None"
        if kw == "return":
            lex.Next()
            if lex.PeekIs({"type":"newline"}):
                value = {"type" : "id",
                         "content" : "None",
                         "line" : lex.Peek()["line"]}
            else:
                value = s.exprParser.Parse(lex)
            lex.Match({"type":"newline"})
            return {"type" : kw, "expr" : value}

        # case <5>: only "raise Exception(expr)" is supported
        if kw == "raise":
            lex.Next()
            lex.Match({"type":"keyword", "content":"Exception"})
            lex.Match({"type":"keyword", "content":"("})
            value = s.exprParser.Parse(lex)
            lex.Match({"type":"keyword", "content":")"})
            lex.Match({"type":"newline"})
            return {"type":"raise", "expr":value}

        # cases <6>/<7>: print / del take a packed expression list
        if kw in ("print", "del"):
            lex.Next()
            packed = s.packedExprParser.Parse(lex, [{"type":"newline"}])
            lex.Match({"type":"newline"})
            exprs = packed["exprs"]
            node = {"type" : kw, "exprs" : exprs}
            if kw == "print":
                # a trailing comma (commas == len(exprs)) suppresses
                # the newline, matching Python 2 print semantics
                node["newline"] = (packed["commas"] < len(exprs)
                                   or not exprs)
            return node

        # case <8>: pass
        if kw == "pass":
            lex.Next()
            lex.Match({"type":"newline"})
            return {"type":"pass"}

        # case <9>: exec
        if kw == "exec":
            lex.Next()
            value = s.exprParser.Parse(lex)
            lex.Match({"type":"newline"})
            return {"type":"exec", "expr":value}

        # case <10>: plain expression / assignment statement
        stmt = s.assignParser.Parse(lex)
        lex.Match({"type":"newline"})
        return stmt

# parser for if, while, and function body
class BodyParser:
    """Parses the body of if/while/for/def: inline or indented block."""
    def __init__(s, simpleParser, blockParser, tabsize):
        s.simpleParser = simpleParser
        s.blockParser = blockParser
        s.tabsize = tabsize

    # this -> newline blockParser
    # this -> simpleParser
    def Parse(s, lex, indent):
        # an inline body ("if x: pass") is a single simple statement
        if not lex.PeekIs({"type":"newline"}):
            return [s.simpleParser.Parse(lex)]
        # otherwise a newline starts a deeper-indented block
        lex.Next()
        return s.blockParser.Parse(lex, indent + s.tabsize)

# parser for any statement, including newline, excluding first indent
class StmtParser:
    """Parses any statement (after its indent has been consumed)."""
    def __init__(s, exprParser, bodyParser, simpleStmtParser):
        s.exprParser = exprParser
        s.bodyParser = bodyParser
        s.simpleStmtParser = simpleStmtParser

    # dispatches to ParseXXX based on the leading keyword
    def Parse(s, lex, indent):
        handlers = {"for" : s.ParseFor,
                    "while" : s.ParseWhile,
                    "if" : s.ParseIf,
                    "try" : s.ParseTry}
        for keyword in ("for", "while", "if", "try"):
            if lex.PeekIs({"type":"keyword", "content":keyword}):
                return handlers[keyword](lex, indent)
        return s.simpleStmtParser.Parse(lex)

    # this -> for id (, id)* in exprParser : bodyParser
    # returns {type:for, vals:[id], expr, body}
    def ParseFor(s, lex, indent):
        lex.Next()  # consume "for"
        names = [deepcopy(lex.Match({"type":"id"}))]
        while lex.PeekIs({"type":"keyword", "content":","}):
            lex.Next()
            names.append(deepcopy(lex.Match({"type":"id"})))
        lex.Match({"type":"keyword", "content":"in"})
        iterable = s.exprParser.Parse(lex)
        lex.Match({"type":"keyword", "content":":"})
        return {"type" : "for",
                "vals" : names,
                "expr" : iterable,
                "body" : s.bodyParser.Parse(lex, indent)}

    # this -> while exprParser : bodyParser
    # returns {type:while, expr, body}
    def ParseWhile(s, lex, indent):
        lex.Next()  # consume "while"
        cond = s.exprParser.Parse(lex)
        lex.Match({"type":"keyword", "content":":"})
        return {"type" : "while",
                "expr" : cond,
                "body" : s.bodyParser.Parse(lex, indent)}

    # this -> if exprParser : bodyParser
    #         (indent elif exprParser : bodyParser)*
    #         (indent else : bodyParser)
    # returns {type:if, chain:[[expr, body]], else:body or None}
    def ParseIf(s, lex, indent):
        lex.Next()  # consume "if"
        cond = s.exprParser.Parse(lex)
        lex.Match({"type":"keyword", "content":":"})
        chain = [[cond, s.bodyParser.Parse(lex, indent)]]
        result = {"type":"if", "chain":chain, "else":None}

        # gather elif/else clauses at the same indent level
        while lex.PeekIs({"type":"indent", "indent":indent}):
            lex.Next()
            if lex.PeekIs({"type":"keyword", "content":"elif"}):
                lex.Next()
                cond = s.exprParser.Parse(lex)
                lex.Match({"type":"keyword", "content":":"})
                chain.append([cond, s.bodyParser.Parse(lex, indent)])
            elif lex.PeekIs({"type":"keyword", "content":"else"}):
                lex.Next()
                lex.Match({"type":"keyword", "content":":"})
                result["else"] = s.bodyParser.Parse(lex, indent)
                break
            else:
                # the indent belongs to a following statement,
                # not this clause — push it back and stop
                lex.Prev()
                break
        return result

    # this -> try : bodyParser indent except Exception , id : bodyParser
    # returns {type:try, try-body, except-id, except-body}
    def ParseTry(s, lex, indent):
        lex.Next()  # consume "try"
        lex.Match({"type":"keyword", "content":":"})
        trybody = s.bodyParser.Parse(lex, indent)
        lex.Match({"type":"indent", "indent":indent})
        lex.Match({"type":"keyword", "content":"except"})
        lex.Match({"type":"keyword", "content":"Exception"})
        lex.Match({"type":"keyword", "content":","})
        name = deepcopy(lex.Match({"type":"id"}))
        lex.Match({"type":"keyword", "content":":"})
        return {"type" : "try",
                "try-body" : trybody,
                "except-id" : name,
                "except-body" : s.bodyParser.Parse(lex, indent)}

# parser for any statement, including first indent and newline
class IndentStmtParser:
    """Parses one statement after requiring its leading indent token."""
    def __init__(s, stmtParser):
        s.stmtParser = stmtParser

    def Parse(s, lex, indent):
        # the indent must match exactly, then the statement follows
        lex.Match({"type":"indent", "indent":indent})
        return s.stmtParser.Parse(lex, indent)

# parser for a block of indented statements
class BlockStmtParser:
    """Parses a run of statements sharing one indentation level."""
    def __init__(s, indentStmtParser):
        s.indentStmtParser = indentStmtParser

    # returns a list of statements
    def Parse(s, lex, indent):
        stmts = []
        # stop at the first token that is not an indent of this depth
        while lex.PeekIs({"type":"indent", "indent":indent}):
            stmts.append(s.indentStmtParser.Parse(lex, indent))
        return stmts

# parser for from-import statement
class ImportParser:
    """Parses from-import statements and records them in `imports`."""
    def __init__(s, imports):
        s.imports = imports

    # this -> from id import (* | id)
    # returns {type:import, lib, func:id or None}
    def Parse(s, lex):
        lex.Match({"type":"keyword", "content":"from"})
        lib = lex.Match({"type":"id"})
        lex.Match({"type":"keyword", "content":"import"})
        node = {"type":"import", "lib":lib, "func":None}
        if lex.PeekIs({"type":"keyword", "content":"*"}):
            lex.Next()  # wildcard import: func stays None
        else:
            node["func"] = lex.Match({"type":"id"})
        lex.Match({"type":"newline"})

        # remember every import so the whole program can resolve them
        s.imports.append(node)
        return node

# parser for functions
class FuncParser:
    """Parses function definitions."""
    def __init__(s, exprParser, bodyParser):
        s.exprParser = exprParser
        s.bodyParser = bodyParser

    # this -> def id \( (vid (= exprParser)? ,)* \) : bodyParser
    #     the last comma may be omitted
    # returns {type:function, name, args:[[id, expr or None]], body}
    def Parse(s, lex, indent):
        lex.Match({"type":"keyword", "content":"def"})
        name = lex.Match({"type":"id"})
        lex.Match({"type":"keyword", "content":"("})

        params = []
        while lex.PeekIs({"type":"id"}):
            pname = lex.Peek()
            lex.Next()

            # optional default value after "="
            default = None
            if lex.PeekIs({"type":"keyword", "content":"="}):
                lex.Next()
                default = s.exprParser.Parse(lex)
            params.append([pname, default])

            if not lex.PeekIs({"type":"keyword", "content":","}):
                break
            lex.Next()

        lex.Match({"type":"keyword", "content":")"})
        lex.Match({"type":"keyword", "content":":"})
        return {"type" : "function",
                "name" : name,
                "args" : params,
                "body" : s.bodyParser.Parse(lex, indent)}

# parser for class
class ClassParser:
    """Parses class definitions: header, methods, class-level statements."""
    def __init__(s, funcParser, stmtParser, tabsize):
        s.funcParser = funcParser
        s.stmtParser = stmtParser
        s.tabsize = tabsize

    # this -> class id (\( id? \))? : newline
    #         (( (indent @staticmethod newline)?
    #            indent funcParser) |
    #          stmtParser)*
    # returns {type:class, name, parent:id or None,
    #          methods[[func, bool(is static)]], body}
    def Parse(s, lex):
        lex.Match({"type":"keyword", "content":"class"})
        name = lex.Match({"type":"id"})

        # optional parent class inside parentheses
        parent = None
        if lex.PeekIs({"type":"keyword", "content":"("}):
            lex.Next()
            if lex.PeekIs({"type":"id"}):
                parent = lex.Peek()
                lex.Next()
            lex.Match({"type":"keyword", "content":")"})

        lex.Match({"type":"keyword", "content":":"})
        lex.Match({"type":"newline"})

        methods = []
        body = []
        while lex.PeekIs({"type":"indent", "indent":s.tabsize}):
            lex.Next()
            if lex.PeekIs({"type":"keyword", "content":"@staticmethod"}):
                # decorator line, then the def on the next line
                lex.Next()
                lex.Match({"type":"newline"})
                lex.Match({"type":"indent", "indent":s.tabsize})
                methods.append([s.funcParser.Parse(lex, s.tabsize), True])
            elif lex.PeekIs({"type":"keyword", "content":"def"}):
                methods.append([s.funcParser.Parse(lex, s.tabsize), False])
            else:
                body.append(s.stmtParser.Parse(lex, s.tabsize))

        return {"type" : "class",
                "name" : name,
                "parent" : parent,
                "methods" : methods,
                "body" : body}

# parser for an entire program
class ProgramParser:
    """Parses an entire program into a list of top-level nodes."""
    def __init__(s, importParser, funcParser,
                 classParser, stmtParser):
        s.importParser = importParser
        s.funcParser = funcParser
        s.classParser = classParser
        s.stmtParser = stmtParser
        s.imports = []

    def _NameAssign(s, value):
        # build the tree for:  __name__ = "<value>"
        return {"type" : "assign",
                "left" : [{"type" : "tuple",
                           "tuple" : [{"type" : "id",
                                       "content" : "__name__"}]}],
                "right" : {"type" : "tuple",
                           "tuple" : [{"type" : "string",
                                       "content" : value}]}}

    # this -> (importParser | classParser | funcParser | stmtParser)*
    # returns a list of nodes, bracketed by assignments that set
    # __name__ to newname on entry and restore oldname at the end
    def Parse(s, lex, newname, oldname):
        del s.imports[:]
        script = [s._NameAssign(newname)]
        while lex.PeekIs({"type":"indent", "indent":0}):
            lex.Next()
            if lex.PeekIs({"type":"keyword", "content":"from"}):
                script.append(s.importParser.Parse(lex))
            elif lex.PeekIs({"type":"keyword", "content":"class"}):
                script.append(s.classParser.Parse(lex))
            elif lex.PeekIs({"type":"keyword", "content":"def"}):
                script.append(s.funcParser.Parse(lex, 0))
            else:
                script.append(s.stmtParser.Parse(lex, 0))
        script.append(s._NameAssign(oldname))
        return script

# syntax tree printer
def SimplePrint(root, indent = ""):
    if type(root) is dict:
        if "type" in root and \
           root["type"] in ["id", "int", "float", "string", "keyword"]:
            print indent + root["type"] + ": " + str(root["content"])

        else:
            for key in root:
                SimplePrint(key, indent)
                SimplePrint(root[key], indent + "  ")
                
    elif type(root) is list:
        for i, item in enumerate(root):
            SimplePrint(item, indent)
            if i < len(root) - 1: print
            
    else:
        print indent + str(root)

def SimpleCount(root, count):
    # count the nodes of a parse tree into count[0] (mutated in place,
    # so the caller passes a one-element list as an out-parameter)
    #
    # every dict, list, dict key and scalar visited adds 1; leaf token
    # dicts (id/int/float/string/keyword) count as a single node and
    # are not descended into
    count[0] += 1
    if type(root) is dict:
        if "type" in root and \
           root["type"] in ["id", "int", "float", "string", "keyword"]:
            return

        else:
            for key in root:
                SimpleCount(key, count)
                SimpleCount(root[key], count)

    elif type(root) is list:
        # (was enumerate with an unused index)
        for item in root:
            SimpleCount(item, count)

def BuildParser(tabsize):
    # wire together the whole parser graph and return the ProgramParser
    #
    # operator precedence, tightest first: unary +/-, then * / %,
    # + -, comparisons, is / is not, in / not in, not, and, or.
    # the grammar is cyclic, so a few links are patched in after the
    # objects are constructed.
    atom = FactorParser(None, None)
    postfix = AttrIndexOrCallParser(atom, None, None)
    unaryOps = MonoOpParser(postfix, ["+", "-"])
    mulOps = BinaryOpParser(unaryOps, ["*", "/", "%"], "L")
    addOps = BinaryOpParser(mulOps, ["+", "-"], "L")
    cmpOps = ComparisonOpParser(addOps, ["<", "<=", ">", ">=", "!=", "=="])
    isOps = BinaryOpParser(cmpOps, ["is", ["is", "not"]], "R")
    inOps = BinaryOpParser(isOps, ["in", ["not", "in"]], "R")
    notOp = MonoOpParser(inOps, ["not"])
    andOp = BinaryOpParser(notOp, ["and"], "L")
    orOp = BinaryOpParser(andOp, ["or"], "L")
    packed = PackedExprParser(orOp)
    # close the cycles: factors and index expressions recurse into
    # full expressions
    postfix.indexParser = orOp
    postfix.packedExprParser = packed
    atom.exprParser = orOp
    atom.packedExprParser = packed

    assignStmt = AssignOrExprStmtParser(packed, ["+=", "-=", "*=", "/=", "%="])
    simpleStmt = SimpleStmtParser(assignStmt, orOp, packed)
    bodyParser = BodyParser(simpleStmt, None, tabsize)
    anyStmt = StmtParser(orOp, bodyParser, simpleStmt)
    indentStmt = IndentStmtParser(anyStmt)
    blockStmt = BlockStmtParser(indentStmt)
    bodyParser.blockParser = blockStmt  # another cycle: bodies contain blocks

    importStmt = ImportParser(None)
    funcDef = FuncParser(orOp, bodyParser)
    classDef = ClassParser(funcDef, anyStmt, tabsize)
    prog = ProgramParser(importStmt, funcDef, classDef, anyStmt)
    # share one imports list so Parse() can see what ImportParser found
    importStmt.imports = prog.imports

    return prog

def Parse(stream, lexer, parser, origindir):
    # lex + parse `stream`, then recursively every module it imports
    #
    # returns a dict mapping module name -> stats dict with keys
    # lexer-time/out/tokens/lines and parser-time/out/nodes.  each
    # import node found is expanded in place; modules already seen
    # (or queued) are turned into "pass" nodes instead of re-parsing.
    # newly found imports go to the FRONT of the queue, so imports
    # are expanded depth-first.
    results = {}
    tree = {"type":"import", "libname":"__main__", "func":None}
    q = [{"newname" : "__main__",
          "oldname" : "__main__",
          "stream" : stream,
          "node" : tree,
          "dir" : origindir}]

    while q:
        first = q.pop(0)  # (was: first = q[0]; del q[0])

        # --- tokenize ---
        start = clock()
        lexer.SetStream(first["stream"])
        preparser = PreParser(lexer)
        tokens = preparser.Program()
        result = {}
        result["lexer-time"] = clock() - start
        result["lexer-out"] = tokens
        result["lexer-tokens"] = len(tokens)
        # line count of the source = line number of the last token
        result["lexer-lines"] = tokens[-1]["line"] if tokens else 0

        # --- parse ---
        start = clock()
        tf = TokenFlow(tokens)
        tree = parser.Parse(tf, first["newname"], first["oldname"])
        treesize = [0]
        SimpleCount(tree, treesize)
        result["parser-nodes"] = treesize[0]
        result["parser-time"] = clock() - start
        result["parser-out"] = tree
        results[first["newname"]] = result

        # splice the parsed module into the import node that requested it
        first["node"]["expand"] = tree

        for node in parser.imports:
            libname = node["lib"]["content"]
            if libname in results:
                # already parsed or queued: neutralize the import node
                node.clear()
                node["type"] = "pass"
            else:
                assert node["type"] == "import"
                # path.join handles an empty directory correctly
                # (was a manual len() guard + "/" concatenation)
                fpath = path.join(first["dir"], libname + ".py")
                # isfile implies exists, so one check suffices
                if path.isfile(fpath):
                    fs = FileStream(fpath)
                    newdir, name = path.split(fpath)
                    item = {"newname" : name[:-3],   # strip ".py"
                            "oldname" : first["newname"],
                            "stream" : fs,
                            "node" : node,
                            "dir" : newdir}
                    # sentinel entry: marks the module as queued so a
                    # second import of it becomes a "pass" node
                    results[name[:-3]] = None
                    # prepend instead of rebuilding the list
                    q.insert(0, item)

    return results

if __name__ == "__main__":
    streampath = "test/test6.py"
    fs = FileStream(streampath)
    fpath, fname = path.split(streampath)
    
    start = clock()
    lex = Lexer()
    print "Lexer build time :", clock() - start

    parser = BuildParser(4)

    start = clock()
    results = Parse(fs, lex, parser, fpath)
    print "Parser run time :", clock() - start
    print

    for r in results:
        print "Module:", r
        print "Lines of code:", results[r]["lexer-lines"]
        print "Lexer time:", results[r]["lexer-time"]
        print "Lexer token count:", results[r]["lexer-tokens"]
        print "Parser time:", results[r]["parser-time"]
        print "Parser tree size:", results[r]["parser-nodes"]
        
        #SimplePrint(results[r]["parser-out"])
        print 

    print "Done"
    
