#!/usr/bin/python
"""
tpe_compile.py

Compiles s-expression output to a tree of nodes (in tpe_nodes.py).

TODO

- The compilation process needs a second stage to detect *unreferenced symbols*
"""

import tpe_bootstrap
import tpe_lex
import tpe_nodes

__all__ = ['Parser']


def log(msg, *args):
  # Disabled debug logger: the message is %-formatted when args are given
  # (so formatting errors still surface) but is intentionally never printed.
  msg = msg % args if args else msg


class Compiler(object):
  """Compiles a dict of s-expression definitions into matcher node trees.

  Compilation requires shared mutable state (the refs/compiled caches that
  let references be shared and resolved lazily), which is why this is a
  class rather than a plain recursive function.
  """

  def __init__(self, actions):
    """
    Args:
      actions: Dictionary of { name -> action function }

    While compiling definitions, RNodes will be wrapped around
    Matcher objects.  TODO: Rename RNodes to ActionNodes?
    """
    self.actions = actions
    # TODO: We could memoize more values, so instances like Range('07') and
    # Range('07') can be shared?  Right now we share Ref('Sequence') and so forth.
    self.compiled = {}
    self.refs = {}
    # refs is filled with any references constructed, so they can be shared.
    # compiled is passed to references so they can be resolved at runtime.

  def CompileDefs(self, defs):
    """Compile a dictionary of s-expressions.

    Args:
      defs: dict of { identifier -> s-expression (nested lists) }

    Returns:
      dict of { identifier -> matcher node }.  Also stored as self.compiled
      so Ref nodes can resolve names at runtime.
    """
    for identifier, expr in defs.iteritems():
      node = self.CompileExpr(expr)
      # If an action is registered under this rule name, wrap the matcher
      # so the action fires on a successful match.
      a = self.actions.get(identifier)
      if a:
        node = tpe_nodes.RNode(node, a)

      self.compiled[identifier] = node

    return self.compiled

  def CompileExpr(self, expr, depth=0):
    """Recursive procedure to compile a single s-expression into a matcher.

    Args:
      expr: list s-expression; expr[0] names the node type.
      depth: recursion depth (retained for debug indentation).

    Returns:
      A matcher node from tpe_nodes, possibly shared via self.refs.
    """
    # NOTE: Removed an unused nested log() helper here that shadowed the
    # module-level log(); all of its call sites were commented out.
    assert isinstance(expr, list), expr
    node_type = expr[0]

    # Reference to another definition.
    if node_type == 'Ref':
      name = expr[1]
      try:
        # Reuse a cached reference so equal refs share one node.
        matcher = self.refs[name]
      except KeyError:
        matcher = tpe_nodes.Ref(name, self.compiled)
        self.refs[name] = matcher
    # Reference to a token.
    elif node_type == 'TokenRef':
      # NOTE: For now we're going to share 'refs', since we have different naming.
      name = expr[1]
      try:
        matcher = self.refs[name]
      except KeyError:
        # Actions can be registered by name.  These functions have to be CAPS
        # then.  They are mixed in the same namespace as actual tokens.
        # TODO: You could consider the t_ naming convention?  Meh.
        matcher = tpe_nodes.TokenRef(name)
        a = self.actions.get(name)
        if a:
          matcher = tpe_nodes.RNode(matcher, a)
        self.refs[name] = matcher

    else:
      # Generic node: look up the node class by name and dispatch on arity.
      cls = getattr(tpe_nodes, node_type)
      arity = getattr(cls, 'ARITY', None)
      if arity == 0:
        matcher = cls()
      elif arity == 1:
        assert len(expr) == 2, "Too many args: %s" % expr
        arg = expr[1]
        matcher = cls(self.CompileExpr(arg, depth=depth+1))
      elif arity == 2:
        # Special case: Range has arity 2, and its arguments have to be
        # literals -- they are NOT compiled recursively.
        arg1, arg2 = expr[1], expr[2]
        matcher = cls(arg1, arg2)
      else:
        # Variadic (no ARITY attribute): compile every argument.
        args = [self.CompileExpr(a, depth=depth+1) for a in expr[1:]]

        for a in args:
          # Sanity check: a compiled child should never be a bare boolean.
          assert a is not True, args
        matcher = cls(args)

    return matcher


class Parser(object):
  # TODO: Need a top level symbol.

  def __init__(self, rules, actions=None):
    """
    Args:
      rules: grammar as a string, a list of string fragments, or an
        already-parsed dict of definitions (for bootstrapping).
      actions: dict of { rule name -> function }
    """
    if isinstance(rules, dict):  # for bootstrapping
      defs = rules
    else:
      # BUG FIX: previously a list of rules was joined into a string but
      # then never lexed/parsed, leaving 'defs' unbound (NameError below).
      # Now the joined string falls through to the meta-grammar parse.
      if isinstance(rules, list):
        rules = ''.join(rules)
      # Compile meta grammar
      lexer = tpe_lex.GetLexer()
      tokens = list(lexer.lex(rules))
      meta_parser = tpe_bootstrap.GetParser()
      defs, pos = meta_parser.parse(tokens)

    actions = actions or {}
    # While recursing through the tree structure, global state has to be
    # maintained, so we use a class here.
    c = Compiler(actions)
    self.compiled = c.CompileDefs(defs)

  # parse a string.  Analogous to re.match, etc.
  # Do we need re.replace or anything?
  def parse(self, s, start='Start'):
    """Match string s against the rule named by 'start'.

    Args:
      s: input string to parse.
      start: rule name to begin at; must exist in the compiled grammar.
    """
    assert isinstance(start, basestring), start
    node = self.compiled[start]
    return tpe_nodes.Execute(node, s)
