#!/usr/bin/python -S
"""
bootstrap.py

Bootstrapping the meta-grammar.

The meta-grammar is specified in a lisp-ish s-expression syntax.

Actions receive tokens or the return value of other actions.

Tokens are tuples of (type string, value, integer ID).

Inside an action, you can validate the input and raise errors.  You can use the
integer ID to look up error information.

TODO: Should all actions get a context object?

Then they can do:

context.error() -> report an error
context.getpos(token_id) -> positional information recorded by the lexer.

Look at JSON Template.


"""

from pprint import pprint

import util


# The meta-grammar itself: a dict mapping rule name -> expression tree.
#
# Expression node tags used below: Seq, Choice, Star, Plus, Question, Not,
# Dot (any token), Ref (reference to another rule), TokenRef (reference to a
# lexer token).
DEFS = {
    # One or more definitions, with no tokens left over afterward.
    'Start': ['Seq', ['Plus', ['Ref', 'Definition']], ['Not', ['Dot']]],

    # IDENTIFIER '<-' Choice
    'Definition': ['Seq', ['TokenRef', 'IDENTIFIER'],
                          ['TokenRef', '<-'],
                          ['Ref', 'Choice'],
                  ],

    # Sequence ('/' Sequence)*
    'Choice': ['Seq', ['Ref', 'Sequence'],
                      ['Star', ['Seq', ['TokenRef', '/'],
                                       ['Ref', 'Sequence']]],
                  ],
    # Must be Plus instead of Star so that an empty list of tokens is not a
    # sequence.
    'Sequence': ['Plus', ['Ref', 'Prefix']],
    # Optional AND/NOT lookahead operator, then a Suffix.
    'Prefix': ['Seq', ['Question', ['Choice', ['TokenRef', 'AND'],
                                              ['TokenRef', 'NOT']]],
                      ['Ref', 'Suffix'],
              ],
    # A Primary, optionally followed by ?, *, or +.
    'Suffix': ['Seq', ['Ref', 'Primary'],
                      ['Question', ['Choice', ['TokenRef', 'QUESTION'],
                                              ['TokenRef', 'STAR'],
                                              ['TokenRef', 'PLUS'],
                                   ]
                      ]
              ],
    # Token literal, '.', a rule name (not followed by '<-', which would start
    # a new definition), or a parenthesized Choice.
    'Primary':
        ['Choice',
            ['TokenRef', 'TOKEN'],
            ['TokenRef', 'DOT'],
            ['Seq', ['TokenRef', 'IDENTIFIER'], ['Not', ['TokenRef', '<-']]],
            ['Seq', ['TokenRef', '('], ['Ref', 'Choice'], ['TokenRef', ')']],
        ],
    }


def Start(parsed):
  """Collect all parsed definitions into a compact name -> value dict.

  Args:
    parsed: list whose first element is a list of ['Definition', name, value]
        nodes (what the Definition action returns).
  """
  grammar = {}
  for node in parsed[0]:
    assert len(node) == 3, node
    # node is ['Definition', name, value]; the tag itself is discarded.
    grammar[node[1]] = node[2]
  return grammar


def Definition(parsed):
  """Build a ['Definition', name, value] node from (IDENTIFIER, '<-', Choice).

  Args:
    parsed: 3-item list of (lhs identifier node, '<-' token, rhs expression).
  """
  assert len(parsed) == 3, parsed
  lhs, _arrow, rhs = parsed
  # TODO: This is the LHS.  Fix IDENTIFIER to return Identifier, and then
  # change it to Ref on the RHS?
  # The identifier action wraps the name as ['Ref', name]; unwrap it here.
  assert lhs[0] == 'Ref', lhs
  return ['Definition', lhs[1], rhs]


def Choice(parsed):
  """Flatten (Sequence, ('/' Sequence)*) into a ['Choice', ...] node.

  A single alternative with no trailing '/' clauses collapses to the
  alternative itself rather than a one-element Choice.
  """
  assert len(parsed) == 2, parsed
  head, tail = parsed

  # Collapse singleton CHOICES.
  if not tail:
    return head

  for pair in tail:
    assert len(pair) == 2, pair  # ('/' token, sequence node)

  # Drop the '/' tokens and keep only the sequence nodes.
  return ['Choice', head] + [pair[1] for pair in tail]


def Sequence(parsed):
  """Wrap multiple Prefix results in a ['Seq', ...] node.

  Args:
    parsed: A non-empty list of whatever Prefix returns.
  """
  assert parsed, parsed
  # Collapse a singleton sequence to its only element.
  return parsed[0] if len(parsed) == 1 else ['Seq'] + list(parsed)


def Prefix(parsed):
  """Apply an optional AND/NOT prefix operator to a Suffix node.

  Args:
    parsed: (optional operator, suffix node); the operator slot is [] when no
        AND/NOT was present, else (choice index, token), e.g.
        (0, ('AND', None, 7)).
  """
  assert len(parsed) == 2, parsed
  operator, suffix = parsed

  # No prefix operator: pass the suffix through untouched.
  if not operator:
    return suffix

  _choice_index, (token_name, _, _) = operator
  # Title-case the token name to get the node tag: AND -> And, NOT -> Not.
  return [token_name.title(), suffix]


def Suffix(parsed):
  """Apply an optional ?/*/+ suffix operator to a Primary node.

  Args:
    parsed: (primary node, optional operator); the operator slot is [] when no
        suffix was present, else (choice index, token), e.g.
        (1, ('STAR', None, 7)).
  """
  assert len(parsed) == 2, parsed
  primary, operator = parsed

  # No suffix operator: pass the primary through untouched.
  if not operator:
    return primary

  _choice_index, (token_name, _, _) = operator
  # Title-case the token name to get the node tag: PLUS -> Plus, etc.
  return [token_name.title(), primary]


def Primary(parsed):
  """Extract the node for whichever Primary alternative matched.

  Args:
    parsed: (choice index, captured value) from the Primary rule's Choice.

  Raises:
    AssertionError: if the choice index is out of range.
  """
  which, captured = parsed
  if which in (0, 1):
    # TOKEN or DOT: the token action's result passes straight through.
    return captured
  if which == 2:
    # IDENTIFIER !'<-' captures exactly one item (the identifier node).
    assert len(captured) == 1, captured
    return captured[0]
  if which == 3:
    # '(' Choice ')': keep only the middle element.
    assert len(captured) == 3, captured
    return captured[1]
  raise AssertionError(which)


def IDENTIFIER(parsed):
  """Turn an identifier token (on the LHS or RHS) into a ['Ref', name] node."""
  # parsed is a (type, value, id) token; only the value matters here.
  return ['Ref', parsed[1]]


def TOKEN(parsed):
  """Turn a token-literal token into a ['TokenRef', name] node."""
  # parsed is a (type, value, id) token; only the value matters here.
  return ['TokenRef', parsed[1]]


def DOT(parsed):
  """Emit a ['Dot'] node; the token's value and ID are irrelevant."""
  return ['Dot']


def GetParser():
  """Build a parser for the meta-grammar, with one action per definition.

  Returns:
    A tpe_compile.Parser over DEFS, using the action functions defined in
    this module (looked up by name via util.get_actions).
  """
  import tpe_compile
  grammar_actions = util.get_actions(__name__)
  return tpe_compile.Parser(DEFS, grammar_actions)
