#!/usr/bin/python -S
"""
lexer.py

More TODO:

- Highlighting %s inside Python strings

Nested comments example:
  push/pop
You might need to dynamically exit the state for this.  When a counter reaches
0, then exit?

- Is there a benefit to having a stack of states?

- Possibly generalize regex to predicate.  Then you could provide a dictionary
  of keywords and have faster switching.
  - At least it will be readable, instead of the pygments-style lexer approach
    of writing for|if|while|def|class ... plus handling all the exceptions.


Right now the format looks like:
  - (re, func, state)
  - (pair, state)

  PatternEdge(re, func) has a __call__
  It returns (token type, value, new pos)

  What I want to return for syntax highlighting:


  [string, color type] -- maybe multiple ones

  instead of __call__, it can also use color or something?
  StringTokens()

Coloring:

  l = annex.Lexer()
  l.lex()

  l.color()  --> return a stream of (STRING, KIND)

  scheme = {
    'literal': 'red',
    }
  annex.WriteHtml(token pairs, scheme)
  surround it with <pre>


TODO: DEFINE protocols:

New protocol:

Lexer function:

(s, pos, context) -> [(channel, type, value, pos obj), ... ], new pos

channel 0 are the tokens for parsing, channel 1 are the tokens for
highlighting.

Extractor:

(match, context) -> (type, value)


"""

import re
import sre_constants
import string

__all__ = [
    'Lexer', 'SimpleToken', 'SimpleTokenType', 'MappedTokenValue',
    'MappedTokenType']


class Error(RuntimeError):
  """Base class for errors raised by this lexer module."""


def ch(c):
  """Render character 'c' for display: printable, non-whitespace characters
  as themselves; everything else (including whitespace) via repr()."""
  if c in string.printable and c not in string.whitespace:
    return c
  else:
    return repr(c)


def MakeErrorString(s, pos):
  """
  Given a string and a position, produce a human-readable, two-line string
  that points at that position.

  Each character in a window around 'pos' is rendered via ch() in a
  4-column cell ('%-3s' plus the join separator), and a caret on the
  second line marks the cell for s[pos].

  NOTE: a cell wider than 3 columns (e.g. repr() of an escape such as
  '\\x00') shifts the columns after it, so the caret can drift in that
  case.

  TODO: later we could have line numbers.
  """
  window = 10  # characters of context on each side of 'pos'
  a = max(0, pos - window)
  b = pos + window
  substr = s[a:b]
  line1 = ' '.join('%-3s' % ch(c) for c in substr)
  # The character s[pos] is substr[pos - a], and each cell is 4 columns
  # wide.  (The old code always used window * 4, which mis-pointed
  # whenever pos < window.)
  num_spaces = (pos - a) * 4
  line2 = (num_spaces * ' ') + '^'
  return line1 + '\n' + line2 + '\n'


def CompileEdgeOld(edge, use_re):
  """Compile an old-style (pattern, extractor[, new_state]) edge tuple.

  Args:
    edge: sequence of 1 to 3 elements: pattern string, optional extractor
      function, optional new state name.  Missing elements default to None.
    use_re: if True, compile with Python's re module (re.VERBOSE);
      otherwise compile as a CRE via cre_compile.

  Returns:
    (PatternEdge, new_state) pair; new_state is None if not given.

  Raises:
    Error: wrong number of elements, or the pattern fails to compile.
      (Error subclasses RuntimeError, so existing callers still work.)
  """
  # Need to do this here to break circular dependency.
  import cre_compile

  edge = list(edge)
  if len(edge) not in (1, 2, 3):
    # BUG FIX: this message used to reference an undefined name 'e'.
    raise Error("Expected 1, 2 or 3 elements: %s" % edge)
  while len(edge) < 3:  # fill in None for missing
    edge.append(None)

  pat, func, new = edge
  try:
    # TODO: how to add flags?
    if use_re:
      p = re.compile(pat, re.VERBOSE)
    else:
      # TODO: also catch error for CRE compilation error.  Not sure if CREs
      # can ever fail to compile.
      p = cre_compile.Regex(pat)
      p.allow_compat()  # So PatternEdge can use the .match() interface.
  except sre_constants.error as e:  # 'as' syntax works on Python 2.6+ and 3
    raise Error("Error compiling %r: %s" % (pat, e))

  return PatternEdge(p, func), new


class PatternEdge(object):
  """
  A function which is defined by a regex and an 'extractor'.

  Provides two channels:
    typed()  -- tokens for parsing (type/value chosen by the extractor)
    string() -- tokens for syntax coloring (type is self.kind, value is
                the matched text)
  """
  def __init__(self, regex, extractor, kind='DEFAULT'):
    """
    Args:
      regex: A COMPILED regular expression (with .match()), or a CRE
        object (with .execute()).
      extractor: A function from (match, context) -> (type, value), or
        None to match-and-advance without generating tokens.
      kind: type of token for the 'string' channel.
    """
    self.regex = regex
    # CRE objects expose .execute(); Python re objects expose .match().
    if hasattr(regex, 'execute'):
      self.func = regex.execute
    else:
      self.func = regex.match
    self.extractor = extractor
    self.kind = kind

  def __str__(self):
    return '<PatternEdge %s>' % (self.regex.pattern)

  def typed(self, s, pos, context, pos_table):
    """
    Args:
      s: string to tokenize
      pos: current position in 's'
      context: dictionary that may be mutated.
      pos_table: Line number information to be updated.

    Returns:
      token_type, value: returned by extractor function
      pos: New position in string.

    NOTES:
    - Is there a use case for generating multiple tokens?
    - Test out updating the symbol table.
    """
    m = self.func(s, pos)
    if not m:
      return None, None, pos  # no match, at the same spot

    new_pos = m.end()

    if not self.extractor:
      return None, None, new_pos  # advance

    # TODO: formalize the protocol.  How to do error reporting?  Maybe
    # context.error() ?
    token = self.extractor(m, context)

    if token is None:  # None means generate no tokens
      return None, None, new_pos

    token_type, value = token
    # User can return None to use the extractor's name as the token type.
    # BUG FIX: this used to reference an undefined name 'function', so any
    # falsy token type raised NameError.
    token_type = token_type or self.extractor.__name__

    # Record positional information in the table, indexed by token_id.
    # TODO: Need to do this with line/col numbers too, not just
    # positions.
    # Oops: function should return this... maybe save all the matches?
    # The single string token encompasses multiple matches.
    if pos_table is not None:
      pos_table.append((m.start(), m.end()))

    return token_type, value, new_pos

  def string(self, s, pos, context, pos_table):
    """
    Stream for coloring: returns (self.kind, matched text, new position).
    """
    m = self.func(s, pos)
    if not m:
      return None, None, pos  # no match, at the same spot

    # Still need to call this for side effects.
    if self.extractor:
      self.extractor(m, context)

    # do we need a self.COLOR_TYPE?  or self.KIND
    new_pos = m.end()
    value = m.group(0)
    return self.kind, value, new_pos


class Lexer(object):
  """
  A scanner where each edge is a function.

  'machine' maps a state name to a list of edges.  Each edge is one of:
    - a bare PatternEdge (state is unchanged on match),
    - an (obj, new_state) pair, where obj has the channel methods, or
    - an old-style (pattern_string, extractor[, new_state]) tuple,
      compiled via CompileEdgeOld.
  """
  def __init__(self, machine, use_re=False):
    """
    Args:
      machine: state machine description (see class docstring).
      use_re: Compile patterns as Python regular expressions, not CREs.
    """
    self.machine = {}  # compiled version

    # COMPAT FIX: .items() instead of Python-2-only .iteritems(); it
    # behaves identically for this iteration on both Python 2 and 3.
    for (state_name, edges) in machine.items():
      current = []
      self.machine[state_name] = current
      for e in edges:
        # You can just send in a plain PatternEdge, without a state, and it's
        # implied that it doesn't change.
        if isinstance(e, PatternEdge):
          edge = (e, None)

        # Old-style interface: a tuple whose first element is a pattern
        # string.
        elif isinstance(e[0], str):
          edge = CompileEdgeOld(e, use_re)
        else:
          edge = e
        current.append(edge)

  # TODO: calls use .lex(), and remove this.
  def run(self, s, start='start', pos_table=None):
    """Deprecated alias for lex() on the default 'typed' channel."""
    return self.lex(s, start=start, pos_table=pos_table)

  def lex(self, s, channel='typed', start='start', pos_table=None):
    """
    Turn string into a typed token stream (a generator).

    Args:
      s: the string to tokenize.
      channel: name of the method invoked on each edge object
        ('typed' for parsing tokens, 'string' for coloring tokens).
      start: name of the initial state.
        TODO: Should 'start' be 'Start' for consistency?
      pos_table: optional list that edges append (start, end) match
        positions to.

    Yields:
      (token_type, value, token_id) triples.

    Raises:
      Error: if an edge object lacks the channel method, or if no edge
        matches at the current position.  (Error subclasses RuntimeError,
        so existing callers still work.)
    """
    pos = 0
    token_id = 0  # tokens 0..n
    context = {}
    state = start

    while True:
      if pos == len(s):
        break

      edges = self.machine[state]

      for (obj, new_state) in edges:  # try edges in ORDER
        # Call the function associated with each edge.
        method = getattr(obj, channel, None)
        if method is None:
          raise Error('Object does not have method for channel %r: %s'
              % (channel, obj))

        token_type, value, new_pos = method(s, pos, context, pos_table)

        if new_pos > pos:  # did the function advance the position?
          pos = new_pos

          if new_state:  # change state if necessary
            state = new_state

          if token_type is not None:  # generate token if desired
            yield token_type, value, token_id
            token_id += 1

          # This edge was accepted.
          break
      else:
        err = MakeErrorString(s, pos)
        raise Error(
            'No tokens matched at position %d:\n%s' % (pos, err))


# TODO: Return SimpleTokenValue?
class SimpleToken(object):
  """Construct a token with the given type, and the value match.group(N).

  For example, SimpleToken('NUMBER') is a function which when called will return
  the pair ('NUMBER', string matched by regex).
  """

  def __init__(self, token_type, group=0):
    self.token_type = token_type
    self.group = group

  def __call__(self, match, context):
    return (self.token_type, match.group(self.group))


def SimpleTokenType(match, context):
  """Use the matched text itself as the token type; the value is None."""
  token_type = match.group(0)
  return (token_type, None)


class MappedTokenValue(object):
  """Extractor producing tokens of a single type; the value is looked up in
  a dictionary keyed by the matched text.
  """

  def __init__(self, token_type, lookup):
    """
    Args:
      token_type: e.g. DIGIT
      lookup: a dictionary that maps match.group(0) to the token VALUE.
    """
    self.token_type = token_type
    self.lookup = lookup

  def __call__(self, match, context):
    captured = match.group(0)
    if captured not in self.lookup:
      raise Error("Can't find captured string in match: %r" % captured)
    return (self.token_type, self.lookup[captured])


class MappedTokenType(object):
  """Extractor producing tokens of multiple types.

  The type is looked up in a dictionary keyed by the matched text; the
  token value is always None.
  """

  def __init__(self, lookup, strict=False):
    """
    Args:
      lookup: a dictionary that maps match.group(0) to the token TYPE.
      strict: if True, a missing key raises Error; otherwise the matched
        text itself becomes the token type.
    """
    self.lookup = lookup
    self.strict = strict

  def __call__(self, match, context):
    key = match.group(0)
    if key in self.lookup:
      return (self.lookup[key], None)
    if self.strict:
      raise Error("Can't find captured string in match: %r" % key)
    return (key, None)


class ErrorToken(object):
  """Extractor that aborts lexing by raising Error.

  'message' is a %-format string; %s receives the matched text.
  """

  def __init__(self, message):
    self.message = message

  def __call__(self, match, context):
    raise Error(self.message % match.group(0))


def CreEdge(cre_str, extractor=None, kind=None, new_state=None):
  """Build a PatternEdge from a CRE pattern string.

  NOTE(review): 'new_state' is accepted but ignored (the original had a
  "No new state" comment) -- confirm callers don't rely on it.
  """
  import cre_compile
  regex = cre_compile.Regex(cre_str)
  regex.allow_compat()  # allow .match()
  return PatternEdge(regex, extractor, kind=kind)


def ReEdge(re_str, extractor=None, kind=None, new_state=None):
  """Build a PatternEdge from a Python regex string, compiled with re.VERBOSE.

  NOTE(review): 'new_state' is accepted but ignored, same as CreEdge.
  """
  compiled = re.compile(re_str, re.VERBOSE)
  return PatternEdge(compiled, extractor, kind=kind)
