"""The core code for Meta parsing and compilation.

Class Hierarchy

  Exception
    Error
      SyntaxError
      InvalidAttributeKey
      InvalidConstructId
      RequiredAttributeValue
    InternalError

  Meta
    LiteralList               : internal representation of literal list
    Type                      : an instance of a Meta type string.
    VarSet                    : a collection of var/value/attribute triples
    Filesystem                : Abstraction of file I/O
    Context                   : legal tokens and chars at given scope
    Line                      : single line of Meta input
    MetaFile                  : represents a single (parsed) .meta file
    Attribute                 : single typed key-value pair in context
      FeatureAttribute        : an attribute appearing before primary
      ListAttribute           : abstract superclass of list-valued attrs
        IdListAttribute       : list of ids
        WordListAttribute     : list of words (space separated)
      IdAttribute             : id-valued attribute
      IntAttribute            : int-valued attribute
      WordAttribute           : word-valued attribute
      StrAttribute            : str-valued attribute
      TypeAttribute           : type-valued attribute
      BlockAttribute          : abstract superclass of block-valued attrs
        SimpleBlockAttribute  : collection of lines
        ComplexBlockAttribute : collection of Constructs
    Construct                 : collection of attributes
      GenericConstruct        : kind encoded in instances, not in class
    MetaLanguage              : abstract superclass of meta languages
      MetaMeta                : the meta meta language
      MetaOopl                : the oopl meta language
      MetaDoc                 : the doc meta language
    BaseLanguage              : abstract superclass of base langs
      BaseLanguageOopl        : abstract superclass of Oopl base langs
        OoplCpp               : All C++-specific info needed by meta
        OoplJava              : All Java-specific info needed by meta
        OoplPython            : All Python-specific info needed by meta
        OoplPerl              : All Perl-specific info needed by meta
        OoplJavascript        : All Javascript-specific info needed by meta
    MetaStream                : A collection of named streams of output
    MetaSegment               : A collection of contiguous source code lines
    Mapping                   : base-to-meta line number mappings
    Compiler                  : A compiler for a specific meta language.
    Progress                  : ...

Grammar Constraints:
 - attribute keys cannot contain whatever context.token('lsel') is set to.
     - parseAttribute stops parsing the key when it encounters lsel.
 - token('lsel') and token('rsel') must be a single character
     - parseAttribute relies on being able to detect them with a single
       char test.
"""

import collections
import copy
import cStringIO
import inspect
import logging
import os
import glob
import pprint
import re
import StringIO
import stat
import shutil
import subprocess
import sys
import tempfile
import types

# Sets up paths to find version-specific meta code.  The env var META_VERSION
# can be used to specify the version (current, beta, previous, ...), namely
# $METAROOT/lib/$version/{meta,python}
import metameta
metameta.Metastrap()

from metaboot.thirdparty.pyfakefs import fake_filesystem
from metaboot.thirdparty.pyfakefs import fake_filesystem_glob
from metaboot.thirdparty.pyfakefs import fake_filesystem_shutil
from metaboot.thirdparty.pyfakefs import fake_tempfile

import meta.compiler.errors
import meta.compiler.lib

import python
import auto.bootstrap

ERROR = object()
LOOKUP = object()
EMPTY = object()
REQUIRED = object()
DEFAULT = object()

class Error(Exception):
  """Base class for all Meta errors.

  Optionally carries the Attribute that triggered the error, so callers
  can report precise source locations.
  """

  def __init__(self, msg, attribute=None):
    """Initializer.

    Args:
      msg: str
        The human-readable error message.
      attribute: Attribute or None
        The attribute (if any) associated with this error.
    """
    super(Error, self).__init__(msg)
    self._attribute = attribute

  def attribute(self):
    """Return the Attribute associated with this error, or None."""
    return self._attribute

class InternalError(Error):
  pass

class Exiting(Error):
  pass

# ----------------------------------------------------------------------
# Exceptions raised during parsing

class ParseError(Error):
  pass

class SyntaxError(ParseError):
  pass

# ----------------------------------------------------------------------
# Exceptions raised during compilation

class CompileTimeError(Error):
  pass

class RequiredAttributeValue(CompileTimeError):
  pass

# ----------------------------------------------------------------------
# Exceptions to be sorted into parse-time or compile-time

class InvalidAttributeKey(Error):
  pass

class InvalidConstructId(Error):
  pass

class InvalidConstruct(Error):
  pass

class InvalidType(Error):
  pass

class InvariantViolated(Error):
  pass

# Use metac -l to have logs write to stdout instead.
Log = meta.compiler.lib.Logger(fp=cStringIO.StringIO())


class Meta(object):
  """Root superclass of all Meta classes; provides path helpers."""

  def rootDir(self):
    """Obtain the root directory for Meta.

    Returns: str

    Raises:
      meta.compiler.errors.Error: If METAROOT is unset or does not exist.
    """
    root = os.getenv('METAROOT')
    if root and IO.exists(root):
      return root
    raise meta.compiler.errors.Error('Failed to find METAROOT %s' % root)

  def metaPath(self, *subpaths):
    """Obtain a path relative to the meta root.

    Args:
      *subpaths: list of str

    Returns: str
    """
    return os.path.join(self.rootDir(), *subpaths)


class LiteralList(Meta):
  """Instances represent syntactic literal lists.

  A literal list looks like 'List<a b c>' (space-delimited) or
  'List,<a,b,c>' (explicit delimiter between 'List' and '<').
  """

  LIST_RE = re.compile('List([^<]*)<(.*)>')

  def delim(self):
    """Return the str separating elements of this list."""
    return self._delim

  def data(self):
    """Return the elements as a list of str."""
    return self._data

  def __init__(self, spec):
    """Initializer.

    Args:
      spec: str
        The meta syntax of a literal list.

    Raises:
      meta.compiler.errors.Error: If spec is not a valid list literal.
    """
    super(LiteralList, self).__init__()
    match = LiteralList.LIST_RE.match(spec)
    if not match:
      raise meta.compiler.errors.Error('Invalid list literal %s' % spec)
    delim, espec = match.groups()
    if delim:
      elements = espec.split(delim) if espec else []
    else:
      # No explicit delimiter: split on any whitespace, display with spaces.
      elements = espec.split() if espec else []
      delim = ' '

    # field delim: str
    #   The delimiter between list elements.
    self._delim = delim

    # field data: list of str
    #   The elements of the list.
    self._data = elements

  @classmethod
  def StartsList(cls, val):
    """Determine if the given value represents a literal list.

    Args:
      val: str
        The value to analyze.
    """
    # TODO(wmh): Need a much more efficient implementation - this is way
    # too expensive for something called very very often.
    if not val.startswith('List'):
      return False
    # The char right after 'List' must be non-alphanumeric (e.g. '<' or a
    # delimiter), so words like 'Listing' are not mistaken for lists.
    return len(val) > 4 and not val[4].isalnum()

  def __str__(self):
    delim = self.delim()
    elements = self.data()
    if delim and delim != ' ':
      return 'List%s<%s>' % (delim, delim.join(elements))
    return 'List<%s>' % ' '.join(elements)

  def __repr__(self):
    return 'LiteralList=%s' % str(self)


class Type(Meta):
  """An abstraction of a Meta type.

  Primitive Types
   - bool
   - char (nightmare of unicode)
   - int<1> to int<128>, with what 'int' refers to be customizable.
       - implemented with masking, shifting, etc.
   - real<32>, real<64>, real<128> with 'real' being customizable.
   - complex

  Native Types

  - These are types that are mapped to baselanguage-level types where
    possible.
  - A meta class exists to define the interface, but the same interface
    is mapped onto base language functionality.
  """
  # Notes on combining the pass-by indicator ('@', '*' and '&') with
  # constness indicator ('#'):
  #   pass-by-pointer
  #      *T  var   -> pointer to T
  #      *#T var;  -> pointer to const T
  #      #*T var;  -> const pointer to T (can modify what var points to
  #
  #      &T  var;  -> reference to T
  #      &#T var;  -> reference to constant T (cannot modify T, cannot modify ref)
  #      #&T var;  -> const reference to T, but references are always const so
  #                   this is the same as &T.
  #
  #      @T  var;  -> copy of T
  #      #@T var;  -> const copy of T (which is no different than just @T)
  #      @#T var;  -> copy of const T (which is no different than just @T)
  #
  # Does it make sense to claim that Meta has any primitive types?  Maybe
  # these are just numeric examples of native types supported by Meta?
  #
  # On other hand, it is useful to be able to ask if the type is primitive
  # in situations where none of @, * or & are specified ... primitive types
  # default to @, others to * (or is & better?)
  PRIMITIVE = {
    # bool, boolean
    'bool': {},
    # char
    'char': {},  # <1>, <2>, <4> ???
    # byte, short, int, long, longlong
    'int': {'args': ('int',)},   # <1> ... <128>
    # unsigned byte, short, int, long, longlong
    'uint': {'args': ('int',)},   # <1> ... <128>
    # float, double
    'real': {'args': ('int',)},  # <32> or <64> or <128>
    # complex
    'complex': {},
  }

  NATIVE = {
    'void': {'defparam': ''},
    # interned (const) string
    #  - It does not make sense to ever do "pass by value" semantics on an
    #    interned string, since they aren't allowed to be copied to another
    #    instance of themselves.
    'str': {},
    # mutable string
    'string': {}, # mutable string
    # map, dict, hashtable, associative array
    'map': {'args': ('*', '*')},
    # immutable ordered container: tuple
    #  - a k-tuple can be specified with either no parameters or with k params.
    #     - tuple
    #     - tuple<int,str,Person>
    'tuple': {},
    # mutable ordered container: vector, list, growable array
    # BUG FIX: was ('*'), which is just the string '*', not a 1-tuple; made
    # consistent with the other 'args' entries ('map', 'int', ...).
    'vec': {'args': ('*',)},
  }

  # Parses a raw type into (prefix, base, params_str).  Raw string so that
  # '\*' and '\S' are regex escapes rather than (deprecated) string-literal
  # escape sequences.
  RE = re.compile(r'^((?:#?\*)*#?|#?@|@#|#&|&#?)?([a-zA-Z0-9_.]+)(?:<(\S+)>)?$')

  # Interned Type instances, keyed by raw type string (see Instance()).
  Repository = {}

  def prefix(self):
    """Return the pass-by/constness prefix (e.g. '@', '*', '&'), or None."""
    return self._prefix

  def base(self):
    """Return the base type name (e.g. 'int', 'vec'), or None if invalid."""
    return self._base

  def native(self):
    """Return True if this is a native or primitive type."""
    return self._native

  def params(self):
    """Return the parameter Types (list of Type or int), or None."""
    return self._params

  def raw(self):
    """Return the raw string representation this Type was created from."""
    return self._raw

  def __init__(self, raw, prefix, base, params=None, native=False):
    """Initializer.  Prefer Type.Instance() to obtain interned instances.

    Args:
      raw: str
        The raw representation of the type.
      prefix: str or None
        The pass-by and const semantics indicators.
      base: str or None
        The base type name.
      params: list of Type or int, or None
        The parameters of this type.
      native: bool
        True if the base-language native type should be used in codegen.
    """
    super(Type, self).__init__()
    # field prefix: str
    #   The pass-by and const semantics indicators.
    self._prefix = prefix

    # field base: str
    #   The base type
    self._base = base

    # field native: bool
    #   True if we are to use the base-language native type associated with
    #   this type when generating base-language code.
    self._native = native

    # field params: list of Type or int
    #   The parameters of this type, as Type instances (or ints)
    self._params = params

    # field raw: str
    #   The raw representation of the type.
    self._raw = raw

  @classmethod
  def Instance(cls, raw, allow_invalid=False):
    """Return the interned instance of a Type given a representation.

    Args:
      raw: str
        The raw type
      allow_invalid: bool
        If True, invalid types do not raise an Error.

    Returns: Type or None

    Raises:
      meta.compiler.errors.InvalidType: If raw is invalid and allow_invalid
        is False.
    """
    result = cls.Repository.get(raw, None)
    if not result:
      match = cls.RE.match(raw)
      if match:
        prefix, base, params_str = match.groups()
        params = cls.ParseTypes(params_str, allow_invalid=allow_invalid)
        native = base in cls.NATIVE
        primitive = base in cls.PRIMITIVE
        if not prefix and base != 'void':
          # Unprefixed types default to pass-by-copy for primitives and
          # pass-by-pointer for everything else.
          prefix = '@' if primitive else '*'
        result = cls(raw, prefix, base, params=params, native=native or primitive)
      else:
        if allow_invalid:
          # We are to return a Type instance even though it is invalid.
          result = cls(raw, prefix=None, base=None, native=False)
        else:
          result = None
      if result:
        # Intern by raw string so repeated lookups return the same object.
        cls.Repository[result.raw()] = result
    if not allow_invalid:
      if not result or not result.isValid():
        raise meta.compiler.errors.InvalidType('Invalid type %s' % raw)
    return result

  @classmethod
  def ParseTypes(cls, params_str, allow_invalid=False):
    """Parse a comma-separated list of types into a list of Types.

    This parses the parameterized types associated with container classes.
    For example, a Map is of the form '*map<str,@int>', and a
    vector of vector of maps is: vec<vec<map<str,*int>>>'.  This
    method does not parse the entire type, just the portion inside the <...>,
    so for our two examples, params_str would be 'str,@int' and
    'vec<map<str,*int>>' respectively.

    Args:
      params_str: str
        The comma-separated list of within-parameter types to parse.
      allow_invalid: bool
        If True, allow invalid types, otherwise raise Error.

    Returns: list of Type or None
    """
    if not params_str:
      return None
    result = []
    # Track '<...>' nesting depth so commas inside nested parameter lists
    # are not treated as separators.
    angles = 0
    start = 0
    i = 0
    N = len(params_str)
    sep = ','
    while True:
      if i > N:
        break
      if i == N or (params_str[i] == sep and angles == 0):
        raw = params_str[start:i]
        type_ = cls.Instance(raw, allow_invalid=allow_invalid)
        result.append(type_)
        i += 1
        start = i
      else:
        c = params_str[i]
        if c == '<':
          angles += 1
        elif c == '>':
          angles -= 1
          if angles < 0:
            # TODO(wmh): What to do when allow_invalid is true?
            raise meta.compiler.errors.Error(
              'Invalid params (too many <): %s' % params_str)
        i += 1
    return result

  def isValid(self):
    """Return True if this Type parsed successfully."""
    base = self.base()
    return self._native or bool(self.prefix() and base)

  def __str__(self):
    """Return the canonical string form, e.g. '*vec<@int>'."""
    raw = self.raw()
    if raw.startswith('{#') and raw.endswith('#}'):
      # Escaped literal types are rendered verbatim.
      result = raw
    else:
      params_str = ''
      params = self.params()
      if params:
        params_list = []
        for type_ in params:
          params_list.append(str(type_))
        params_str = '<' + ','.join(params_list) + '>'
      result = '%s%s%s' % (
        self.prefix(), self.base(), params_str)
      #if result != self.raw():
      #  result += ' [%s]' % self.raw()
    return result


class VarSet(Meta):
  """Maintain a collection of var/value/Attribute tuples."""

  def __init__(self, items=None):
    """Initializer.

    Args:
      items: list of tuple or None
        Optional seed data; each tuple is
        (var, value[, attribute[, delim[, width]]]).
    """
    super(VarSet, self).__init__()
    # field map: dict
    #   Maps variable names to lists containing [value, attribute, delim, width]
    self._map = {}

    for item in (items or []):
      # Pad missing optional fields with None so every entry is uniform.
      padded = list(item) + [None] * (5 - len(item))
      self.addVar(padded[0], padded[1], attribute=padded[2],
                  delim=padded[3], width=padded[4])

  def addVar(self, var, value, attribute=None, delim=None, width=None):
    """Add a var/value/attribute triple.

    If 'var' already exists, it is replaced with the new data.

    Args:
      var: str
        The variable to add
      value: str or list of str|MetaSegment
        The value to associate with the var
      attribute: Attribute or None
        The meta-level attribute
      delim: str or None
        Only meaningful if the value is a list, and indicates what
        str separates elements of the list.  If None, the list of values
        is assumed to represent lines (i.e. delim == '\n').  If it is, for
        example, ', ', then the values of the list are to be separated by
        comma-space.  The advantage of maintaining the value as a list with
        delimiter is that it allows us to perform line-wrapping at element
        boundaries.
      width: int or None
        The width of lines generated by this var.  Currently only meaningful
        when value is a list and delim is not None or '\n'.
    """
    self._map[var] = [value, attribute, delim, width]

  def __contains__(self, var):
    """Return True if var has been added to this set."""
    return var in self._map

  def get(self, var):
    """Return the [value, attribute, delim, width] list for var.

    Unknown vars yield [None, None, None, None].
    """
    entry = self._map.get(var, None)
    if entry:
      return entry
    return [None] * 4

  def interpolate(self, var, values):
    """Interpolate the value of 'var'.

    Args:
      var: str
        The name of the var to interpolate.
      values: dict
        The var/value pairs to interpolate within the value of var.

    Raises:
      KeyError: If the value of the variable contains any variable references
      that are not specified in values (if the value is being interpolated,
      it must be fully interpolated).  Might be useful to allow partial
      interpolation, but that will need a different implementation.
    """
    entry = self._map.get(var)
    if entry is not None:
      entry[0] = entry[0] % values

  def asStr(self, name='?', indent=''):
    """Format as string

    Args:
      name: str
        Name of construct
      indent: str
        Prefix prepended to every output line.
    """
    rows = []
    varmap = self._map
    for var in sorted(varmap):
      value, attribute, delim, width = varmap[var]
      # Show embedded newlines literally so each var stays on one line.
      shown = value.replace('\n', '\\n') if isinstance(value, str) else str(value)
      row = '%s%-15s = %s' % (indent, var, shown)
      if attribute:
        row += ' [%s.%s line %d]' % (name, attribute.key(), attribute.line())
      rows.append(row)
    return '\n'.join(rows)


class Filesystem(object):
  """Abstract away file I/O.

  This class provides three different implementations of I/O:

   - If memory is False

      - use disk-based file I/O (i.e. normal).

   - If memory is True and fake is False

      - Use a home-grown in-memory filesystem. Although this works fine for
        generating the file contents, if we attempt to compile the contents of
        a file that represents a module, and that file imports another module,
        we would need to reimplement __import__ to have it read from the fake
        memory system.  This work has not been done, as I do not know how to
        create instances of class 'module' from a file.

      - The fake_filesystem approach below was implemented to address these
        shortcomings of this approach.

      - Once it is verified that the fake_filesystem approach will work in
        GAE, this variant should be deleted.

    - If memory is True and fake is True

      - Use the fake_filesystem (et.al.) modules to replace all file I/O with
        a full-fledged in-memory version that transparently allows us to keep
        using the same methods as the disk-version uses.

      - See the method runClassFromMemory() for an example of how code
        generated in this way can be executed and used to invoke the
        _Meta_Run() method of a specified class.
  """

  def __init__(self, memory=False, pathmap={}):
    """Initializer.

    Args:
      memory: bool
        If True, write output files into memory, not to disk.
      fake: bool
        If True, use fake_filesystem when memory is True.
      pathmap: dict or None
        Maps path strings to path content.  If content is None, it is a request
        to obtain the contents from disk (using the key as the rile path). This
        is only used when memory==True.
    """
    # field disk_filesystem: dict or None
    #   None if we are using a memory filesystem, a dict otherwise.
    #   Keys are file paths, values are bool True.
    self._disk_filesystem = {}

    # field fake_filesystem: fake_filesystem.FakeFilesystem or None
    #   If we are using an in-memory filesystem via fakefilesystem.
    self._fake_filesystem = None

    # field fake_origs: dict or None
    #   If we are using an in-memory filessytem via fakefilesystem, this
    #   maintains a mapping from conceptual module name to original module.
    self._fake_origs = None

    # field use_fake: bool
    #   If True, when implementing in-memory interface, use fake_filesystem.
    #   if False, when implementing in-memory interface, use home-grown
    #   mini emumulation instead of full-fledge fake_filesystem.
    #   If not implementing in-memory interface, this field is unused.
    self._use_fake = memory

    if memory:
      self.useMemory(pathmap=pathmap)

  MODULES_TO_REPLACE_FILESYSTEM_IN = (sys.modules[__name__],)

  def useMemory(self, pathmap={}):
    """Make this instance use memory instead of disk."""

    import os
    import glob
    import shutil
    import tempfile

    self._fake_origs = {
      'os': os,
      'open': open,
      'glob': glob,
      'shutil': shutil,
      'tempfile': tempfile,
    }

    # GAE does not support os.umask()
    def umask(val):
      return val
    os.umask = umask

    filesystem = fake_filesystem.FakeFilesystem()
    self._fake_filesystem = filesystem

    # Define the required fake filesystem.  We do this before we rebind the
    # modules so that we still have access to the real filesystem to get
    # data to populate our fake filesystem.
    for path, contents in pathmap.iteritems():
      if contents is None:
        with open(path, 'r') as fp:
          contents = fp.read()
      self.addFakeFile(path, contents=contents)
      logging.info('Populated %d bytes into %s', len(contents), path)

    # Now rebind the various modules/functions making up the IO interface
    # to their fake variants.
    faux_os = fake_filesystem.FakeOsModule(filesystem)
    # TODO(wmh): Add support for the following if unittests require it.
    # Hopefully, one can parse the meta file using the real filesystem,
    # then switch to a faux filesystem for file generation, which may allow
    # us to avoid this hackery.
    if False:
      env = os.environ
      faux_os.getenv = lambda var: env.get(var, None)
      faux_os.makedirs(faux_os.getenv('METAROOT'), 0755)
    
    faux_open = fake_filesystem.FakeFileOpen(filesystem)
    faux_glob = fake_filesystem_glob.FakeGlobModule(filesystem)
    faux_shutil = fake_filesystem_shutil.FakeShutilModule(filesystem)
    faux_tempfile = fake_tempfile.FakeTempfileModule(filesystem)

    for module in self.MODULES_TO_REPLACE_FILESYSTEM_IN:
      logging.info('REPLACING file modules in %s', module)
      module.os = faux_os
      module.open = faux_open
      module.glob = faux_glob
      module.shutil = faux_shutil
      module.tempfile = faux_tempfile

    print '***** HERE in useMemory with %s' % faux_os.getenv('METAROOT')

  def undoMemory(self):
    """Reinstate module remaps."""
    origs = self._fake_origs
    for module in self.MODULES_TO_REPLACE_FILESYSTEM_IN:
      module.os = origs['os']
      module.open = origs['open']
      module.glob = origs['glob']
      module.shutil = origs['shutil']
      module.tempfile = origs['tempfile']

  def reader(self, path, log=False):
    """Open a file for reading.

    When we are operating in memory_filesystem mode, this method checks to see
    if an entry for the path exists.  If it doesn't, the actual file is
    looked for, read into memory and associated with the path in the
    memory filessystem.  An cStringIO filehandle on that in-memory string is
    returned.

    Args:
      path: str
        The path to read.
      log: bool
        If True, log the reading of the path.

    Returns: file-like object

    Raises:
      IOError: If the file does not exist.
    """
    fp = open(path, 'r')
    self._disk_filesystem[path] = True
    if log:
      logging.info('Opening %s for reading', path)
    return fp

  def writer(self, path, log=False):
    """Open a file for writing.

    Args:
      path: str
        The path to read.
      log: bool
        If True, log the writing of the path.

    Returns: file-like object

    Raises:
      IOError: If the file does not exist.
    """
    fp = open(path, 'w')
    self._disk_filesystem[path] = True
    if log:
      logging.info('Opening %s for reading', path)
    return fp

  def close(self, fp, log=False):
    """Close a filehandle opened with reader() or writer().

    Args:
      fp: file-like object
        The filehandle to close.
      log: bool
        If True, log the closing of the path.
    """
    # We either have a writable filesystem or are using a fake filesystem.
    # (close normally)
    fp.close()

    if self._use_fake:
      # FakeFileWrapper.close() in fake_filesystem is not fully implemented,
      # in that it does not properly set fp.closed to True.  There is no
      # 'closed' defined on FakeFileWrapper, but __getattr__ delegates to
      # fp._io, which is a cStringIO.StringIO, which does have 'closed'.
      # But fp._io.close() is never called as part of FakeFileWrapper.close().
      # The code for close() should be something like:
      #   def close(self):
      #     """File close."""
      #     if self._update:
      #       self._file_object.SetContents(self._io.getvalue())
      #     self._filesystem.CloseOpenFile(self)
      #     self._io.close()  # <---- new code
      #     if self._delete_on_close:
      #       self._filesystem.RemoveObject(self.name)
      if not fp._io.closed:
        fp._io.close()

  def exists(self, path):
    """Determine if path exists.

    If using a memory filesystem, we check both it and disk.

    Args:
      path: str
        The path to check for existence.
    """
    return os.path.exists(path)

  def mkdir(self, thedir, mode=0755):
    os.mkdir(thedir, mode)

  def makedirs(self, thedir, mode=0755):
    os.makedirs(thedir, mode)

  def listdir(self, thedir):
    """Obtain the immediate child files/dirs within thedir.

    Supports in-memory filesystems.

    Args:
      thedir: str
        The directory to query.
    """
    return os.listdir(thedir)

  def summarize(self, fp=sys.stdout):
    if self._use_fake:
      fp.write('On fake_filesystem\n')
      diskfs = self._disk_filesystem
      fakefs = self._fake_filesystem
      for path in sorted(diskfs):
        fakefile = self.getFakeFile(path)
        size = len(fakefile.contents)
        fp.write('  %6d bytes: %s\n' % (size, path))
    else:
      fp.write('On-disk filesystem\n')
      fs = self._disk_filesystem
      for path in sorted(fs):
        st = os.stat(path)
        fp.write('  %6d bytes: %s\n' % (st.st_size, path))

  def addFakeFile(self, path, contents='', perms=0644, inode=None):
    """Add a new file to our fake filesystem.

    This method is only valid if we are using in-memory with fake filesystem.
    """
    if not self._use_fake:
      raise meta.compiler.errors.Error(
        'addFakeFile() only valid for fake in-memory')
    kwds = {}
    kwds['st_mode'] = stat.S_IFREG | perms
    kwds['contents'] = contents
    if inode is not None:
      kwds['inode'] = inode
    kwds['create_missing_dirs'] = True
    self._fake_filesystem.CreateFile(path, **kwds)

  def addFakeDir(self, path, perms=0755, inode=None):
    if not self._use_fake:
      raise meta.compiler.errors.Error(
        'addFakeDir() only valid for fake in-memory')
    kwds = {}
    kwds['perm_bits'] = stat.S_IFREG | perms
    if inode is not None:
      kwds['inode'] = inode
    #kwds['create_missing_dirs'] = True
    self._fake_filesystem.CreateDirectory(path, **kwds)

  def getFakeFile(self, path):
    if not self._use_fake:
      raise meta.compiler.errors.Error(
        'addFakeDir() only valid for fake in-memory')
    return self._fake_filesystem.GetObject(path)

  def runClassFromMemory(self, fqcls, args=[]):
    """Execute python code written to memory.

    This method is only applicable if we are performing fake-filesystem-based
    in-memory compilation.  Given a fully qualified class name, it finds
    the in-memory content representing the module containing the class,
    evaluates that code, finds the _Meta_Run() method from the named class,
    and executes it with 'args' as the sole argument.

    IMPORTANT: This method must be executed AFTER self.undoMemory() has been
    invoked, so that we have access to the real filesystem (alternatively,
    all of the library code must reside within the fake filesystem).

    Args:
      fqcls: str
        dot-notation fully qualified class name to execute _Meta_Run() on.
      args: list of str
        The args to pass to the _Meta_Run() method.
    """
    if not self._use_fake:
      raise meta.compiler.errors.Error(
        'addFakeDir() only valid for fake in-memory')

    parts = fqcls.split('.')
    clsname = parts.pop()
    modpath = os.path.join(
      os.getcwd(), '.meta', 'oopl', 'python', *parts) + '.py'
    contents = self.getFakeFile(modpath).contents
    globals_dict = {}
    exec(contents, globals_dict)
    cls = globals_dict['Queen']
    run = getattr(cls, '_Meta_Run')
    run([])

  def baseFile(self, metafile_path, cid, baselang, suffix=None):
    """Obtain the file path associated with a cid generated by a metafile_path.

    Args:
      metafile_path: str
        The path to a .meta file that has been compiled into baselang source.
      cid: str
        The construct to obtain a path to.  This is either a namespace (for
        python and other namespace-primary languages) or a namespace.class (for
        C++, Java, and other class-primary languages).
      baselang: BaseLanguage
        The baselanguage the code was compiled into.
      suffix: str or None
        The suffix to look for (without the '.').  If None, the default
        suffix for the baselang in question is used.
    """
    if suffix is None:
      suffix = baselang.suffix()
    rootdir = os.path.dirname(metafile_path)
    parts = cid.split('.')
    parts[-1] += '.' + suffix
    result = os.path.join(rootdir, '.meta', 'oopl', baselang.id(), *parts)
    return result

  def getPublicFiles(self, rootpath):
    """Obtain all files below rootpath that have no hidden components.

    Args:
      rootpath: str
        The path to start from.

    Returns: list of str
    """
    logging.info('Here with ROOTPATH %s', rootpath)
    result = []
    k = len(rootpath)
    fs = self._disk_filesystem
    for path in sorted(fs):
      if path.startswith(rootpath):
        subpath = path[k:]
        if '/.' not in subpath:
          result.append(path)
        else:
          logging.warning('Rejected non-public %s' % path)
      else:
        logging.warning('Rejected non-target %s', path)
    return result


# TODO(wmh): It should be possible, with a bit of rewriting, to have each
# MetaFile instance maintain a Filesystem instance (same Filesystem instance
# shared across all MetaFile instances).  There are only a few places where
# IO is used where it cannot trivially be replaced by metafile.filesystem()
IO = Filesystem(memory=False)


class Context(Meta):
  """Represents the syntactical context within which to parse a file.

  Instances of this class identify:
   - the flattened config stack
   - the constructor terminator (i.e. ';')
   - the start-of-scope indicator (i.e. ':')
   - the legal set of feature keys in the current context
   - the legal set of feature values in the current context
   - the legal set of primary keys (aka constructs) in the current context
   - the legal set of secondary keys in the current context
   - for each attribute, whether its key and/or value are optional, and
     what value to use for such optional keys/values.

  The legal set of constructs and attributes is defined, at the top-level, by
  a Schema construct that has a 'config' attribute that defines Construct
  constructs that have 'config' attributes that define Attribute constructs
  that define all of the attributes of the config.

  However, most constructs that have a scope attribute also have a config
  attribute that allows one to modify the syntax of the language within
  the scope block.  Constructs and attributes defined within such config
  blocks can override default values of attributes, define new attributes,
  delete existing attributes, define new constructs, delete existing
  constructs, etc.

  It is often necessary to (conceptually) look up the config stack hierarchy
  for Construct or Attribute attribute values.  For example, a common
  use case is when obtaining the value of an attribute of a construct, and
  the syntactic representation of that construct did not specify an explicit
  value for the attribute in question.  In such situations, we want to look
  in the 'config' block of the parent context to see if it defined the
  relevant Construct and Attribute, and provided a 'default' value within
  that Attribute.  If so, we can use it.  Otherwise, we find the parent
  construct of that construct and do the same thing with it, recursively
  until we find a default value.  This is always guaranteed to eventually
  terminate because the top-level Schema specifies defaults for everything.

  However, the above conceptual approach is far too inefficient.  Instead,
  we create Config instances that represent a flattening of the config
  stack hierarchy, mapping Construct keys to dicts of Attribute keys to
  dicts of Attribute values.
  """

  # This maps type strings (i.e. strings that appear as values in the
  # 'type' attribute of construct Attribute) to Attribute instances.
  # It is populated by *Attribute.Initialize()
  # NOTE(review): starts empty here; default() relies on it being populated
  # externally before '<empty>' defaults are resolved — confirm that every
  # *Attribute class registers itself at startup.
  EMPTY_TYPE_MAP = {}

  def legal(self):
    """Return the list of Construct instances legal in this context."""
    return self._legal

  def syntax(self):
    """Return the flattened syntax dict (construct -> attr -> attr-attr -> value)."""
    return self._syntax

  def primaries(self):
    """Return the dict mapping primary keys to Construct instances."""
    return self._primaries

  def enders(self):
    """Return the frozenset of characters that end a token."""
    return self._enders

  def __init__(self, parent_context, legal_constructs, **kwds):
    """Initializer.

    Each Construct construct has:
      - an id (the kind of construct)
      - a complex config block that contains Attribute constructs.
    Each Attribute construct has:
      - an id (the name of the attribute)
      - a type (establishes the legal values for the attribute value)
      - a default value (which may indicate that the value is required)
      - a list of abbrevs (alternative strings that can be used to identify
        the attribute instead of the id itself)
      - an indication of whether the attribute key is required
      - an indication of whether the attribute value is required
      - an indication of whether the attribute itself is required

    When parsing text within a particular context, we have a set of legal
    constructs ...

    Config Stacks:
      The legal set of constructs and attributes is defined, at the
      top-level, by a Schema construct that has a 'config' attribute that
      defines Construct constructs that have 'config' attributes that define
      Attribute constructs that define all of the attributes of the config.

      However, most constructs that have a scope attribute also have a config
      attribute that allows one to modify the syntax of the language within
      the scope block. Constructs and attributes defined within such config
      blocks can override default values of attributes, define new
      attributes, delete existing attributes, define new constructs, delete
      existing constructs, etc.

      It is often necessary to (conceptually) look up the config stack
      hierarchy looking for Construct or Attribute attribute values. For
      example, a common use case is when obtaining the value of an attribute
      of a construct, and the syntactic representation of that construct did
      not specify an explicit value for the attribute in question. In such
      situations, we want to look in the 'config' block of the parent context
      to see if it defined the relevant Construct and Attribute, and provided
      a 'default' value within that Attribute. If so, we can use it.
      Otherwise, we find the parent construct of that construct and do the
      same thing with it, recursively until we find a default value. This is
      always guaranteed to eventually terminate because the top-level Schema
      specifies defaults for everything.

      However, the above conceptual approach is far too inefficient. Instead,
      we create Config instances that represent a flattening of the config
      stack hierarchy, mapping Construct keys to dicts of Attribute keys to
      dicts of Attribute values.

    Args:
      parent_context: Config
        The Config that is active when this new Config is needed (because
        of the existence of a 'config' attribute in some construct).
      legal_constructs: list of Construct
        The list of constructs that are legal in the current scope.
      **kwds: dict
    """
    super(Context, self).__init__()
    if parent_context is None:
      syntax = {}
      consinfo_map = {}
    else:
      syntax = copy.deepcopy(parent_context.syntax())
      # NOTE(review): consinfo() is defined below with a required 'cons'
      # parameter, yet it is invoked here with no argument (apparently
      # expecting the whole map) — verify the signature supports this.
      consinfo_map = copy.deepcopy(parent_context.consinfo())

    # field parent_context: Context
    #   The context active in ancestor scopes.
    self._parent_context = parent_context

    # field legal: list of Construct
    #   The legal constructs within the current context.
    self._legal = legal_constructs

    # field syntax: dict
    #   A dict mapping construct to attribute to attribute-attribute to value.
    #   This is populated at the end of this method.
    self._syntax = syntax

    # field consattrmap: dict
    #   Maps construct id to attribute key to Attribute instance. Lazily
    #   populated by defattr().
    self._consattrmap = {}
    # TODO(wmh): Should this start with a copy of the attrmap from parent_context???

    # field consinfomap: dict
    #   Maps construct ids to dicts containing
    #     featvals = dict mapping feature values to associated feature keys
    #     featkeys = dict mapping feature ids/abbrevs to canonical feature ids
    #     secondaries = dict mapping secondary ids/abbrevs to canonical ids
    self._consinfomap = consinfo_map

    primaries = {}
    all_featkeys = {}
    all_featvals = {}
    all_secondaries = {}
    for cons_cons in legal_constructs:
      cid = cons_cons.id()
      primaries[cid] = cons_cons
      featkeys = {}
      featvals = {}
      secondaries = {}
      consinfo = consinfo_map.setdefault(
        cid,
        {'featvals': featvals,
         'featkeys': featkeys,
         'secondaries': secondaries})
      # NOTE(review): if cid was already present in consinfo_map (inherited
      # from the parent context), the local featkeys/featvals/secondaries
      # dicts populated below are NOT the dicts stored in the map (and the
      # 'consinfo' local is never used) — confirm this is intended.

      # TODO(wmh): Also need to add any abbrevs of the primary attribute to
      # primaries ... this is the set of all tokens that identify constructs.
      cdata = syntax.setdefault(cid, {})
      config = cons_cons.attr(
        'config', default=ComplexBlockAttribute.Empty)
      for attr_cons in config.value():
        aid = attr_cons.id()
        kind = attr_cons.attrval('kind')
        if kind == 'feature':
          if aid in featkeys:
            self.error(
              "Construct '%s' has two features with key '%s'" % (cid, aid))
          elif aid in featvals:
            self.error(
              "Construct '%s' has '%s' as both a feature key and value" %
              (cid, aid))
          else:
            featkeys[aid] = aid
            all_featkeys.setdefault(aid, []).append(cons_cons)

            abbrevs = attr_cons.attrval('abbrevs', defattr=None)
            if abbrevs:
              print '***** HERE with construct %s feature %s abbrevs %s' % (
                cid, aid, abbrevs)
              # for each abbrev, add featkeys[abbrev] = aid
            enum = LiteralList(attr_cons.attrval('type'))
            for enumval in enum.data():
              if enumval in featvals:
                # NOTE(review): this message has four placeholders but no
                # format arguments are supplied (and the implicit string
                # concatenation lacks a separating space), so the logged
                # text will contain literal %s markers — confirm and fix.
                self.error(
                  "Construct '%s' has feature value '%s' defined on both"
                  "'%s' and '%s'", line=0)
              elif enumval in featkeys:
                raise meta.compiler.errors.Error(
                  "Construct '%s' has '%s' as both a feature key and value" %
                  (cid, enumval))
              else:
                featvals[enumval] = aid
                all_featvals.setdefault(enumval, []).append(cons_cons)
        elif kind == 'secondary':
          # We first ensure that no secondary key is also a feature key,
          # feature value or primary key for any other construct in the
          # context ... this allows us to implement implicit construct
          # termination by assuming a construct ends if we are parsing
          # secondaries and encounter a feature key/value or primary key.
          if aid in all_featkeys or aid in all_featvals or aid in primaries:
            raise meta.compiler.errors.Error(
              'Construct %s has secondary %s which is also a '
              'feature/primary key/value elsewhere' % (cid, aid))
          all_secondaries.setdefault(aid, []).append(cons_cons)
          # Now record cons-specific info
          secondaries[aid] = aid
          abbrevs = attr_cons.attrval('abbrevs', defattr=None)
          if abbrevs:
            if isinstance(abbrevs, str):
              abbrevs = LiteralList(abbrevs)
              # NOTE(review): this loop is nested inside the isinstance
              # check, so abbrevs that arrive already as a LiteralList are
              # never registered — confirm whether the loop should be
              # dedented one level.
              for abbrev in abbrevs.data():
                #print '**** HERE with construct %s secondary %s abbrevs %s' % (
                #  cid, aid, abbrev)
                secondaries[abbrev] = aid

        adata = cdata.setdefault(aid, {})
        for attribute in attr_cons.order():
          akey = attribute.key()
          aval = attribute.value()
          adata[akey] = aval
          if akey == 'abbrevs':
            if isinstance(aval, str):
              # TODO(wmh): Need to clean up how/when literal lists are converted
              # to/from strings.
              aval = LiteralList(aval)
            for abbrev in aval.data():
              if abbrev in cdata:
                raise meta.compiler.errors.Error(
                  "Same abbrev specified for two different constructs: '%s'" %
                  abbrev)
              else:
                cdata[abbrev] = adata

    # field primaries: dict
    #   Maps primary keys/abbrevs to Construct instances.
    self._primaries = primaries

    # field all_featkeys: dict
    #   Maps featkeys to list of Construct instances having that featkey
    self._all_featkeys = all_featkeys

    # field all_featvals: dict
    #   Maps featvals to list of Construct instances having that featval
    self._all_featvals = all_featvals

    # field all_secondaries: dict
    #   Maps secondary keys to list of Construct instances having that
    #   secondary.
    self._all_secondaries = all_secondaries

    # field tokens: dict
    #   Maps conceptual token names to str values.
    self._tokens = {
      'scope': kwds.get('scope', ':'),
      'term': kwds.get('term', ';'),
      'end': kwds.get('end', 'end'),
      'lsel': kwds.get('lsel', '<'),
      'rsel': kwds.get('rsel', '>'),
      'remark': kwds.get('rem', '/#'),
      'blockdent': kwds.get('blockdent', 2),
      'field_prefix': kwds.get('field_prefix', '_'),
      'field_suffix': kwds.get('field_suffix', ''),
      # Why does everything go bonkers if we use 'self' instead of 'this' as
      # the key here??
      'this': kwds.get('this', 'self'),  # receiver of instance methods
      'cls': kwds.get('cls', 'cls')      # receiver of class methods
    }

    # field enders : frozen_set
    #   The set of characters that represent the end of a token.
    self._enders = frozenset(
      [' ', '\n', self.token('term')])

    # field constructs: dict
    #   Maps construct names to Construct instances.  Indicates the legal set
    #   of constructs allowed within the current scope.
    constructs = {}
    fkmap = {}
    fvmap = {}
    fkre = None
    fvre = None
    for construct in legal_constructs:
      cid = construct.id()
      constructs[cid] = construct
      #config = construct.attr('config')
      # NOTE(review): this inner loop is a placeholder (body is 'pass'), and
      # fkmap/fvmap/fkre/fvre above are never populated — unfinished work?
      for attr_cons in construct.order():
        pass
    self._constructs = constructs

    # field res: dict
    #   Maps conceptual regular expression names to regular expressions.
    #     term:
    #       the terminator regular expression
    #     param:
    #       the attribute parameter regular expression
    #     feature_keys:
    #       matches any legal feature attribute key from any legal construct.
    #     feature_values:
    #       matches any legal feature attribute value from any legal construct.
    res = {
      'term': re.compile(
        '(?:(%s)(?: (\S+)(?: (\S+))?)?)?%s' %
        (self.token('end'), self.token('term'))),
      'termx': re.compile(
        '(?:(%s)(?: (\S+)(?: (\S+))?)?)?%s\s*$' %
        (self.token('end'), self.token('term'))),
      'aparam': re.compile(
        '%s(.*)%s' % (self.token('lsel'), self.token('rsel'))),
      'line_comment': re.compile(
        r'((?:^|\n)\s*)%s' % self.token('remark')),
      'quote_dent': re.compile(r'^\s*>\|'),
    }
    self._res = res

  def show(self, fp=sys.stdout, verbose=False):
    """Write a readable representation of myself to fp.

    Args:
      fp: file-like object
        Where to write output.
      verbose: bool
        If True, print out lots of info (full construct dumps and the
        flattened syntax map, in addition to the summary sections).
    """
    fp.write('Context:\n')
    fp.write('  Parent Context: %s\n' % self._parent_context)
    if verbose:
      fp.write('  Legal Constructs:\n')
      sep = '-' * 70
      for cons in self._legal:
        fp.write('    ' + sep)
        fp.write(cons.asStr(self, indent='    ') + '\n')
      fp.write('    ' + sep)
    else:
      fp.write('  Legal Constructs: %s\n' %
               ' '.join(sorted([cons.id() for cons in self._legal])))
    if verbose:
      # Dump the flattened syntax map:
      # construct -> attribute -> attribute-attribute -> value.
      fp.write('  Syntax:\n')
      syntax = self.syntax()
      for cons in sorted(syntax):
        fp.write('    %s:\n' % cons)
        attr_data = syntax[cons]
        for attr in sorted(attr_data):
          fp.write('      %s:\n' % attr)
          attrattr_data = attr_data[attr]
          for attrattr in sorted(attrattr_data):
            val = attrattr_data[attrattr]
            fp.write('        %s = %s\n' % (attrattr, val))
    fp.write('  Construct Info:\n')
    consinfo = self._consinfomap
    for cons in sorted(consinfo):
      fp.write('    %s:\n' % cons)
      consdata = consinfo[cons]
      fp.write('      featvals: %s\n' % (consdata['featvals']))
      fp.write('      featkeys: %s\n' % (consdata['featkeys']))
      fp.write('      secondaries: %s\n' % (consdata['secondaries']))
    fp.write('  Feature Keys:\n')
    all_featkeys = self._all_featkeys
    for featkey in sorted(all_featkeys):
      fp.write(
        '    %-20s: %s\n' %
        (featkey, ', '.join([c.id() for c in all_featkeys[featkey]])))
    fp.write('  Feature Values:\n')
    all_featvals = self._all_featvals
    for featval in sorted(all_featvals):
      fp.write(
        '    %-20s: %s\n' %
        (featval, ', '.join([c.id() for c in all_featvals[featval]])))
    fp.write('  Secondary Keys:\n')
    all_secondaries = self._all_secondaries
    for secondary_key in sorted(all_secondaries):
      fp.write(
        '    %-20s: %s\n' %
        (secondary_key,
         ', '.join([c.id() for c in all_secondaries[secondary_key]])))

  def consinfo(self, cons=None):
    """Obtain the consinfo for a given construct (or the entire map).

    Args:
      cons: str or None
        The construct id.  If None, the full map (construct id -> consinfo
        dict) is returned.  This backward-compatible default fixes the
        zero-argument call made by __init__ when cloning the parent
        context's consinfo map, which previously raised TypeError.

    Returns: dict or None
      For a given cons, a dict containing
        featkeys = dict
          maps key/abbrevs to key
        featvals = dict
          maps values to keys.
      (or None if cons is unknown).  If cons is None, the whole map.
    """
    if cons is None:
      return self._consinfomap
    return self._consinfomap.get(cons, None)

  def clone(self):
    """Return a new Context with the same parent, legal constructs and tokens."""
    return self.__class__(self._parent_context, self._legal, **self._tokens)

  def token(self, name):
    """Obtain the token with given conceptual name.

    Raises:
      KeyError: if name does not exist in self._tokens.

    Args:
      name: str

    Returns: str or int
      Most tokens are short strings; 'blockdent' is an int.
    """
    return self._tokens[name]

  def re(self, name):
    """Obtain the regular expression with given conceptual name.

    Raises:
      KeyError: if name does not exist in self._res.

    Args:
      name: str

    Returns: compiled regular expression object.
    """
    return self._res[name]

  def _value(self, cons, attr, key):
    """Obtain the value of 'key' for a given attribute in a given construct.

    Performs a three-level lookup in the flattened syntax map; missing
    levels yield None rather than raising.

    Args:
      cons: str
        The construct name.
      attr: str
        The attribute name.
      key: str
        The key whose value is desired.

    Returns: varies
    """
    attr_map = self._syntax.get(cons, {})
    key_map = attr_map.get(attr, {})
    return key_map.get(key, None)

  def default(self, cons, attr, atype='str'):
    """Obtain the 'default' value for a given attribute in a given construct.

    Args:
      cons: str
        The construct name.
      attr: str
        The attribute name.
      atype: str
        The attribute type.  Default values are stored as strings, and must
        be converted in some situations.

    Raises:
      RequiredAttributeValue: if the stored default is the '<required>'
        sentinel (the caller was expected to supply an explicit value).
      Error: if the '<empty>' sentinel has no registered empty value for
        this attribute's type.

    Returns: varies depending on attribute type.
    """
    result = self._value(cons, attr, 'default')
    if result == '<required>':
      # It is an error to find this value - raise an exception.
      raise meta.compiler.errors.RequiredAttributeValue(
        "Attribute %s.%s is missing a 'default' value" % (cons, attr))
    elif result == '<empty>':
      # We return a type-specific empty value.
      type_ = self.type(cons, attr)
      default_attribute = Context.EMPTY_TYPE_MAP.get(type_, None)
      if default_attribute is None:
        if type_.endswith('-list'):
          result = []
        else:
          raise meta.compiler.errors.Error(
            '<empty> is not a legal default for construct %s attribute %s '
            '(type %s)' % (cons, attr, type_))
      else:
        result = default_attribute.value()
    elif atype == 'int':
      try:
        result = int(result)
      except ValueError:
        # NOTE(review): error() logs but does not raise, so on conversion
        # failure the unconverted string is returned — confirm intended.
        self.error(
          "Default value '%s' cannot be converted to type '%s'" %
          (result, atype))

    return result

  def defattr(self, construct, attr, keysel=None):
    """Obtain an Attribute instance representing a default value.

    There are times were we need Attribute instances rather than just
    Attribute values (for example, when implementing Construct.attr()).
    Rather than create all of these Attribute instances when a Context
    is created, we lazily create them as they are needed ... this method
    performs that lazy creation.

    Args:
      construct: Construct
        The construct to query.
      attr: str
        The attribute name.
      keysel: str or None
        The attribute key selector, if there is one.

    Returns: Attribute
    """
    # TODO(wmh): When is atype necessary? And can't we obtain this info from
    # self directly, rather than having clients pass it in?
    cons = construct.kind()
    consattrmap = self._consattrmap
    attrmap = consattrmap.setdefault(cons, {})
    attr_can_key = self.cankey(cons, attr)
    if attr_can_key in attrmap:
      # NOTE(review): the cache is keyed only on the canonical key, so a
      # cached Attribute created with a different keysel would be returned
      # here — confirm keysel cannot vary across calls for the same attr.
      result = attrmap[attr_can_key]
    else:
      # We need to create a new Attribute representing the default value.
      attr_type = self.type(cons, attr)
      if self.kind(cons, attr) == 'feature':
        attr_type = 'feature'
      attr_value = self.default(cons, attr, atype=attr_type)
      # Sentinel positions: this Attribute has no source location.
      attr_line_num = -1
      attr_col_num = -1
      result, error, exc = MetaFile.CreateNewAttribute(
        cons, attr, attr_can_key, attr_type, attr_value, keysel,
        attr_line_num, attr_col_num)
      # TODO(wmh): The 'error' string is to be dumped by MetaFile.error(),
      # but we don't have a MetaFile here.  Or do we?  Should each Context
      # maintain a link to compiler, baselang, metalang, fileparser, etc.?
      # Or do we move the relevant info some place more universally accessible
      # like this Context?
      if exc:
        raise exc
      attrmap[attr_can_key] = result
    return result

  def cankey(self, cons, attr):
    """Obtain the canonical key for a given attribute key/abbrev.

    Args:
      cons: str
        The construct name.
      attr: str
        The attribute name.

    Returns: str
      The canonical attribute key.
    """
    # NOTE(review): the canonical key is looked up under the 'Attribute'
    # attr-attr entry of the flattened syntax map — confirm that entry is
    # populated for every attribute (it is not set in __init__ here).
    return self._value(cons, attr, 'Attribute')

  def type(self, cons, attr):
    """Obtain the 'type' value for a given attribute in a given construct.

    Args:
      cons: str
        The construct name.
      attr: str
        The attribute name.

    Returns: varies depending on attribute type.
    """
    # (Method name shadows the builtin 'type'; kept for API compatibility.)
    return self._value(cons, attr, 'type')

  def kind(self, cons, attr):
    """Obtain the 'kind' value for a given attribute in a given construct.

    Args:
      cons: str
        The construct name.
      attr: str
        The attribute name.

    Returns: str
    """
    result = self._value(cons, attr, 'kind')
    return result

  def indicatesImplicitTermination(self, key):
    """Determines if a particular attribute key identifies a construct end.

    This method is only invoked when parsing secondary attributes.  If,
    while parsing such an attribute, we encounter a feature key, a feature
    value, or a primary key instead of a secondary key, the new key must
    begin a new construct instance, so the current construct is implicitly
    terminated.

    Args:
      key: str
        An attribute key.

    Returns: bool
    """
    for table in (self._all_featkeys, self._all_featvals, self._primaries):
      if key in table:
        return True
    return False

# Module-level Context used to bootstrap parsing before any schema-derived
# context exists: no parent and no legal constructs.
BOOTSTRAP_CONTEXT = Context(None, [])


class Line(Meta):
  """A single physical line of meta input, with indentation metadata."""

  # Group 1: leading spaces; group 2: the remainder of the line.
  LINE_RE = re.compile('^([ ]*)(.*\S?)')

  def line(self):
    """The raw line text, always newline-terminated."""
    return self._line

  def num(self):
    """The 1-based (user-facing) line number."""
    return self._num

  def indent(self):
    """Leading-space count, or -1 for blank lines."""
    return self._indent

  def __init__(self, line, num):
    super(Line, self).__init__()
    if line and not line.endswith('\n'):
      # Hack to handle files without a terminating newline.
      # TODO(wmh): Do this more efficiently (including removing Line?)
      line += '\n'
    match = Line.LINE_RE.match(line)
    if match is None:
      # TODO(wmh): Need more formal way to capture errors, determine when
      # they are warnings/errors/fatals, etc.
      raise meta.compiler.errors.SyntaxError(
        'Invalid line %d: %s' % (num, line))
    body = match.group(2)

    # field line: str
    #   The original line.  The trailing newline is kept: end-of-line
    #   detection relies on it so no size checks are needed.
    self._line = line
    if line.strip() and len(line) > 1 and line[-2] == ' ':
      # Trailing whitespace before the newline: normalize and warn.
      self._line = line.rstrip() + '\n'
      print("WARNING: Found trailing space on line %d: '%s'" % (num, line))

    # field num: int
    #   Line number.  User-facing, so it starts at 1, NOT 0.
    self._num = num

    # field indent: int
    self._indent = len(match.group(1)) if body else -1

  def text(self):
    """The line content stripped of surrounding whitespace."""
    return self._line.strip()

  def __str__(self):
    return '%4d: %s%s\n' % (self._num, ' ' * self._indent, self._line.strip())

  def startsNewScope(self, scope_op):
    """Determine if this line starts a new scope.

    Args:
      scope_op: str
        The special end-of-line string indicating the start of an indented
        block.

    Returns: bool
    """
    return self.text().endswith(scope_op)


class MetaFile(Meta):
  """Parse a .meta file into a collection of Construct instances."""

  def context(self):
    """The Context active for this file (a clone of the one passed in)."""
    return self._context

  def filename(self):
    """Path of the file being parsed."""
    return self._filename

  def pos(self):
    """Current position (column) within the current line; 0 is first."""
    return self._pos

  def lineno(self):
    """Current (1-based) line number within the file."""
    return self._lineno

  def streams(self):
    """The MetaStream collection of named output streams."""
    return self._streams

  def construct(self):
    """The Construct representing the whole file (None until parsed)."""
    return self._construct

  def constructIs(self, value):
    """Setter for the file-level Construct."""
    self._construct = value

  def compiler(self):
    """The Compiler associated with this file (may be None)."""
    return self._compiler

  def destdir(self):
    """Directory where baselang files are written (may be None)."""
    return self._destdir

  def parser(self):
    # TODO(wmh): Replace calls to self.parser() with self.
    return self

  def __init__(self, filename, context, compiler=None, destdir=None,
               debug_level=0, raw_lines=None):
    """Initializer.

    Args:
      filename: str
        The file that this MetaFile instance is responsible for.
      context: Context
        The context at the start of this file.  We clone the passed arg
        so that we can mutate it safely without affecting caller semantics.
      compiler: Compiler
        The compiler.
      destdir: str
        Where baselang files are written.
      debug_level: int
        If greater than zero, turn on debugging.
      raw_lines: list of str or None
        The (raw) lines making up the file to be parsed.  Useful in situations
        where we are operating purely in memory, without access to a file
        system.
    """
    super(MetaFile, self).__init__()
    # field context: Context
    #   The context of this file parsing.
    #   TODO(wmh): Remove this ... to be replaced by per-construct contexts.
    self._context = context.clone()

    # field filename: str
    #   The path of the file being parsed.
    self._filename = filename

    # field compiler: Compiler
    self._compiler = compiler

    # field destdir: str
    #   Where baselanguage files are written.
    self._destdir = destdir

    # field pos: int
    #   The position within the current line (0 is first character).
    self._pos = 0

    # field lines: list of Line
    #   All lines within the file.
    self._lines = self._parseFileIntoLines(raw_lines=raw_lines)

    # field numlines: int
    #   Number of lines in the file
    self._numlines = len(self._lines)

    # field lineno: int
    #   Current line number within file.
    self._lineno = 1 # TODO(wmh): clean up the user-level vs internal off-by-one

    # field line: Line
    #   The current Line being parsed.
    #   NOTE(review): assumes the file has at least one line; empty input
    #   would raise IndexError here — confirm callers guarantee this.
    self._line = self._lines[0]

    # field logmap: dict
    #   Maps log-kind letters to lists of entries; 'order' preserves the
    #   overall emission order across kinds.
    self._logmap = {'order': []}

    # field debug_level: int
    self._debug_level = debug_level

    # field postamble: int
    #   This is a rather esoteric field, used in a very specific situation.
    #   When constructs are being parsed within a ComplexBlock, there may be
    #   some empty lines between the last construct and the actual end of the
    #   block. We need to record this information so that when we create the
    #   ComplexBlockAttribute, we can assign it to that complex block instance.
    #   As such, this field is updated in self.parseConstruct() and used in
    #   parseAttribute to initialize a similar field in ComplexBlockAttribute.
    self._postamble = 0

    # field streams: MetaStream
    #   A collection of named streams.
    self._streams = MetaStream()

    # field construct: Construct
    #   The construct representing the entire file.  This is initialized when
    #   parseFile() is invoked.
    self._construct = None

  def metalang(self):
    """Return the MetaLanguage from the associated compiler."""
    return self._compiler.metalang()

  def metapath(self, baselang, subpath=None, mkdirs=False):
    """Given a relative path to a to-be-generated meta file, return its path.

    Each FileEnv knows its target directory, and from there can generate a
    hidden subdir ('.meta') with metalanguage and baselanguage subdirs.

    Args:
      baselang: BaseLanguage
        Which baselanguage we are compiling into.
      subpath: str or list of str or None
        The relative path.  If None, no subpath.
      mkdirs: bool
        If True, create any missing directories

    Raises:
      Error: if this MetaFile has no destdir configured.

    Returns: str
    """
    if not self.destdir():
      raise meta.compiler.errors.Error('Invoked metapath() without a destdir')
    if subpath is None:
      subpath = []
    elif isinstance(subpath, str):
      subpath = [subpath]
    result = os.path.join(
      self.destdir(), '.meta', self.metalang().id(), baselang.id(),
      *subpath)
    if mkdirs:
      dirname = os.path.dirname(result)
      if not IO.exists(dirname):
        # Owner-only permissions.  Fixed: the old '0700' literal is
        # Python-2-only syntax; '0o700' is identical and valid in
        # Python 2.6+ and Python 3.
        IO.makedirs(dirname, 0o700)
    return result

  def log(self, message, kind='I', line=None, pos=None, attr=None):
    """Record a log entry and return it.

    Args:
      message: str
      kind: str
        One-letter severity code (e.g. 'I', 'W', 'E', 'F', 'D').
      line: int or None
        Line number; defaults to attr.line() if attr is given, else the
        current parse line.
      pos: int or None
        Position; defaults to attr.pos() if attr is given, else the current
        parse position.
      attr: Attribute or None

    Returns: dict
      The entry, stored in both the kind-specific list and 'order'.
    """
    resolved_line = line
    if resolved_line is None:
      resolved_line = attr.line() if attr else self._lineno
    resolved_pos = pos
    if resolved_pos is None:
      resolved_pos = attr.pos() if attr else self._pos
    entry = {
      'message': message, 'line': resolved_line,
      'pos': resolved_pos, 'kind': kind}
    # TODO(wmh): We really don't need to store the entry in both a
    # kind-specific list and in the 'order' list ... we could just
    # increment a counter instead.
    self._logmap.setdefault(kind, []).append(entry)
    self._logmap['order'].append(entry)
    return entry

  def debug(self, message, line=None, pos=None):
    """Write a debug message.

    Only active when debug_level > 0.  At level > 1 the message is shown
    alongside the source line it refers to; otherwise a shorter form is
    printed.

    Args:
      message: str
      line: int or None
      pos: int or None
    """
    if self._debug_level:
      entry = self.log(message, kind='D', line=line, pos=pos)
      num = entry['line']
      lines = self._lines
      if self._debug_level > 1:
        lineobj = lines[num-1] if num <= len(lines) else None
        # Truncate long messages to keep the source-line column aligned.
        msg = message[:68] + '$' if len(message) > 69 else message
        sys.stdout.write('%4d:%4d: %-69s |%s' % (
          entry['line'], entry['pos'], msg,
          lineobj.line() if lineobj else '<eof>\n'))
      else:
        # NOTE(review): this branch prints entry['line'] + 1 while the
        # branch above prints entry['line'] unmodified — confirm which
        # offset is correct.
        sys.stdout.write('%4d: %s\n' % (
          entry['line'] + 1, message))

  def info(self, message, line=None, pos=None, attr=None):
    """Log an informational message; returns the log entry dict."""
    return self.log(message, kind='I', line=line, pos=pos, attr=attr)

  def warning(self, message, line=None, pos=None):
    """Log a warning; returns the log entry dict."""
    return self.log(message, kind='W', line=line, pos=pos)

  def error(self, message, line=None, pos=None):
    """Log an error (also echoed via debug()); returns the log entry dict."""
    self.debug(message, line=line, pos=pos)
    return self.log(message, kind='E', line=line, pos=pos)

  def fatal(self, message, line=None, pos=None, log=True):
    """Log a fatal message, then terminate via die() (raises Exiting)."""
    self.log(message, kind='F', line=line, pos=pos)
    self.die(log=log)

  def warnings(self):
    """Return the list of warning entries (possibly empty)."""
    return self._logmap.get('W', [])

  def errors(self):
    """Return the list of error entries (possibly empty)."""
    return self._logmap.get('E', [])

  def hasErrors(self, show=True):
    """Determine if the parse has encountered errors.

    Args:
      show: bool
        If True (default), print out all log entries if errors exist.

    Returns: bool
      True if any errors exist, False if no errors exist.
    """
    has_errors = bool(self._logmap.get('E'))
    if has_errors and show:
      self.printLog()
    return has_errors

  def printLog(self, indent=''):
    basefile = os.path.basename(self._filename)
    print
    print '%s%s:' % (indent, self._filename)
    for entry in self._logmap['order']:
      print '%s%s%4d: %s' % (
        indent, entry['kind'], entry['line'], entry['message'])

  def die(self, log=True):
    """Abort processing by raising Exiting; optionally print the log first."""
    if log:
      self.printLog()
    raise meta.compiler.errors.Exiting('Dying')

  def showpos(self):
    """Print the current line/position (debugging aid)."""
    print 'Line %d pos %d' % (self.lineno(), self.pos())

  def _parseFileIntoLines(self, raw_lines=None):
    """Read the file (or raw_lines) into Line objects, validating indentation.

    Maintains a stack of active indentation widths; a deeper indent is only
    treated as a new level when the previous non-blank line opened a scope,
    and a shallower indent must match some level already on the stack.

    Args:
      raw_lines: list of str or None
        If None, the file is read from disk via IO.

    Raises:
      SyntaxError: if a line's indentation matches no open level.

    Returns: list of Line
    """
    filename = self._filename
    debug = False   # filename.endswith('code.meta')

    context = self.context()
    lines = []
    if raw_lines is None:
      fp = IO.reader(filename)
      try:
        # Normalize Windows line endings.
        raw_lines = [l.replace('\r\n', '\n') for l in fp.readlines()]
      finally:
        IO.close(fp)
    if debug:
      pprint.pprint(raw_lines)
    num = 0
    # Stack of active indentation widths; levels[-1] is the current scope.
    levels = [0]
    new_scope = False
    for line_str in raw_lines:
      num += 1
      # TODO(wmh): Define a addLine() method that deals with 'num' so that we
      # don't risk having the line index be out-of-sync with line number.
      line = Line(line_str, num)
      lines.append(line)
      indent = line.indent()
      #sys.stdout.write('[%-5s] %s' % (new_scope, str(line)))
      if debug:
        logging.info(
          'Line %d: indent=%s levels=%s: "%s"', num, indent, levels,
          line_str.rstrip())

      if indent == -1:
        # This is a blank line, so the status of levels and new_scope do not
        # change.
        pass
      else:
        if indent > levels[-1]:
          if new_scope:
            # We have a new indentation level.  This is only legal if
            # the last non-empty line
            levels.append(indent)
          else:
            # This is not a new indentation level, but rather text indented
            # as a continuation of a statement.
            pass
        elif indent < levels[-1]:
          # We are closing off one or more indentation levels (and ensure that
          # the current indentation matches one of them.
          try:
            pos = levels.index(indent)
            levels = levels[:pos + 1]
          except ValueError:
            print 'INDENT = "%s"  LEVELs = "%s"' % (indent, levels)
            raise meta.compiler.errors.SyntaxError(
              'Line %d of %s has an invalid indentation: %s' %
              (num, self.filename(), line.line()))
        else:
          # This line is at the same indentation as the previous line.
          pass
        new_scope = line.startsNewScope(context.token('scope'))
    return lines

  def getChar(self):
    """Obtain the character at the current parse position.

    Returns: char
    """
    text = self._line._line
    return text[self._pos]

  def nextChar(self):
    """Advance to the next char, crossing to the next line if needed.

    SideEffects:
     - increments self._pos by one, or advances to the next line when the
       current line has no further characters.

    Returns: char
    """
    text = self._line._line
    # BUG FIX: the original guard (self._pos < len(text)) allowed the
    # increment to step past the final character, so calling nextChar()
    # while positioned on the last char of a line raised IndexError
    # instead of moving to the next line.
    if self._pos + 1 < len(text):
      self._pos += 1
      result = text[self._pos]
    else:
      self.nextLine()
      result = self.getChar()
    return result

  def getLine(self):
    """Obtain the current Line instance.

    Returns: Line
    """
    current = self._line
    return current

  def nextLine(self):
    """Advance to the next line.

    SideEffects:
     - increments the line counter by one
     - repositions self._pos at the new line's indentation (0 at EOF)

    Returns: Line or None (None once past the last line)
    """
    self._lineno += 1
    try:
      current = self._lines[self._lineno - 1]
      pos = current.indent()
    except IndexError:
      # Walked past the last line: current line becomes None.
      current = None
      pos = 0
    self._line = current
    self._pos = pos
    return current

  def gotoLine(self, num):
    """Move the current position to the given line.

    Args:
      num: int
        User-oriented line number (1-based).  Values below 1 are
        clamped to 1.

    Returns: Line or None
      None when num is past the last line (in which case the line
      counter is left unchanged).
    """
    num = max(num, 1)
    lines = self._lines
    if num > len(lines):
      result = None
      pos = 0
    else:
      self._lineno = num
      result = lines[num - 1]
      pos = result.indent()
    self._line = result
    self._pos = pos
    return result

  def newLine(self, text):
    """Create a new Line, append it to myself, and make it current.

    Args:
      text: str
        The entire contents of the line, including newline at end.

    Returns: Line
    """
    # With N existing lines, the new line occupies internal index N and
    # carries user-facing line number N+1 (user lines index from 1).
    existing = self._lines
    line = Line(text, len(existing) + 1)
    existing.append(line)
    self._line = line
    self._pos = line.indent()
    return line

  def advanceToLine(self, match, start=None):
    """Advance to the line matching given regexp.

    This method is used primarily for unittesting, allowing us to easily move
    to a context-sensitive position within a meta file instead of hardcoding
    line numbers that change when new text is added to golden files.

    Args:
      match: regexp or str
        The string/regexp to apply to each line looking for a match.
        This uses re.match().
      start: int or None
        If None, start from current line.  Otherwise, set line number to
        the specified value before moving forward.  This 'start' field
        is user-oriented, so it starts from 1.

    Returns: Line or None
      The first matching line, or None when no line matches (position is
      then past the last line).
    """
    if start is None:
      line = self.getLine()
    else:
      line = self.gotoLine(start)
    lines = self._lines
    size = self._numlines
    i = line.num() - 1
    while i < size and not re.match(match, lines[i].line()):
      i += 1
    # Add one to convert internal position (i) to user facing (i+1).
    # BUG FIX: previously the matched Line was discarded (no return), which
    # contradicted the documented 'Returns: Line' contract.
    return self.gotoLine(i + 1)

  def advanceToParentBlock(self, line=None):
    """Advance to the line containing indentation less than the current line.

    Args:
      line: Line or None
        The line to start from.  If None, start from current location.

    Returns: Line or None
      The first subsequent non-blank line with less indentation, or None
      if end-of-file is reached first.
    """
    # Establish our line.
    if line:
      self.gotoLine(line.num())
    else:
      line = self.getLine()
    # If the start line is blank, advance to the next non-blank line.
    # BUG FIX: this loop previously called line.nextLine() -- Line has no
    # such method, and 'line' was never reassigned, so a blank start line
    # could never make progress.  Advance through self instead.
    while line and line.indent() < 0:
      line = self.nextLine()

    if line:
      # Establish target indentation and skip forward until we find a
      # non-blank line with strictly less indentation (or run out of
      # lines, in which case nextLine() returns None).
      target_indent = line.indent()
      while True:
        line = self.nextLine()
        if line is None:
          break
        indent = line.indent()
        if indent > -1 and indent < target_indent:
          break
    return line

  def parseConstruct(self, indent=0, preamble=0):
    """Parse a construct at current position.

    Parsing a construct consists of:
     - Consuming zero or more feature attribute keys/values, without knowing
       which construct they apply to (yet).  Must also support parameterized
       feature attribute values, so it isn't as simple as scanning for explicit
       values.
     - identifying a primary attribute key and associated primary attribute
       value (key is always required, value may be optional).
     - validating all feature attributes against the established construct
     - parsing subsequent tokens, which can be only secondary attribute keys,
       abbrevs, or values, or a construct terminator.
     - identifying a construct terminator sequence.

    Notes:
     - The indentation of the first line establishes the bounds of the construct,
       in that it cannot exceed the collection of lines whose indentation is
       greater equal that initial line.
     - Although it would be convenient to simply scan through lines looking
       for an end-of-construct terminator, this strategy is problematic when
       literal strings exist (which might encode end-of-construct terminators).
       As such, we must detect such end-of-construct terminators in the
       context of what is currently being parsed (i.e. need to know whether we
       are parsing a literal string or not, which means we need to do in-context
       parsing of tokens)

    Args:
      indent: int
        The indentation level at which the construct appears.  If a line is
        encountered with an indent less than this, we stop parsing.  If a line
        is encountered with an indent greater than this, it is an error.
      preamble: int
        The number of blank lines before the construct considered preamble.
        This is almost always 0. It is >0 in situations where the previous
        construct was implicitly terminated with a block attribute ... the
        empty lines between the last line in this block value and the start of
        the next construct are to be considered preamble for that next
        construct, rather than spurious lines within the last block value of
        the previous construct.

    Returns: Construct or None
      None at EOF or when the enclosing block is implicitly closed (a line
      with indent < the requested indent), or when no legal construct could
      be parsed at this position.
    """
    context = self.context()

    # TODO(wmh): This code does NOT support multiple constructs on the same
    # line separated by ';' ... it is assumed that a new construct starts at
    # position 'indent' of the line.  We will want to generalize this.

    # Skip past blank lines, keeping a record of them (they are preamble)
    # Note that in situations where preamble is already >0, there should never
    # be any such additional blank lines.
    line = self.getLine()
    while line and line.indent() < 0:
      line = self.nextLine()
      preamble += 1

    if not line:
      # We've reached EOF (handled same way as end-of-block)
      construct = None
      self.debug('found eof (postamble=%d)' % preamble)

    else:
      self.debug('new construct: preamble %d' % preamble)

      if line.indent() > indent:
        # We've encountered the start of a construct that is indented more than
        # we are expecting ... this is a syntax error.
        self.error(
          'Found line indented %d spaces when %d spaces was expected' %
          (line.indent(), indent))

        # To recover from this error, we scan forward until we find a line whose
        # indent is <= indent, and proceed normally.
        skip = 0
        while line.indent() > indent:
          line = self.nextLine()
          skip += 1
        self.debug('Skipped %d lines to recover from error' % skip)

      if line.indent() < indent:
        # We've stepped outside of the scope within which our to-be-parsed
        # constructs reside, so there are no more constructs to parse.
        construct = None
        self.debug(
          'Found indent %d < %d (implicit construct termination)' %
          (line.indent(), indent))

      else:
        line_before_primary = line
        construct = self.parseToPrimary()
        if construct:
          construct.preambleIs(preamble)
          # Consume secondary attributes until parseAttribute() signals
          # termination (returns no attribute).
          while True:
            secondary, termcode = self.parseAttribute(construct, indent)
            if secondary:
              if not construct.registerAttribute(secondary):
                # We've encountered two attributes with the same name.
                self.error(
                  'Encountered duplicate attribute %s' % secondary.keyStr())
            else:
              break
          if termcode:
            if termcode < 0:
              # TODO(wmh): Clean this up so that parseAttribute returns 0 when
              # implicit termination is desired, and -1 when no termination
              # has yet occurred?
              construct.termcodeIs(0)
            else:
              construct.termcodeIs(termcode)
          else:
            self.error('Encountered empty secondary and termcode')
            print 'here with %s' % str(line)

          # We stop parsing attributes in these situations:
          #   1) parseAttribute() encountered a construct terminator,
          #      and now self.getChar() == term.  This in turn can happen
          #      two ways:
          #        a) one-line construct with ';'
          #        b) multi-line construct with '[end [<cons> [<id>]]];'
          #   2) parseAttribute() encountered a line whose indentation
          #      is less than required.
          #   3) a feature key/abbrev, feature value or primary key/abbrev are
          #      encountered when a secondary key/abbrev is expected, in which
          #      case the construct is assumed to be implicitly terminated.
          #   4) eof is encountered.
          #
          # In any case, we need to adjust _line and _pos so that another
          # call to parseConstruct() will work as intended.
          line = self.getLine()
          if not line:
            # We have encountered end-of-file.
            self.debug('Found EOF (no more attributes)')
          else:
            text = line.line()

            if text[self._pos] == context.token('term'):
              # For now, we advance to the next line, but eventually we'll want
              # to support parsing multiple constructs per line.
              self.debug('Found terminator for %s' % construct.kindid())
              if text[self._pos+1] != '\n':
                self.warning(
                  'Ignoring text after construct terminator at pos %d' %
                  self._pos)
              # NOTE(review): 'newline' is unused; the call is made for its
              # side effect of advancing past the terminator line.
              newline = self.nextLine()

            elif line.indent() < indent:
              # Implicit end of construct. Ready to go as-is.
              self.debug('Implicit end of construct (indent < %d)' % indent)

            elif line.indent() == indent and termcode == -1:
              # We have an implicit end of construct.
              pass

            else:
              self.error('Unexpected end-of-construct scenario encountered')
              # raise meta.compiler.errors.InternalError(
              #   'Unexpected end-of-construct scenario encountered')
        else:
          # We've encountered text within the current complex block that
          # isn't a construct.  We can do either of these:
          #  1) find the end of the block and report all intervening text
          #     as unrecognized
          #  2) keep looking forward for new constructs within this block
          self.error(
            'Failed to find a legal construct for this block: expecting one of'
            ' %s' % ', '.join(sorted(context.primaries())),
            line=line_before_primary.num())
          line = self.advanceToParentBlock(line=line_before_primary)
          self.error(
            'Skipped %d lines' % (line.num() - line_before_primary.num()))
          # TODO(wmh): We need to report the contents of these skipped lines
          # so the user knows they haven't been parsed.

    self._postamble = preamble
    return construct

  def parseFile(self):
    """Parse the entire file into a single File (or Schema) construct.

    The file either has an explicit File (or Schema) construct that wraps all
    constructs, or this method creates an implicit File instance to do so.

    Returns: Construct (GenericConstruct or FileConstruct or SchemaConstruct)
      None if parse errors were encountered.
    """
    self.gotoLine(1)
    filename = self._filename
    target_cons_names = ['File', 'Schema']

    # TODO(wmh): The parent should be the 'scope' block of the parent
    # File instance.
    parent = None

    constructs = self.parseComplexBlock(-1)

    if self.hasErrors():
      result = None

    elif len(constructs) == 1 and constructs[0].kind() in target_cons_names:
      # We've confirmed that an explicit File construct exists in the file.
      result = constructs[0]
      result.parentIs(parent)

    elif constructs and constructs[0].kind() in target_cons_names:
      # We have a File or Schema construct plus more constructs ... that's
      # too much.
      #
      # BUG FIX: added the 'constructs and' guard so an empty construct list
      # falls through to the implicit-File branch below instead of raising
      # IndexError on constructs[0].
      #
      # TODO(wmh): change this to a proper error log.
      raise meta.compiler.errors.Error(
        'Found file with multiple constructs, first of which is a Schema')

    else:
      # We create a File construct to hold the contents of the file
      # (possibly an empty one, if the file contained no constructs).
      result = GenericConstruct('File', filename)
      primary = WordAttribute('File', filename, line=0, pos=0)
      scope = ComplexBlockAttribute('scope', constructs, line=0, pos=0)
      scope.postambleIs(self._postamble)
      result.registerAttribute(primary)
      result.registerAttribute(scope)
      result.termcodeIs(7)

    self.constructIs(result)
    return result

  def parseAttribute(self, construct, required_indent, keyinfo=None,
                     is_primary=False):
    """Parse an attribute key/value pair for the specified construct.

    Side Effects:
     - self.pos() is advanced to the character after the attribute value.  If
       the terminator (';') is encountered, self.pos() stays on that char.

    An attribute consists of:
     - an attribute key or key abbrev.  Keys are always ids, but abbrevs can
       be operators or ids or other words.
     - an optional attribute parameter
     - an attribute value:
        - a literal int    i.e. 42            (arbitrary precision ... code uses appropriate datastructure to hold it)
        - a literal float  i.e. 3.1415926535  (arbitrary precision ... code uses appropriate datastructure to hold it)
        - a literal string i.e. 'this is a test'
        - a literal list   i.e. List|<a|b|c>
        - a word           i.e. '_39+.@$a'    (anything except whitespace)
        - an identifier    i.e. 'age'         ([underalph][underalphanum]*)
        - a block          i.e. colon followed by newline then indented lines.

    Notes:
     - in some situations, no attribute key is needed (for feature attributes,
       and when the attribute value starts with some special character that
       uniquely identifies it relative to all other attributes in the
       construct).  Examples:

        - the 'scope' attribute of most constructs is optional because its
          value (a simple or complex block) starts with ':', and amongst all
          block-valued attributes in all constructs, the 'scope' attribute
          is marked as being optional-keyed ... if a ':<nl>' is seen when an
          attribute key is expected, we know to assume 'scope'.  NOTE: We need
          to decide how this interacts with attribute keys whose alias is ':'
          (do we require key/values to appear on the same line???)

        - the 'param' attribute of 'method' has an optional key because its
          value, of type paramlist, starts with '(', and 'param' has been
          marked as optional with '(' as its hint.

    Raises:
      Error: If any Line has no number.
      Error: If any Line has unexpected indentation
      SyntaxError: If the selector associated with the attribute is improperly
        formed.
      Error:
        If, during parsing of the attribute, an end-of-construct terminator
        is found that does not match the construct being parsed, or is
        found in unexpected position.
      InteralError: If an invalid attribute type is identified.

    Args:
      construct: Construct
        The construct for which the attribute must be legal.
      required_indent: int
        The required indentation level of the line that starts the attribute.
        An error is reported if this invariant is not maintained.
      keyinfo: three-tuple or None
        Usually, this is None, but if it is specified, it means the key portion
        of the attribute has already been parsed (and self._pos is past this
        key) so that we only need to parse the value.  If not specified, we are
        to also parse the attribute key itself.
      is_primary: bool
        True if we are parsing a primary attribute, false if we are parsing
        a secondary attribute. There is some correlation between keyinfo and
        is_primary, but they are separate for added flexibility.

    Returns: two-tuple
      [0] Attribute or None
      [1] int (the term code)
         0 = not terminated
        -1 = implicit (no terminator specified, but indentation or eof terminates)
         1 = ';'
         3 = 'end;'
         7 = 'end <cons>;'
        15 = 'end <cons> <id>;'
    """
    ckind = construct.kind()
    context = self.context()
    term = context.token('term')
    termend = context.token('end')
    scope_start = context.token('scope')
    enders = context.enders()
    line = self.getLine()
    if not line or line.indent() < required_indent:
      return None, -1
    num = line.num()
    if num is None:
      raise meta.compiler.errors.Error('line has None num')
    text = line.line()
    i = self._pos
    recursed = False

    result = None
    termcode = 0

    # Skip pre-key whitespace.  If keyinfo is not None, this instead skips
    # past pre-value whitespace.
    while text[i] == ' ':
      i += 1

    # If we are at the terminator, return None as our attribute. We do NOT
    # currently advance to the next line (seems cleaner to keep that logic
    # in parseConstruct), but may change that decision later...
    if text[i] == term:
      if keyinfo is None:
        # We are to parse a key/value attribute, but there are no more.
        # This is termcode = 1, no result.
        result = None
        termcode = 1
        cankey = None   # needed for debugging only
        atype = None    # needed for debugging only
        aval = None     # needed for debugging only
      else:
        # We are NOT to parse the key (because it has already been parsed),
        # but have encountered end-of-construct, which means there is no
        # attribute value.  This may or may not be legal, depending on how
        # the attribute in question is configured.
        #
        # For now, we assume it is an error.
        self.error(
          "Expecting an attribute value for key '%s' of construct '%s'" %
          (key, ckind))
        result = None

    elif text[i] == '\n':
      # We need to advance to the next line and ensure that it has the same
      # indentation level as the current line, then recurse.
      newline = self.nextLine()
      while newline.indent() < 0:
        newline = self.nextLine()
      if newline.indent() != required_indent:
        raise meta.compiler.errors.Error(
          '%s: Line %d expected to have indentation %d, not %d' %
          (self._filename, newline.num(), required_indent, newline.indent()))
      else:
        result, termcode = self.parseAttribute(
          construct, required_indent, keyinfo=keyinfo)
        recursed = True

    else:
      # Parse the attribute key
      is_block = False
      lsel = context.token('lsel')
      if keyinfo is None:
        key_start = i
        #while text[i] != ' ' and text[i] != '\n':
        while text[i] != ' ' and text[i] != '\n' and text[i] != lsel:
          i += 1
        key_end = None
        if text[i] == lsel:
          key_end = i
          # Consume the selector.
          while text[i] != ' ' and text[i] != '\n':
            # TODO(wmh): Allow nested <>?  If so, count the '<'s and '>'s
            i += 1

        # Detect start-of-block
        if text[i] == '\n' and text[i-1] == scope_start:
          i -= 1
          is_block = True

        # Obtain the key and (optional) keysel
        if key_end is None:
          # There is no key selector
          key_end = i
          keysel = None
        else:
          # We make sure that the selector terminated properly.
          if text[i-1] != context.token('rsel'):
            raise meta.compiler.errors.SyntaxError(
              'Found attribute %s with improperly terminated selector %s' %
              (text[key_start:key_end], text[key_end:i]))
          keysel = text[key_end + 1:i - 1]

        key = text[key_start:key_end]
        # Skip pre-value whitespace
        while text[i] == ' ':
          i += 1

      else:
        # The caller has already parsed the key, has provided us with the
        # relevant information, and invoked this method with self._pos set
        # past the key.
        key_start, key, keysel = keyinfo

      # Establish the crucial attribute info.  Note that atype will be None
      # in the following situations:
      #   a) key is 'end' or 'end;' (terminatior)
      #   b) a feature value for some other construct (indicates implicit
      #      termination of a previous construct).
      #   c) a non-attribute was encountered.
      cankey = context.cankey(ckind, key)
      atype = context.type(ckind, cankey)
      valopt = False
      genkey = cankey if cankey else key

      self.debug('  key=%s keysel=%s cankey=%s atype=%s is_block=%s' % (
        key, keysel, cankey, atype, is_block))

      # Parse the attribute value based on the first char(s)
      #  - if cankey is not a secondary key (i.e. it is a primary key/abbrev,
      #    feature key/abbrev or feature value), we've walked into another
      #    construct and this one is to be implicitly terminated ... there is
      #    no attribute value to return in this situation.
      #  - if c is '"', parse a one-line literal string.
      #  - if c is ';', terminate parsing ... only legal if the val is optional
      #  - else consume a word and check if it starts with:
      #     - 'List' (literal list)
      #     - 'end' (terminator)
      #     - digits only
      aval = None
      c = text[i]
      if not is_primary and context.indicatesImplicitTermination(genkey):
        # cankey is NOT a secondary attribute for 'construct', but IS either
        # a feature key, feature value or primary key for some construct that
        # is legal in the current context.  This is interpreted as an implicit
        # end-of-construct.
        #
        # TODO(wmh): There is a subtle issue that will need to be dealt with
        # eventually. Currently, we detect implicit terminations if we
        # encounter primary keys, feature keys, or feature values, but NOT when
        # we encounter primary key abbrevs or feature key abbrevs. The code in
        # Context.__init__() that initializes all_secondaries, all_featkeys and
        # primaries needs to be extended to add abbrevs, but we must first
        # consider the ramifications this extension has (are we assuming that
        # context._primaries does NOT include abbrevs currently?)
        #
        # TODO(wmh): There is yet another subtle issue that may need to be
        # dealt with.  Suppose we are parsing 'field syntax' in the following:
        #
        #    get field syntax : dict #:
        #      /# A dict mapping construct ...
        #
        #    get field enders : frozen_set #:
        #      /# The set of characters that represent the end of a token.
        #
        # There is an implicit termination of that construct when we encounter
        # the line defining 'field enders'.  The blank line between the
        # two constructs is more properly placed as preamble to 'field enders',
        # but without special treatment, will instead be a trailing empty
        # line within the 'comment' block of 'field syntax'.
        termcode = -1
        i = self._pos

      elif key == termend or key == termend + term:
        termcode = 3  # 'end' and ';'
        term_re = context.re('term')
        term_match = term_re.search(text)
        if term_match:
          if term_match.start() + len(termend) + 1 == i:
            end_, kind_end, id_end = term_match.groups()
            if kind_end and kind_end != ckind:
              self.error(
                'Parsing %s but encountered terminator for construct %s' %
                (construct.kindid(), kind_end))
              i = term_match.start() + len(term_match.group()) - 1
              aval = None
              # raise meta.compiler.errors.Error(
              #   'Parsing %s but encountered terminator for construct %s' %
              #   (construct.kindid(), kind_end))
            elif id_end and id_end != construct.id():
              self.error(
                'Parsing %s but encountered terminator for %s %s' %
                (construct.kindid(), ckind, id_end))
              termcode = 0xf;
              i = term_match.start() + len(term_match.group()) - 1              
              # raise meta.compiler.errors.Error(
              #   'Parsing %s but encountered terminator for %s %s' %
              #   (construct.kindid(), ckind, id_end))
            else:
              # Valid termination.
              if kind_end:
                termcode |= 0x4
                if id_end:
                  termcode |= 0x8
              i = term_match.start() + len(term_match.group()) - 1
          else:
            raise meta.compiler.errors.Error(
              'Found terminator end, but at unexpected position %d' %
              term_match.start())

      elif c == term:
        # End of construct (no value).
        if valopt:
          # No value is required ... and we use the default instead.
          # We call default() here because the special value '<assigned>' will
          # create an auto-generated unique id, and we don't want spurious
          # ids created.
          aval = context.default(ckind, key, atype=atype)

      elif c == '\"' or c == "'":
        # Parse a literal string.
        val_start = i
        i += 1
        n = len(text)
        while i < n:
          if text[i] == c and text[i-1] != '\\':
            break
          i += 1
        if i >= n:
          # We failed to find the closing delimiter on the line. Index
          # i is past the newline, so we subtract 2 to get it to the
          # character before the newline. We set aval to None to
          # indicate an error.
          i = n - 2
          self.error('Failed to find closing delimiter %s' % c)
          aval = None
        else:
          i += 1
          aval = text[val_start:i]
        # Next char must be a space or terminator?

      elif c == scope_start:
        # We have a block to parse - woohoo!

        # The block may start with any number of empty lines, but we want the
        # first non-empty line to establish the indentation of the block. The
        # Line instances have an indent of -1 for empty lines so that we can
        # easily skip them.
        superdent = line.indent()
        line = self.nextLine()
        if atype == 'simple':
          aval = self.parseSimpleBlock(superdent)
        elif atype == 'complex':
          aval = self.parseComplexBlock(superdent)
        elif atype is None:
          # An unknown block-typed attribute has been encountered.  We report
          # the error and advance past the block to continue parsing.
          print (
            "Invalid atype '%s' when parsing block-typed attribute %s.%s" %
            (atype, ckind, cankey))
          lines = self.parseSimpleBlock(superdent)
          self.error(
            'Found unknown block-typed attribute %s.%s (skipping)' %
            (ckind, key))
          print '\n'.join(lines)
          aval = None
        else:
          raise meta.compiler.errors.InternalError(
            "Invalid atype '%s' when parsing block" % atype)
        line = self.getLine()
        i = self.pos()

      else:
        # Consume a word and decide from there.  We stop when we encounter a
        # blank, newline, construct terminator or (scope start followed by one
        # of the above three).
        #
        # TODO(wmh): By stopping when we encounter scope start (usually ':'),
        # we preclude any attribute value from containing it (unless it is in
        # a literal string).  May be problematic (or not).
        word_start = i
        while text[i] not in enders:
          i += 1
        if text[i-1] == scope_start:
          i -= 1
        word = text[word_start:i]
        if LiteralList.StartsList(word):
          aval = LiteralList(word)
        else:
          aval = word

      # self.debug('  aval=%s' % repr(aval))

      # Update the current char position
      self._pos = i

      # Type check.
      if termcode:
        result = None
      elif aval is None:
        # This indicates that an error was found while parsing the attribute
        # value.  The error recovery semantics is to advance past the offending
        # attribute and continue parsing the next attribute, so we recursively
        # invoke parseAttribute()
        print '***** HERE WITH AVAL None: pos=%d (%s)' % (self._pos, text[self._pos])
        
        result, termcode = self.parseAttribute(
          construct, required_indent, keyinfo=None, is_primary=False)
      else:
        result, error, exc = self.CreateNewAttribute(
          ckind, key, cankey, atype, aval, keysel, num, key_start,
          self._postamble)
        if error:
          self.error(error)
        if exc:
          raise exc

    return result, termcode

  @classmethod
  def CreateNewAttribute(cls, ckind, attr_key, attr_can_key, attr_type,
                         attr_value, attr_key_selector, attr_line_num,
                         attr_key_start, postamble=None):
    """Build the Attribute subclass instance matching attr_type.

    Args:
      ckind: str
        The construct kind (used in diagnostic messages only).
      attr_key: str
        The attribute key as written in the source (may be an abbrev).
      attr_can_key: str
        The canonical attribute key.
      attr_type: str or None
        The attribute type ('id', 'str', 'word', 'int', 'type', 'feature',
        'simple', 'complex', or a '*-list' type).
      attr_value: any?
        The attribute value to wrap.
      attr_key_selector: str or None
        The attribute key selector, if there is one.
      attr_line_num: int
        The line number at which the attribute appears
      attr_key_start: int
        The line column at which the attribute key starts
      postamble: str or None
        The postamble of this construct (only used for 'complex' attrs).

    Returns: tuple<any,str,Exception>
      (attribute or None, error message or None, exception or None)
    """
    result = None
    error = None
    exc = None
    # Keyword arguments shared by every Attribute constructor below.
    common = dict(
      litkey=attr_key, selector=attr_key_selector, line=attr_line_num,
      pos=attr_key_start)
    if attr_type is None:
      error = (
        "Attribute key '%s' has unknown type" % attr_can_key)
    elif attr_value is None:
      error = (
        "Expecting an attribute value for key '%s' within construct '%s'" %
        (attr_can_key, ckind))
    elif attr_type == 'id':
      result = IdAttribute(attr_can_key, attr_value, **common)
    elif attr_type == 'str':
      # String values are coerced via str() before storage.
      result = StrAttribute(attr_can_key, str(attr_value), **common)
    elif attr_type == 'word':
      result = WordAttribute(attr_can_key, attr_value, **common)
    elif attr_type == 'simple':
      result = SimpleBlockAttribute(attr_can_key, attr_value, **common)
    elif attr_type == 'complex':
      result = ComplexBlockAttribute(attr_can_key, attr_value, **common)
      result._postamble = postamble
    elif attr_type == 'type':
      try:
        result = TypeAttribute(attr_can_key, attr_value, **common)
      except InvalidType as e:
        error = str(e)
    elif attr_type == 'feature':
      result = FeatureAttribute(attr_can_key, attr_value, **common)
    elif attr_type == 'int':
      try:
        result = IntAttribute(attr_can_key, int(attr_value), **common)
      except ValueError:
        # int() failed, so attr_value still holds the original (bad) token.
        error = (
          "Value '%s' of attribute '%s' for construct '%s' is not of type '%s'"
          % (attr_value, attr_can_key, ckind, attr_type))
    elif attr_type.endswith('-list'):
      if attr_type == 'word-list':
        result = WordListAttribute(attr_can_key, attr_value, **common)
      elif attr_type == 'id-list':
        result = IdListAttribute(attr_can_key, attr_value, **common)
      else:
        exc = Error('Have not yet implemented list type %s' % attr_type)
    else:
      error = (
        "Unknown attribute type '%s' for key '%s' of construct '%s'" %
        (attr_type, attr_key, ckind))
      # TODO(wmh): Define a special Exception subclass that can contain as one
      # of its fields any other exception, so that we can store the offending
      # issue.
      exc = Error(error)
    return result, error, exc

  def parseToPrimary(self, generic=True, features=None):
    """Consume forward until a primary construct key/value is found.

    This method should only be called when we are expecting a new construct.
    It consumes space-delimited tokens until a token is encountered that
    represents a primary attribute key (the set of legal keys/abbrevs is
    dictated by the current context).  The primary attribute value is also
    consumed (if it exists ... some constructs allow for optional primary
    values).  The primary key/value are used to create a Construct instance,
    after which all of the pre-construct tokens are checked for validity with
    respect to that Construct before being added as FeatureAttribute instances.

    Notes:
     - Both feature keys and the construct key may be parameterized,
       and each such key is tokenized into a two-tuple (key, param_value).

    Args:
      generic: bool
        If True, create a GenericConstruct instance.
        If False, assume that a class for each construct exists, and use the
        construct-specific class to create the instance.
      features: list of two-tuples
        The feature tokens parsed so far. Each element is a two-tuple
        consisting of an integer (start position of token within its line) and
        a string (the token itself). Used in situations where no primary key
        is found on a line, and we recursively invoke this method on a
        subsequent Line.

    Returns: Construct or None
    """
    # TODO(wmh): Switch generic=False once we've bootstrapped enough to generate
    # construct classes from schema definitions.
    result = None
    if features is None:
      features = []
    cons_key = None
    context = self.context()
    primaries = context.primaries()
    self.debug('p=%s' % ','.join(sorted(primaries.keys())))

    # Accumulate all tokens up to the first token representing a primary key.
    line = self.getLine()
    text = line.line()
    num = line.num()
    i = self._pos
    key_start = None
    while True:
      # Skip over spaces (disallow tabs, newlines are special)
      while text[i] == ' ':
        i += 1
      if text[i] == '\n':
        break
      # Scan until we encounter whitespace.
      token_start = i
      while text[i] != ' ' and text[i] != '\n':
        i += 1
      token = text[token_start:i]
      if token in primaries:
        # TODO(wmh): We need to somehow remember which token (key or abbrev)
        # was used here, so that we can regenerate the source code verbatim.
        cons_key = token
        cons_sel = None   # TODO(wmh): Support selectors on primaries!
        cons_cons = primaries[cons_key]
        consinfo = context.consinfo(cons_key)
        key_start = token_start
        self.debug('Found construct %s at line %d pos %d' % (
          cons_key, num, key_start))
        break
      else:
        features.append((token_start, token))
      if text[i] == '\n':
        break

    if cons_key:
      # We have identified a primary key, and have the Construct that defines
      # the construct we want to create.  Even though we do not yet have an
      # id for the construct, we create a Construct instance.  This will allow
      # us to use parseAttribute() to obtain the primary value, instead of
      # repeating code here.
      if generic:
        result = GenericConstruct(cons_key, None)
      else:
        # TODO(wmh): Need to dynamically find a class based on a string.
        # If all construct classes are placed in the 'constructs' module,
        # this is easily accomplished by looking in that modules __dict__.
        raise meta.compiler.errors.Error(
          'Not yet supporting non-generic construct classes')
        #cls = constructs.__dict__[cons_key]
        #result = cls(cons_id)

      # Now adjust pos and call parseAttribute() to obtain the primary value.
      self._pos = i
      primary, termcode = self.parseAttribute(
        result, line.indent(), keyinfo=(key_start, cons_key, cons_sel),
        is_primary=True)
      if not primary:
        self.error('Failed to obtain a primary attribute')

      else:
        # Update state based on our parsing of the primary attribute value.
        i = self._pos
        cons_id = primary.value()
        result.idIs(cons_id)

        # Validate features and create Attribute instances.
        findex = 0
        nf = len(features)
        feature = None
        while findex < nf:
          kv_start, featkv = features[findex]
          if featkv in consinfo['featkeys']:
            # We have a feature key - next token must be a matching value.
            # BUGFIX: verify a next token exists *before* indexing into
            # features; indexing first raised IndexError whenever a feature
            # key was the final parsed token.
            if (findex + 1 < nf and
                consinfo['featvals'].get(features[findex + 1][1], '') ==
                featkv):
              featval = features[findex + 1][1]
              feature = FeatureAttribute(featkv, featval, line=num, pos=kv_start)
              result.registerAttribute(feature)
              findex += 1
            else:
              # TODO(wmh): The value may reside on a subsequent line ... need
              # to support that at some point. Or do we?
              self.error(
                "Missing a feature value for feature key '%s'" % featkv)

          elif featkv in consinfo['featvals']:
            # We have a feature value ... key is implicit.
            featkey = consinfo['featvals'][featkv]
            feature = FeatureAttribute(featkey, featkv, line=num, pos=kv_start)
            result.registerAttribute(feature)

          elif featkv == '\n':
            # There was a newline after the last processed feature.
            if feature:
              feature.nlIs(True)

          else:
            # This is an erroneous token
            self.error(
              "Found illegal token '%s' while parsing construct '%s': %s" %
              (featkv, cons_id, ','.join(sorted(consinfo['featkeys']))))
          findex += 1

        # Register the primary attribute
        result.registerAttribute(primary)

        # Record new position
        self._pos = i

    else:
      # We failed to find a construct on this line. Advance to the next
      # line and keep parsing, unless the next line is not identically
      # indented, or doesn't exist, we've failed to find a construct.
      newline = self.nextLine()
      if newline and newline.indent() == line.indent():
        # We add a '\n' to features as an indicator that a newline occurred,
        # so that we can capture user-indentation as we parse.
        features.append((i, '\n'))
        # Recurse.
        result = self.parseToPrimary(
          generic=generic, features=features)
      else:
        # Failed to find a construct.  Error reported in caller.
        result = None

    return result

  def parseSimpleBlock(self, superdent):
    """Given a parser at a start-of-scope character, parse a simple block.

    Each collected line has the block's indentation prefix and its trailing
    newline removed before being appended to the result.

    Args:
      superdent: int
        The indentation level of the parent of this block.

    Returns: list of str
    """
    block_indent = self.peekNonEmpty().indent()
    collected = []
    if block_indent > superdent:
      cur = self.getLine()
      # Empty lines report a negative indent and still belong to the block.
      while cur and (cur.indent() >= block_indent or cur.indent() < 0):
        collected.append(cur.line()[block_indent:-1])
        cur = self.nextLine()
    return collected

  def parseComplexBlock(self, superdent):
    """Given a parser just past a start-of-block, parse a block of constructs.

    Repeatedly invokes parseConstruct() at the block's own indentation until
    no further construct is found, accumulating the results in order.

    Side-Effects:
      Sets self._postamble to the number of empty lines after the complex block

    Args:
      superdent: int
        The indentation level of the parent of this block.

    Returns: list of Construct
    """
    line = self.peekNonEmpty()
    indent = line.indent()
    result = []
    self.debug(
      'starting parseComplexBlock within indent %d with sub-indent %d'
      % (superdent, indent))
    preamble = 0
    if indent > superdent:
      while True:
        construct = self.parseConstruct(indent=indent, preamble=preamble)
        if construct:
          result.append(construct)

          if construct.termcode() == 0:
            # A termcode of 0 means the construct was terminated implicitly.
            # When a construct terminates implicitly, all of the blank lines
            # currently contained in the last block-valued secondary attribute
            # of that construct need to be moved out of that attribute and
            # considered preamble to the next construct (or as postamble of
            # the complex block itself if this is the last construct).
            last_secondary = construct.order()[-1]
            #print 'Encountered implicit termination for %s ... fixing up last attribute (%s)' % (
            #  construct.fullid(), last_secondary.key())

            if last_secondary.isBlock():
              # There are N empty lines at the end of this last block that
              # should be considered preamble for the *next* construct,
              # rather than extra lines within the last block-valued attribute
              # of this construct.
              lspac = 0   # last secondary post amble count
              lsvalue = last_secondary.value()
              # Pop trailing empty (falsy) lines off the block value,
              # counting how many were removed.
              while lsvalue and not lsvalue[-1]:
                lspac += 1
                lsvalue.pop()
            else:
              # TODO(wmh): This probably CAN happen (if someone forgets to add a
              # semicolon to the end of a construct ending with a non-block attribute).
              # But in that situation, the whitespace consumed should be
              # extractable from this method somewhere, yes?
              line = self.peekNonEmpty()
              raise meta.compiler.errors.SyntaxError(
                'Line %d of %s: Not expecting implicit termination with '
                'non-block final attribute' %
                (line.num(), self.filename()))
            # NOTE: lspac is always bound here because the non-block branch
            # above raises.
            preamble = lspac
          else:
            preamble = 0
        else:
          break
    else:
      # This is an empty block ... noop
      pass
    self.debug('ending parseComplexBlock: found %d constructs' % len(result))
    self._postamble = preamble
    return result

  def peekNonEmpty(self):
    """Search forward for the nearest non-empty line.

    Empty lines are those whose indent() is negative.  Note that if every
    remaining line is empty this walks off the end of the line list and
    raises IndexError.

    Returns: Line
    """
    idx = self._lineno - 1
    while self._lines[idx].indent() < 0:
      idx += 1
    return self._lines[idx]


class Attribute(Meta):
  """The Attribute class represents a single typed key/value pair.

  The following notes apply to all attributes
   - every attribute as an 'attribute key'
      - keys are always identifiers
      - sometimes keys are required to appear in the syntax, sometimes they
        are optional
   - every attribute as an 'attribute value'
      - each attribute value has an associated 'attribute type'
         - id
         - idlist
         - word
         - wordlist
         - str
         - param
         - simple block
         - complex block
      - sometimes values are required to appear in the syntax, sometimes they
        are optional (and auto-generated)

  There is a hierarchy of Attribute classes to support various kinds
  of attributes:
     Attribute
       IdAttribute
       WordAttribute
       IdListAttribute
       WordListAttribute
       StrAttribute
       ParamAttribute
       BlockAttribute
         SimpleBlockAttribute
         ComplexBlockAttribute
  """

  def key(self):
    return self._key

  def litkey(self):
    return self._litkey

  def line(self):
    return self._line

  def pos(self):
    return self._pos

  def dent(self):
    return self._dent

  def __init__(self, key, litkey=None, selector=None, line=None, pos=None):
    super(Attribute, self).__init__()
    # field key: str
    #   The canonical attribute key (always an identifier)
    self._key = key

    # field litkey: str or None
    #   The literal key used (may be an abbrev).  If None, use self._key
    self._litkey = litkey

    # TODO(wmh): Rename 'line' to 'num' or store the Line instance instead.

    # field line: int
    #   The line within which the attribute resides.
    #   TODO(wmh): What about attributes whose key and/or value span multiple
    #   lines?
    if line is None:
      # For the time being, it is useful to require a line, even though it is
      # positional ... helps with diagnostic errors greatly to have a line.
      print 'Attribute %s created with line None' % key
      raise meta.compiler.errors.Error(
        'Attribute %s created with line None' % key)
    self._line = line

    # field pos: int
    #   The index within line at which this attribute's key starts.
    #   TODO(wmh): Is this too space inefficient?
    self._pos = pos

    # field dent: int
    #   A bitmask encoding attribute display issues:
    #    - newline before key?
    #    - newline after key, before value?
    #    - key invisible?
    #    - newline after value?
    #    - value invisible?
    #    - value quoted?
    #  TODO(wmh): How to specify which key abbrev to display?  Do we establish
    #  an ordering on abbrevs and encode an index into that ordering within
    #  dent?
    self._dent = 0

    # optional field selector: str
    #   Many attribute keys can be followed by a "selector" to distinguish
    #   members of a family of similar attributes from one another.  The most
    #   important use-case is in 'scope' attributes, where the selector is
    #   used to identify which base language the code represents, but
    #   selectors are used in many other places too.
    #
    #   Because many attributes do not have selectors, and because there will
    #   be many selectors present at any given time, this field is made
    #   optional.
    self.selectorIs(selector)

  def value(self):
    raise NotImplementedError

  def valueStr(self):
    raise NotImplementedError

  def selector(self):
    return self.__dict__.get('_selector', None)

  def selectorIs(self, selector):
    if selector is None:
      if '_selector' in self.__dict__:
        del self._selector
    else:
      self._selector = selector

  def keyStr(self, context=None, mode=0):
    """Return the key string for this attribute.

    Normally, the keystr is simply the key itself, but when the Attribute
    has a selector, the keystr is 'name<selector>'.

    Args:
      context: Context or None
        If non-standard lsel and rsel tokens are desired, we need to know the
        context so that we can format properly. However, if a Context is not
        provided, we use the default tokens.  Note that when attributes
        are registered via ComplexBlockAttribute.registerAttribute(), we
        never pass in a context, and thus we always use the default syntax.
        This is internal memory only, so it shouldn't conflict with
        user-defined tokens ... to be seen.
      mode: int
        Determines whether to use the token specified in input or to use the
        canonical, or some hybrid, etc.

    Returns: str
    """
    if not context:
      context = BOOTSTRAP_CONTEXT
    if mode == 0:
      #result = self.litkey() or self.key()
      result = self.key()
    else:
      result = self.key()
    selector = self.selector()
    if selector is not None:
      result += '%s%s%s' % (
        context.token('lsel'), selector, context.token('rsel'))
    return result

  def keyLiteralStr(self, context=None, mode=0):
    """Return the literal key string for this attribute.

    Normally, the keystr is simply the key itself, but when the Attribute
    has a selector, the keystr is 'name<selector>'.

    Args:
      context: Context or None
        If non-standard lsel and rsel tokens are desired, we need to know the
        context so that we can format properly. However, if a Context is not
        provided, we use the default tokens.  Note that when attributes
        are registered via ComplexBlockAttribute.registerAttribute(), we
        never pass in a context, and thus we always use the default syntax.
        This is internal memory only, so it shouldn't conflict with
        user-defined tokens ... to be seen.
      mode: int
        Determines whether to use the token specified in input or to use the
        canonical, or some hybrid, etc.

    Returns: str
    """
    if not context:
      context = BOOTSTRAP_CONTEXT
    if mode == 0:
      result = self.litkey() or self.key()
    else:
      result = self.key()
    selector = self.selector()
    if selector is not None:
      result += '%s%s%s' % (
        context.token('lsel'), selector, context.token('rsel'))
    return result

  def isFeature(self):
    return False

  def isBlock(self):
    return False

  def isComplexBlock(self):
    return False

  # TODO(wmh): Remember to support optional fields (which must always have a
  # default value).
  # TODO(wmh): Since we need to encode things like which abbrev was used,
  # maybe we add this newline status to that bitmask?  Maybe the bitmask itself
  # is an optional field?

  def nl(self):
    # Optional field '_nl' is true if there was a newline after this attribute.
    return getattr(self, '_nl', False)

  def nlIs(self, nl):
    # If nl matches the default, we delete the field to save memory.
    if nl is False:
      delattr(self, '_nl')
    else:
      self._nl = nl


class FeatureAttribute(Attribute):
  """An attribute whose value is one of a pre-defined set of legal values."""

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    super(FeatureAttribute, self).__init__(
      key, litkey=litkey, selector=selector, line=line, pos=pos)
    self._value = value

  def isFeature(self):
    # Distinguishes feature attributes from all other Attribute kinds.
    return True

  def value(self):
    return self._value

  def valueStr(self):
    # Feature values are plain strings, so value and valueStr coincide.
    return self._value


class ListAttribute(Attribute):
  """Abstract superclass of attributes whose value is a LiteralList."""

  def value(self):
    # Returns: LiteralList
    return self._value

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    super(ListAttribute, self).__init__(
      key, litkey=litkey, selector=selector, line=line, pos=pos)
    # Only LiteralList values are accepted; plain python lists (and anything
    # else) are rejected.
    if not isinstance(value, LiteralList):
      raise meta.compiler.errors.SyntaxError(
        'ListAttributes expect LiteralList values, not %s' % type(value))
    self._value = value

  def valueStr(self):
    return str(self._value)


class IdAttribute(Attribute):
  """An attribute whose value is an identifier."""

  # Shared singleton representing an empty id value; set by Initialize().
  Empty = None

  @classmethod
  def Initialize(cls):
    # Register the canonical empty id attribute with the Context.
    cls.Empty = cls('empty', '', line=-2, pos=-2)
    Context.EMPTY_TYPE_MAP['id'] = cls.Empty

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    super(IdAttribute, self).__init__(
      key, litkey=litkey, selector=selector, line=line, pos=pos)
    self._value = value

  def value(self):
    return self._value

  def valueStr(self):
    # Identifier values are already strings.
    return self._value

IdAttribute.Initialize()


class IntAttribute(Attribute):
  """An attribute whose value is an integer."""

  def value(self):
    # Returns: int
    return self._value

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    super(IntAttribute, self).__init__(
      key, litkey=litkey, selector=selector, line=line, pos=pos)
    # The value is stored as given; callers (see CreateNewAttribute) perform
    # the int() conversion before construction.
    self._value = value

  def valueStr(self):
    return str(self._value)


class IdListAttribute(ListAttribute):
  """An attribute whose value is a list of identifiers."""

  @classmethod
  def Initialize(cls):
    # Register the canonical empty id-list attribute with the Context.
    cls.Empty = cls('empty', LiteralList('List!<>'), line=-2, pos=-2)
    Context.EMPTY_TYPE_MAP['id-list'] = cls.Empty

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    # TODO(wmh): verify that each element of 'value' is an id.
    super(IdListAttribute, self).__init__(
      key, value, litkey=litkey, selector=selector, line=line, pos=pos)

IdListAttribute.Initialize()


class WordAttribute(Attribute):
  """An attribute whose value is a word (no whitespace)."""

  # Shared singleton representing an empty word value; set by Initialize().
  Empty = None

  @classmethod
  def Initialize(cls):
    # Register the canonical empty word attribute with the Context.
    cls.Empty = cls('empty', '', line=-2, pos=-2)
    Context.EMPTY_TYPE_MAP['word'] = cls.Empty

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    super(WordAttribute, self).__init__(
      key, litkey=litkey, selector=selector, line=line, pos=pos)
    # A word may not contain embedded spaces.
    if ' ' in value:
      raise meta.compiler.errors.SyntaxError(
        "WordAttribute cannot have value '%s'" % value)
    self._value = value

  def value(self):
    return self._value

  def valueStr(self):
    # Word values are already strings.
    return self._value

WordAttribute.Initialize()


class WordListAttribute(ListAttribute):
  """An attribute whose value is a list of words."""

  @classmethod
  def Initialize(cls):
    # Register the canonical empty word-list attribute with the Context.
    cls.Empty = cls('empty', LiteralList('List!<>'), line=-2, pos=-2)
    Context.EMPTY_TYPE_MAP['word-list'] = cls.Empty

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    # TODO(wmh): verify that each element is a word?
    super(WordListAttribute, self).__init__(
      key, value, litkey=litkey, selector=selector, line=line, pos=pos)

WordListAttribute.Initialize()


class StrAttribute(Attribute):
  """An attribute whose value is a string (no newlines)."""

  # Shared singleton representing an empty string value; set by Initialize().
  Empty = None

  @classmethod
  def Initialize(cls):
    # Register the canonical empty str attribute with the Context.
    cls.Empty = cls('empty', '', line=-2, pos=-2)
    Context.EMPTY_TYPE_MAP['str'] = cls.Empty

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    super(StrAttribute, self).__init__(
      key, litkey=litkey, selector=selector, line=line, pos=pos)
    self._value = value

  def value(self):
    return self._value

  def valueStr(self, strip=False):
    """The string representation of this value.

    Args:
      strip: bool
        If True, remove a matching pair of leading/trailing literal-string
        quote chars.
    """
    text = self._value
    if strip and text:
      quote = text[0]
      if quote in ('"', "'") and text[-1] == quote:
        text = text[1:-1]
    return text

StrAttribute.Initialize()


class TypeAttribute(Attribute):
  """An attribute whose value is a type (stored as a Type instance)."""

  # Shared singleton representing the empty ('void') type; set by
  # Initialize().
  Empty = None

  @classmethod
  def Initialize(cls):
    # Register the canonical empty type attribute with the Context.
    cls.Empty = cls('empty', 'void', line=-2, pos=-2)
    Context.EMPTY_TYPE_MAP['type'] = cls.Empty

  def value(self):
    # Returns: Type
    return self._value

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    super(TypeAttribute, self).__init__(
      key, litkey=litkey, selector=selector, line=line, pos=pos)
    # TODO(wmh): allow_invalid should be configurable based on whether we
    # want strict type processing or not.
    # 'value' must be the type string itself; passing an already-constructed
    # Type instance is a caller error.  (Message fixed: the check is against
    # Type, not TypeAttribute.)
    if isinstance(value, Type):
      raise meta.compiler.errors.Error(
        'Expecting value arg of TypeAttribute to be string, not Type')
    self._value = Type.Instance(value, allow_invalid=True)

  def valueStr(self):
    # Quote the type string if it contains a space, so it survives reparsing.
    value = str(self._value)
    if ' ' in value:
      result = '"%s"' % value
    else:
      result = value
    return result

TypeAttribute.Initialize()


class BlockAttribute(Attribute):
  """Abstract superclass of attributes whose value is a block."""

  def isBlock(self):
    # Overrides Attribute.isBlock(); block-valued attributes answer True.
    return True

  def __init__(self, key, litkey=None, selector=None, line=None, pos=None):
    super(BlockAttribute, self).__init__(
      key, litkey=litkey, selector=selector, line=line, pos=pos)


class SimpleBlockAttribute(BlockAttribute):
  """An attribute whose value is a collection of Lines."""

  Empty = None

  @classmethod
  def Initialize(cls):
    # Register the canonical empty simple block with the Context.
    cls.Empty = cls('empty', [], line=-2, pos=-2)
    Context.EMPTY_TYPE_MAP['simple'] = cls.Empty

  def value(self):
    # Returns: list
    return self._value

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    super(SimpleBlockAttribute, self).__init__(
      key, litkey=litkey, selector=selector, line=line, pos=pos)
    # TODO(wmh): There is some uncertainty about what the right way to
    # represent a simple block is:
    #   1) a multi-line string
    #   2) a list of strings
    #   3) a list of Line instances
    #
    # Currently, #3 is being used, but that is the most space-inefficient
    # of the three, and the use-cases so far suggest that it is also time
    # inefficient.  For example, when obtaining templates from constructs,
    # we *want* a string that we can instantiate into, and must form such a
    # string from the list of Lines ... terribly inefficient.
    #
    # Am leaning towards implementing simple blocks using a simple
    # multi-line string, and allowing clients to split it into lines if
    # needed.  We can also store the line count explicitly, so that it doesn't
    # need to be continually computed.

    # DEBUG
    # BUGFIX: the message named ComplexBlockAttribute (copy/paste slip);
    # it now correctly names this class.
    if not isinstance(value, list):
      raise TypeError(
          'SimpleBlockAttribute expects list-valued value: not %s' % value)
    # END DEBUG
    self._value = value

  def size(self):
    # TODO(wmh): If we switch to storing a multi-line string, we'll need to
    # store another field for size.
    return len(self.value())

  def asStr(self, context, indent=0, strip_comments=False,
            strip_leading=False, default_lines=None):
    """Return my value as a multi-line string.

    Args:
      context: Context
        Establishes comment token syntax.
      indent: int
        How much to indent each line by.
      strip_comments: bool
        If True, the one-line comment tokens are stripped from the beginning
        of each line.
      strip_leading: bool
        If True, strip leading empty lines.
      default_lines: list of str or None
        The list of lines to use if there are no lines.  If None, nothing used.

    Returns: str
    """
    dentstr = ' ' * indent
    com = context.token('remark')
    lcom = len(com)
    lines = self.value()
    if not lines:
      lines = default_lines or []
    nlines = len(lines)
    i = 0

    if strip_leading:
      # Advance i past leading whitespace-only lines.
      while i < nlines and not lines[i].strip():
        i += 1

    if i < nlines:
      out_lines = []
      # BUGFIX: render only lines[i:], so stripped leading empty lines are
      # actually omitted (previously all lines were re-emitted).
      for line in lines[i:]:
        if strip_comments and line.startswith(com):
          # TODO(wmh): This will only strip at the very beginning of the line.
          # What if there is a line with some leading whitespace?  Should the
          # comment be stripped?  Replaced with spaces?
          s = lcom
          if len(line) > s and line[s] == ' ':
            s += 1
          line = line[s:]
        out_lines.append(dentstr + line + '\n')
      result = ''.join(out_lines)
    else:
      result = ''
    return result

  def asLines(self, context, indent='', width=0, mode=0):
    """Return the lines making up this simple block.

    Args:
      context: Context
      indent: str
        Prefix prepended to every line.
      width: int
      mode: int
        0 = basics
        1 = ?
        2 = ?
        ...

    Returns: list of str
    """
    # TODO(wmh): Add support for width and mode.
    result = []
    for line in self._value:
      result.append(indent + line)
    return result

SimpleBlockAttribute.Initialize()


class ComplexBlockAttribute(BlockAttribute):
  """An attribute whose value is a collection of Constructs."""

  # Shared sentinel representing an empty complex block; see Initialize().
  Empty = None

  @classmethod
  def Initialize(cls):
    """Create the shared Empty sentinel and register it for 'complex' types."""
    cls.Empty = cls('empty', [], line=-2, pos=-2)
    Context.EMPTY_TYPE_MAP['complex'] = cls.Empty

  def value(self):
    return self._value

  def parent(self):
    return self._parent

  def parentIs(self, parent):
    self._parent = parent

  def postamble(self):
    return self._postamble

  def postambleIs(self, postamble):
    self._postamble = postamble

  def __init__(self, key, value, litkey=None, selector=None, line=None,
               pos=None):
    """Initializer.

    Args:
      key: str
      value: list of Construct
        The constructs to register, in order.
      litkey: str or None
      selector: str or None
      line: int or None
      pos: int or None

    Raises:
      TypeError: If value is not a list.
    """
    super(ComplexBlockAttribute, self).__init__(
      key, litkey=litkey, selector=selector, line=line, pos=pos)
    # DEBUG
    if not isinstance(value, list):
      raise TypeError(
          'ComplexBlockAttribute expects list-valued value: not %s' % value)
    # END DEBUG

    # field value: list
    #   The ordered list of constructs.
    self._value = []

    # field map: dict
    #   Maps construct keys to Construct instances.  Keys must be unique.
    self._map = {}

    # field parent: Construct
    #   Each ComplexBlock maintains a pointer to its parent construct so that
    #   we can traverse the lexical stack upwards. Note that the
    #   ComplexBlockAttribute class has a parent of type Construct, and the
    #   Construct class has a parent of type ComplexBlockAttribute. This field
    #   is initialized in Construct.registerAttribute()
    self._parent = None

    # field postamble: int
    #   The number of empty lines after the last construct before the
    #   block terminates.
    #   TODO(wmh): Make this an optional field.
    self._postamble = 0

    # Populate the attribute with any specified constructs.
    for construct in value:
      self.registerConstruct(construct)

  def registerConstruct(self, construct):
    """Add a construct to this block, enforcing unique construct ids.

    Args:
      construct: Construct

    Raises:
      Error: If a construct with the same id is already registered.
    """
    cid = construct.id()
    if cid in self._map:
      raise meta.compiler.errors.Error(
        "Attempt to register construct '%s' when it already exists" % cid)
    # Only claim parentage once we know the construct will be kept, so a
    # rejected duplicate is not left pointing at this block.
    construct.parentIs(self)
    self._value.append(construct)
    self._map[cid] = construct

  def isComplexBlock(self):
    return True

  def cons(self, cid, default=ERROR):
    """Obtain the construct with given id.

    If no construct with the given cid exists within this complex block,
    default is returned.  If default is ERROR, an error is raised.

    Args:
      cid: str
      default: various
        Value returned when cid is absent; ERROR means raise instead.

    Returns: Construct or default

    Raises:
      InvalidConstructId: If cid is absent and default is ERROR.
    """
    result = self._map.get(cid, default)
    if result is ERROR:
      raise meta.compiler.errors.InvalidConstructId(
        'Failed to find construct %s in complex block' % cid)
    return result

  def asLines(self, context, indent='', width=0, mode=0):
    """Create a list of lines representing the textual repr. of this construct.

    Args:
      context: Context
        Establishes formatting context.
      indent: str
        How much indentation before each line.
      width: int
        If non-zero, indicates the maximum width that a line can take up, in
        the generated output.
      mode: int
        0 = verbatim (regenerate what was parsed)
        1 = canonical heuristic (do not use context, use hard-coded rules for
            determining when to hide keys/values, when to newline, etc.)
        2 = canonical exact (use context and config to establish when to
            newline and what abbrevs to use, etc.)
        3 = canonical verbose (provide canonical keys/values for everything)

    Returns: list of str
    """
    result = []
    for construct in self._value:
      cons_lines = construct.asLines(
        context, indent=indent, width=width, mode=mode)
      result.extend(cons_lines)

    # The _postamble field stores how many empty lines exist at the end of
    # this block.
    result.extend([''] * self._postamble)

    return result

  def newChild(self, ctype, cid, feature=None, secondary=None,
               primary_type=IdAttribute, construct_type=None,
               line=0, pos=0, termcode=0):
    """Create a new instance of construct_type and add to self.

    Args:
      ctype: str
        The kind of the to-be-created construct.
      cid: str
        The construct id of the to-be-created construct.
      feature: list of FeatureAttribute or None
        The feature attributes, in order.
      secondary: list of Attribute or None
        The secondary attributes, in order.
      primary_type: subclass of Attribute
        The type of the primary attribute.
      construct_type: subclass of Construct or None
        The type of the construct to be created.  Defaults to
        GenericConstruct.  Probably won't work for specialized construct
        types that do not accept ctype as an arg.
      line: int
        The src line number at which the new construct is affiliated.
      pos: int
        The src character pos at which the new construct is affiliated.
      termcode: int
        The termcode for the construct.  0 is none, 15 is all.

    Returns: Construct
      The newly created (and registered) construct.
    """
    if construct_type is None:
      construct_type = GenericConstruct
    construct = construct_type.New(
      ctype, cid, feature=feature, secondary=secondary,
      primary_type=primary_type, termcode=termcode, line=line, pos=pos)
    self.registerConstruct(construct)
    return construct


# Create the class-level Empty sentinel and register it with Context.
ComplexBlockAttribute.Initialize()


class Construct(Meta):
  """A syntactic entity, consisting of a collection of attributes."""

  # Shared sentinel representing an empty construct; see Initialize().
  Empty = None

  @classmethod
  def Initialize(cls):
    """Create the shared Empty sentinel and register it for 'construct'."""
    cls.Empty = cls('empty')
    Context.EMPTY_TYPE_MAP['construct'] = cls.Empty

  @classmethod
  def New(cls, ctype, cid, feature=None, secondary=None,
          primary_type=IdAttribute, termcode=0, line=0, pos=0):
    """Create a new instance of myself from args.

    Args:
      ctype: str
        The kind of the to-be-created construct.
      cid: str
        The construct id of the to-be-created construct.
      feature: list of FeatureAttribute or None
        The feature attributes, in order.
      secondary: list of Attribute or None
        The secondary attributes, in order.
      primary_type: subclass of Attribute
        The type of the primary attribute.
      termcode: int
        The termcode for the construct.  0 is none, 15 is all.
      line: int
        The src line number at which the new construct is affiliated.
      pos: int
        The src character pos at which the new construct is affiliated.

    Returns: Construct
    """
    construct = cls(ctype, cid)
    primary = primary_type(ctype, cid, line=line, pos=pos)
    # Feature attributes must be registered before the primary attribute so
    # that self._order reflects source order.
    if feature:
      for attr in feature:
        construct.registerAttribute(attr)
    construct.registerAttribute(primary)
    if secondary:
      for attr in secondary:
        construct.registerAttribute(attr)
    construct.termcodeIs(termcode)
    return construct

  def id(self):
    return self._id

  def idIs(self, id):
    self._id = id

  def parent(self):
    return self._parent

  def parentIs(self, parent):
    self._parent = parent

  def order(self):
    return self._order

  def attributes(self):
    return self._attributes

  def termcode(self):
    return self._termcode

  def termcodeIs(self, termcode):
    self._termcode = termcode

  def preamble(self):
    return self._preamble

  def preambleIs(self, preamble):
    self._preamble = preamble

  def __init__(self, id_, context=None):
    """Initializer.

    Notes:
     - it is important that no Construct initializer (in any subclass) register
       a primary attribute (otherwise, we'd need to insert feature attributes
       before it).

    Args:
      id_: str
      context: Context or None
    """
    super(Construct, self).__init__()
    # field id: str
    #   The unique name identifying this construct within its parent scope.
    #   TODO(wmh): Given that every construct has a primary feature attribute
    #   that stores the id, storing it here is redundant.  Maybe we instead
    #   have an explicit field for the primary attribute?  Or simply index
    #   into self._attributes, since we know the primary key (except in
    #   situations where the primary key is parameterized).
    self._id = id_

    # field parent: ComplexBlockAttribute
    #   Every construct (except the absolute root Schema construct) resides
    #   within a parent complex block.  This field is initialized in
    #   ComplexBlockAttribute.registerConstruct().
    self._parent = None

    # field order: list of Attribute
    #   The list of all Attributes making up this construct, in order specified.
    self._order = []

    # field attributes: dict mapping attribute key to Attribute instance
    self._attributes = {}

    # field termcode: int
    #   Encodes how this construct was terminated:
    #      0 (or -1) = implicit (no terminator specified, but indentation terminates)
    #      1 = ';'
    #      3 = 'end;'
    #      7 = 'end <cons>;'
    #     15 = 'end <cons> <id>;'
    #
    #   TODO(wmh): Currently, parseAttribute() returns a termcode of -1 to
    #   indicate implicitness, and 0 to indicate "not yet determined", whereas
    #   parseConstruct() uses 0 to indicate implicitness.  Need to clean up
    #   this discrepancy.
    #
    #   The above values are based on the following bit mask semantics:
    #     bit 0 = ';'
    #     bit 1 = 'end'
    #     bit 2 = <cons>
    #     bit 3 = <id>
    #   where bit i cannot be set unless all bits less than i are also set
    #   (although this last part isn't really necessary ... maybe we want
    #   to allow <id> to be present without allow <cons> to be present?)
    self._termcode = None

    # field preamble: int
    #   The number of empty lines before this construct.
    #   TODO(wmh): Make this an optional field.
    self._preamble = 0

    # field the_context: Context
    #   The context for this construct.  This defines the syntactical context
    #   within which a construct is parsed and rendered.  There is a "root"
    #   context defined by each meta-language (in the meta-language-specific
    #   schema.meta file).  Every construct has the potential to modify the
    #   context for it and all child constructs (by defining a 'config' block
    #   within the construct), but often constructs do NOT provide such a
    #   'config' block, in which case the context of this construct is the
    #   same instance as for the parent.
    self._the_context = context

  def context(self, context=None):
    """Obtain the context.

    Args:
      context: Context or None
        If provided, it is returned as-is; otherwise self's stored context
        (falling back to BOOTSTRAP_CONTEXT) is used.

    Returns: Context
    """
    # TODO(wmh): Need to figure out how to deal with context.  The current
    # code base is passing context in as args to way too many methods and
    # making things very inconvenient.  But at the same time, having every
    # single construct reserve a field for a construct might bloat constructs
    # beyond what we want (or not?).  Almost every construct would share
    # the same Context instance (we only need a new Context when a 'config'
    # attribute exists in a construct).
    if not context:
      context = self._the_context
      # TODO(wmh): Fix this ... self._the_context should never be None
      if not context:
        context = BOOTSTRAP_CONTEXT
    return context

  def parentConstruct(self):
    """Obtain my parent construct.

    Returns: Construct
    """
    block_parent = self.parent()
    if block_parent:
      result = block_parent.parent()
    else:
      result = None
    return result

  def kindid(self):
    return '<Unknown Construct Kind> %s' % self._id

  def _idComponents(self):
    """Collect the ids from the outermost non-File ancestor down to self.

    Returns: list of str
    """
    revlist = []
    obj = self
    while obj:
      if obj.kind() == 'File':
        break
      revlist.append(obj.id())
      obj = obj.parentConstruct()
    return list(reversed(revlist))

  def fullid(self):
    """Dot-separated path of ids from outermost ancestor (below File) to self."""
    return '.'.join(self._idComponents())

  def fullkindid(self):
    return self.kind() + ' ' + self.fullid()

  def idPath(self):
    """Slash-separated path of ids from outermost ancestor (below File) to self."""
    return '/'.join(self._idComponents())

  def findAncestor(self, kind):
    """Find the closest ancestor of the given kind.

    If self has kind, self is returned.

    Args:
      kind: str
        The kind of the ancestor to find.

    Returns: Construct or None
    """
    target = self
    while target and target.kind() != kind:
      target = target.parentConstruct()
    return target

  def primary(self):
    """Return my primary attribute.

    Returns: Attribute
    """
    return self.attr(self.kind())

  def registerAttribute(self, attribute):
    """Register an attribute with this construct, maintaining order.

    Args:
      attribute: Attribute or None
        The attribute to register.  If None, it is a noop.

    Returns: Attribute or None
      Returns None if the passed-in attribute is None, and also when the
      specified attribute name is a duplicate.
    """
    if attribute:
      # we always use default context here, so the keys in
      # Construct._attributes always uses the default lsel and rsel tokens,
      # even when the user has provided different user-facing tokens. This
      # allows us to avoid passing Context instances everywhere just so they
      # are available here.
      key = attribute.keyStr(context=None)
      if key in self._attributes:
        attribute = None
        # raise meta.compiler.errors.SyntaxError(
        #   "Duplicate key '%s' found" % key)
      else:
        self._attributes[key] = attribute
        self._order.append(attribute)
        if attribute.isComplexBlock():
          attribute.parentIs(self)
    return attribute

  def xattr(self, key, missing=ERROR, context=None, params=None, all=False):
    """Improved version of attr().

    Obtain all attributes matching params.  Requires context be provided.

    Args:
      key: str
        The key of the attribute to return.
      missing: various
        The attribute to return if there is no attribute of the given key.
        A value of ERROR indicates that an InvalidAttributeKey exception
        should be raised. A value of EMPTY indicates that a type-specific
        empty attribute value should be returned.  A value of LOOKUP indicates
        that we should perform config lookup.
      context: Context
        Required.  Establishes the lsel/rsel tokens used to build
        parameterized keys.
      params: list of str
        Required.  Each entry is used to form a candidate key; an empty
        string entry means the non-parameterized key itself.
      all: bool
        If True, return all matching attributes; otherwise the first match.

    Returns: Attribute (if all==False) or list of Attribute (all==True) or None

    Raises:
      Error: If context or params is not provided.
      InvalidAttributeKey: If no match against any params is found
    """
    # TODO(wmh): IMPORTANT: This method changes the semantics of various
    # default types, relative to attr(), attrval(), etc. If this works out,
    # the other methods need to be updated to share the same semantics.
    if not context or not params:
      raise meta.compiler.errors.Error(
        'Must provide real context and params to xattr')
    attrs = self._attributes
    result = []
    keys = []
    lsel, rsel = (context.token('lsel'), context.token('rsel'))
    for param in params:
      k = key
      if param:
        k += lsel + param + rsel
      keys.append(k)
    for fkey in keys:
      attribute = attrs.get(fkey, missing)
      if attribute is ERROR:
        attribute = None
      elif attribute is EMPTY:
        # If an entry exists in EMPTY_TYPE_MAP, use it, else use None
        type_ = context.type(self.kind(), fkey)
        attribute = Context.EMPTY_TYPE_MAP.get(type_, None)
      elif attribute is LOOKUP:
        attribute = context.defattr(self, fkey)
      if attribute is not None:
        if not all:
          # TODO(wmh): It is rather problematic for this method to return
          # Attribute instances sometimes and list of Attribute instance
          # other times ... this will not be implemntable in C++ or Java.
          # I suspect we'll never want all=True, so leaving it as is for
          # now and will clean up later.
          return attribute
        result.append(attribute)
    if not result and missing is ERROR:
      raise meta.compiler.errors.InvalidAttributeKey(
        'Failed to find any of %s in %s' % (keys, self.kindid()))

    if all:
      return result
    else:
      # If unique, we would have returned had we found anything.
      return None

  def xattrval(self, key, missing=ERROR, default=None, context=None,
               params=None):
    """Obtain the attribute matching key/params, falling back to default.

    See xattr() for the semantics of missing/context/params.

    Returns: Attribute or default
    """
    attribute = self.xattr(
      key, missing=missing, context=context, params=params, all=False)
    # BUGFIX: was 'isinstace' (NameError at runtime).
    if not isinstance(attribute, Attribute):
      # TODO(wmh): Debugging output; remove once xattr() semantics settle.
      print('Here in xattrval with attribute %s' % attribute)
    if attribute is None:
      attribute = default
    return attribute

  def attr(self, key, default=ERROR, context=None, param=None):
    """Obtain the Attribute with the given key.

    Args:
      key: str
        The key of the attribute to return.
      default: various
        The value to return if there is no attribute of the given key.
        A value of ERROR indicates that an InvalidAttributeKey exception
        should be raised.  EMPTY and LOOKUP trigger context-based handling
        (both require context).
      context: Context or None
        If provided, and there is no attribute of the specified key in
        self, look it up in context.syntax()
      param: str or None
        If given, look first for a parameterized attribute, else use non
        parameterized version.

    Returns: Attribute or default
    """
    result = None
    if param:
      pkey = '%s%s%s%s' % (
        key, context.token('lsel'), param, context.token('rsel'))
      result = self._attributes.get(pkey, None)
    if result is None:
      result = self._attributes.get(key, default)
    if result is ERROR:
      raise meta.compiler.errors.InvalidAttributeKey(
        'Failed to find attribute %s in construct %s' %
        (key, self.id()))
    elif result is EMPTY:
      if not context:
        raise meta.compiler.errors.Error(
          'Must provide context when default=EMPTY')
      type_ = context.type(self.kind(), key)
      try:
        result = Context.EMPTY_TYPE_MAP[type_]
      except KeyError:
        raise meta.compiler.errors.Error(
          "Failed to find Context.EMPTY_TYPE_MAP['%s'] for kind %s key %s" %
          (type_, self.kind(), key))
    elif result is LOOKUP:
      if not context:
        raise meta.compiler.errors.Error(
          'Must provide context when default=LOOKUP')
      result = context.defattr(self, key)
    return result

  def attrval(self, key, defattr=ERROR, context=None, param=None):
    """Obtain the value of the Attribute with the given key.

    Args:
      key: str
        The key of the attribute to return a value for.
      defattr: various
        What Attribute to use if key is absent (see attr()).
      context: Context or None
        If provided, and there is no attribute of the specified key in
        self, look it up in context.syntax()
      param: str or None
        If given, look first for a parameterized attribute, else use non
        parameterized version.

    Returns: attribute value, the non-Attribute default, or None
    """
    # TODO(wmh): This method is called very often.  Can we optimize it?
    attr = self.attr(key, default=defattr, context=context, param=param)
    if attr:
      if isinstance(attr, Attribute):
        result = attr.value()
      else:
        result = attr
    else:
      result = None
    return result

  def attributesMatching(self, match, context=None, canonical=False):
    """Obtain all attributes whose key matches the regexp in 'match'.

    Args:
      match: regexp or str
        What to match against each attr key.
      context: Context
        The standard bugbear.
      canonical: bool
        Compare against the canonical attribute key, not whatever happens
        to exist in the stream.

    Returns: dict
      Maps attribute keys to Attribute instances.  If canonical is True,
      the keys are the canonical keys, not necessarily the keys that exist
      in the construct.
    """
    result = {}
    # NOTE: items() rather than iteritems() — identical behavior in
    # Python 2, and forward compatible.
    for akey, attribute in self._attributes.items():
      if canonical:
        akey = context.cankey(self.id(), akey)
      if re.match(match, akey):
        result[akey] = attribute
    return result

  def asLines(self, context=None, indent='', width=0, mode=0):
    """Create a list of lines representing the textual repr. of this construct.

    Args:
      context: Context
        Establishes formatting context.
      indent: str
        How much indentation before each line.
      width: int
        If non-zero, indicates the maximum width that a line can take up, in
        the generated output.
      mode: int
        0 = verbatim (regenerate what was parsed)
        1 = canonical heuristic (do not use context, use hard-coded rules for
            determining when to hide keys/values, when to newline, etc.)
        2 = canonical exact (use context and config to establish when to
            newline and what abbrevs to use, etc.)
        3 = canonical verbose (provide canonical keys/values for everything)

    Returns: list of str
    """
    if not context:
      context = self.context()

    # TODO(wmh):
    #  - we need to pass in a Context instance so that we know how to
    #     - terminate constructs
    #     - start/end blocks
    #     - etc.
    #  - we need to pass in a Config instance so that we can
    #     - determine canonical information (order, abbrevs, padding, etc.)
    #
    #  - this may not be the right place for this method ... maybe it belongs
    #    in MetaFile or in an OutputFile class.
    #
    # TODO(wmh): Support mode!

    if width <= 0:
      # Effectively unbounded line width.
      width = 1000000

    lines = [''] * self.preamble()

    current = indent
    dentlen = len(indent)
    attr = None
    for attr in self.order():
      clen = len(current)
      # A separating space is needed unless we are at the start of a line.
      extra_space = 0 if clen == dentlen else 1

      # TODO(wmh): determine whether to use the akey or an abbrev.
      if attr.isFeature():
        keystr = ''
      else:
        keystr = attr.keyLiteralStr(context, mode=mode)
      keylen = len(keystr)

      # Establish whether the key/value string can fit on the current line,
      # or whether we need to move to the next line.
      if attr.isBlock():
        # We have a block-valued attribute.  We see if we can fit the key
        # on current line (if not, add it on next line).  Then add all lines
        # making up the block itself, properly indented.
        scope_start = ':'  # TODO(wmh): context.token('scope')
        scope_indent = '  '  # TODO(wmh): context.token('scopedent')
        attrstr = keystr + scope_start
        attrlen = len(attrstr)
        if clen + extra_space + attrlen <= width:
          # We can fit the block key (and scope start) on the current line.
          if extra_space:
            current += ' '
          current += attrstr
        else:
          # We cannot fit the block on the current line.
          lines.append(current)
          current = indent + attrstr
        lines.append(current)
        lines.extend(
          attr.asLines(context, indent=indent + scope_indent, width=width, mode=mode))
        current = indent

      else:
        # We do NOT have a block value, so we simply form the attribute string
        # and see if it fits on the current line or not.
        attrstr = keystr
        if attrstr:
          attrstr += ' '
        # Compute the value string once (it may be expensive).
        attrstr += attr.valueStr()
        attrlen = len(attrstr)
        if clen + extra_space + attrlen <= width:
          if extra_space:
            current += ' '
          current += attrstr
          if attr.nl():
            # This attribute is marked as having a newline after it, so we add
            # said newline to our output stream.
            lines.append(current)
            current = indent
        else:
          lines.append(current)
          current = indent + attrstr
    if attr and attr.isBlock():
      # Last attribute is a block, so we end cleanly
      # TODO(wmh): Generalize this to support other terminator syntaxes.
      if current.strip():
        # If we have a current line with actual text, print it out before
        # printing the end-of-construct line afterwards.
        lines.append(current)
      ts = self.termstr(context)
      if ts:
        lines.append(indent + ts)

    else:
      # Last attribute is not a block, so we can terminate more simply.
      if current:
        current += ';'
        lines.append(current)
      elif lines:
        lines[-1] += ';'
      else:
        # BUGFIX: guard against IndexError when there are no attributes,
        # no preamble, and an empty indent.
        lines.append(';')
    return lines

  def asStr(self, context=None, indent='', numbered=False, mode=0):
    """Return the textual representation of this construct as one string.

    Args:
      context: Context or None
      indent: str
      numbered: bool
        If True, prefix each line with a 1-based line number.
      mode: int
        See asLines().

    Returns: str
    """
    if numbered:
      result = '\n'.join(
        ['%4d: %s' % (i, line) for i, line in
         enumerate(
           self.asLines(context=context, indent=indent, mode=mode), start=1)])
    else:
      # BUGFIX: mode was previously dropped in this branch.
      result = '\n'.join(
        self.asLines(context=context, indent=indent, mode=mode))
    return result

  def dump(self, fp=sys.stdout, indent='', context=None):
    """Write a simple key/value listing of this construct to fp."""
    fp.write('%sConstruct %s %s\n' % (indent, self.kind(), self.id()))
    for attr in self.order():
      fp.write(
        '  %s%-10s = %s\n' %
        (indent, attr.keyStr(context=context), attr.valueStr()))

  def termstr(self, context=None):
    """The terminator string to use for self.

    Args:
      context: Context

    Returns: str
      Empty string for implicit termination.
    """
    if not context:
      context = self.context()
    termcode = self.termcode()
    # termcode may still be None when never explicitly set; treat that the
    # same as implicit termination (previously relied on Py2's None <= 0).
    if termcode is None or termcode <= 0:
      result = ''
    else:
      items = []
      if termcode & 0x2:
        items.append(context.token('end'))
      if termcode & 0x4:
        items.append(self.kind())
      if termcode & 0x8:
        items.append(self.id())
      result = ' '.join(items)
      if termcode & 0x1:
        result += context.token('term')
    return result

  def __str__(self):
    # We do NOT use this method to provide canonicalization, because there
    # are far too many configuration options.  Instead, this is used when
    # a basic identification of the construct is desired.
    return '<%s>' % self.kindid()

  def scope(self, context=None, selectors=None, default=EMPTY):
    """Obtain a 'scope' attribute.

    Args:
      context: Context
      selectors: list of str
        The list of selectors to consider.  If empty, obtain 'scope' without
        selectors.  If not empty, one must provide the empty string to
        search the non-selector variant.
      default: various
        Establishes how to handle missing scopes (EMPTY, ERROR, REQUIRED,
        or a fallback value).

    Returns: Attribute or default
    """
    context = self.context(context)
    result = None
    if selectors is None:
      # TODO(wmh): If we have a baselanguage, selectors should be
      # baselang.allids(empty=False)
      selectors = ['']
    elif isinstance(selectors, str):
      selectors = [selectors]
    lsel = context.token('lsel')
    rsel = context.token('rsel')
    # TODO(wmh): A much more efficient strategy here would be to map all of
    # the baselang-specific variants scope<Yk> to a canonical scope<Y> at
    # time of parsing.  This would be more efficient, and would also ensure
    # that a compile-time error occurs if someone specifies, for example,
    # both scope<python> and scope<py>.
    for selector in selectors:
      if selector:
        attrname = 'scope%s%s%s' % (lsel, selector, rsel)
      else:
        attrname = 'scope'
      result = self.attr(attrname, default=None)
      if result:
        break
    if result is None:
      if default == EMPTY:
        # We need to return the empty block for this block type.
        scope_type = context.type(self.kind(), 'scope')
        try:
          result = Context.EMPTY_TYPE_MAP[scope_type]
        except KeyError:
          pprint.pprint(context._syntax)
          raise meta.compiler.errors.InternalError(
            'No %s entry in EMPTY_TYPE_MAP (%s)' % (scope_type, self.kind()))
      elif default == ERROR:
        raise meta.compiler.errors.Error('Failed to find a scope')
      elif default == REQUIRED:
        raise meta.compiler.errors.RequiredAttributeValue('Expecting a scope')
      else:
        result = default
    return result

  def child(self, cid, selectors=None, kind=None):
    """Obtain a child from scope.

    Raises:
      InvalidConstruct: If kind != result.kind()

    Args:
      cid: str
        The id of the child to obtain
      selectors: str
        Identifies which scope to obtain
      kind: str or None
        If not None, the kind of the resulting construct must match.

    Returns: Construct or None
    """
    context = self.context()
    scope = self.scope(context=context, selectors=selectors)
    result = scope.cons(cid, default=None)
    if kind and result and result.kind() != kind:
      # BUGFIX: was referencing undefined 'name'; use cid.
      raise meta.compiler.errors.InvalidConstruct(
        'Child %s of %s expected to be %s not %s' %
        (cid, self.fullid(), kind, result.kind()))
    return result

  def span(self, attr=None):
    """The line.col span of this attribute.

    Args:
      attr: str
        The attribute key/abbrev for which position information is desired.
        TODO(wmh): Currently unused.

    Returns: str
    """
    # BUGFIX: removed dead 'start' computation which raised TypeError
    # (str + int) whenever pos() > -1 and whose result was discarded.
    # TODO(wmh): Compute the real end position; 0 is a placeholder.
    end = 0
    return '%d.%d-%d' % (
      self.line(), self.pos(), end)

# Create the class-level Empty sentinel and register it with Context.
Construct.Initialize()


class GenericConstruct(Construct):
  """A Construct whose kind is stored per-instance rather than per-class.

  Normally each kind of construct has an associated Construct subclass;
  GenericConstruct supports describing constructs generically, e.g. while
  bootstrapping before schema-defined classes exist.
  """

  # Maps a secondary key appearing on a bootstrap attribute line to a
  # (full attribute key, Attribute subclass) pair.
  BOOTSTRAP_MAP = {
    'Attribute': ('Attribute', IdAttribute),
    '=': ('default', StrAttribute),
    'default': ('default', StrAttribute),
    ':': ('type', StrAttribute),
    'type': ('type', StrAttribute),
    'abbrevs': ('abbrevs', WordListAttribute),
  }

  def kind(self):
    return self._kind

  def __init__(self, kind, id):
    """Initializer.

    Args:
      kind: str
        The construct kind (e.g. 'Construct', 'Attribute').
      id: str
        The construct id.
    """
    super(GenericConstruct, self).__init__(id)
    # field kind: str
    #   The construct kind.  Normally, each kind of construct has an
    #   associated subclass of Construct, and instances of a specific kind
    #   of Construct are instances of that subclass.  However, in some
    #   situations it is useful to support a more generic mechanism for
    #   describing constructs.
    self._kind = kind

  def kindid(self):
    """Return 'kind id' as a single human-readable string."""
    return '%s %s' % (self._kind, self.id())

  @classmethod
  def BootstrapConstruct(cls, cid, spec):
    """Build a 'Construct' construct from a multi-line bootstrap spec.

    Args:
      cid: str
        The id of the construct being bootstrapped.
      spec: str
        Newline-separated attribute lines, each parsed by
        BootstrapAttribute().

    Returns: GenericConstruct
    """
    result = GenericConstruct('Construct', cid)
    unused_primary = result.registerAttribute(
      IdAttribute('Construct', cid, line=-1, pos=-1))
    config = result.registerAttribute(
      ComplexBlockAttribute('config', [], line=-1, pos=-1))

    lineno = 0
    for attr_line in spec.strip().split('\n'):
      lineno += 1
      attribute = cls.BootstrapAttribute(attr_line, lineno)
      if attribute:
        config.registerConstruct(attribute)

    result.termcodeIs(15)
    return result

  @classmethod
  def BootstrapAttribute(cls, attr_line, lineno):
    """Given a one-line description of an attribute, create an Attribute.

    Args:
      attr_line: str
        The line.
      lineno: int
        The line number of the line.

    Returns: Attribute or None
      Returns None if attr_line is empty.
    """
    data = attr_line.strip().rstrip(';').split()
    if not data:
      return None
    cls_map = cls.BOOTSTRAP_MAP
    # Tokens before 'Attribute' are features; 'Attribute' and everything
    # after form key/value secondary pairs (including the primary id).
    aindex = data.index('Attribute')
    features = data[:aindex]
    secondaries = data[aindex:]
    aid = data[aindex + 1]

    attribute = GenericConstruct('Attribute', aid)

    if features:
      fi = 0
      # An optional leading priority feature, then kind, then optional
      # parameter.  Anything beyond that is an error.
      if features[0] in ['critical', 'major', 'medium', 'minor']:
        attribute.registerAttribute(
          FeatureAttribute('priority', features[0], line=lineno, pos=-1))
        fi += 1
      attribute.registerAttribute(
        FeatureAttribute('kind', features[fi], line=lineno, pos=-1))
      if len(features) > fi + 1:
        attribute.registerAttribute(
          FeatureAttribute(
            'parameter', features[fi + 1], line=lineno, pos=-1))
        if len(features) > fi + 2:
          raise meta.compiler.errors.Error(
            'Unknown features: %s' % features[fi + 2:])
    if len(secondaries) % 2 != 0:
      # BUG FIX: this message previously referenced an undefined name 'cid'
      # (raising NameError); the attribute id in scope here is 'aid'.
      raise meta.compiler.errors.Error(
        'Line %d for %s: Odd number of secondaries: %s' %
        (lineno, aid, str(secondaries)))
    i = 0
    while i < len(secondaries):
      key = secondaries[i]
      value = secondaries[i+1]
      # TODO(wmh): Generalize this .. ensure that all list literals have
      # list attribute types.
      if value.startswith('List') and key == 'abbrevs':
        value = LiteralList(value)
      if key in cls_map:
        fullkey, acls = cls_map[key]
        attribute.registerAttribute(acls(fullkey, value, line=lineno, pos=-1))
      else:
        raise meta.compiler.errors.Error('Invalid secondary key %s' % key)
      i = i + 2
    return attribute

  def compileMeta(self, metafile, baselang, config):
    """Compile myself into baselang source code.

    Any CompileTimeError exception that is raised by this call chain is
    captured and converted to an error reported on metafile.  Exceptions
    outside the CompileTimeError are raised normally.

    Args:
      metafile: MetaFile
        Provides access to parser, compiler and baselang.
      baselang: BaseLanguage
        What language to compile into.
      config: dict
        In case context-sensitive configuration information is needed.  One use
        of this is to indicate when we are compiling unittest methods as
        opposed to actual methods, without having to resort to state in the
        'context' object (which would preclude certain kinds of parallelizing.
    """
    # TODO(wmh): Support construct-specific classes. It is true that
    # the following reflexive approach to delegating *all* construct
    # compilation into the BaseLanguage hierarchy means that one of
    # the primary reasons for creating construct-specific classes is
    # no longer necessary, having such classes (as metaclasses) is
    # useful for many other reasons. Create construct-specific
    # subclasses of Construct for every construct defined in a schema.
    Log.info('compileMeta: %s', self.kindid())
    Log.indent()

    baselang.envIs(metafile)  # wmh experiment 2013/12/24

    # Before we compile this construct, we invoke the '_augmentConstruct'
    # template method on baselang to allow for baselang-specific modifications
    # of the parse tree appropriate at this time. We rely on baselang.env()
    # being set (that is, I'm moving more and more towards the BaseLanguage
    # hierarchy NOT being singletons, but rather capable of a single instance
    # for each meta file (this would allow us to parallelize, and definitely
    # helps avoid passing around env everywhere. Can config be merged into
    # baselang or env or something else as well??
    new_config = baselang._augmentConstruct(metafile, self, config)
    if new_config is not None:
      config = new_config
    methname = 'compile_' + self.kind()
    compile_something = getattr(baselang, methname, None)
    if compile_something:
      try:
        compile_something(metafile, self, config)
      except CompileTimeError as e:
        # BUG FIX: the error was previously reported twice (once without
        # location, once with).  When errors are raised, an attribute can be
        # specified on the exception to more accurately place their location.
        attribute = e.attribute() or self.primary()
        metafile.error(str(e), line=attribute.line(), pos=attribute.pos())
    else:
      raise meta.compiler.errors.InternalError(
        'BaseLanguage %s does not implement %s' % (baselang.name(), methname))

    Log.undent()


class MetaLanguage(Meta):
  """Abstract superclass of meta languages (e.g. Meta(Oopl), Meta(Doc))."""

  # The meta-level versions of BaseLanguageOopl.CONFIG
  CONFIG = {
    'self': 'self',
    'cls': 'cls',

    'null': 'null',
    'true': 'true',
    'false': 'false',

    'rem': '/#',   # this is specified in context.tokens('remark') ... remove here?
    'rem_start': None,
    'rem_end': None,

    'class_primary': None,
    'initializer_name': None,
  }

  def __init__(self, id_, name, compiler):
    """Initializer.

    Args:
      id_: str
        Identifier-safe representation of this language.
      name: str
        Human-readable representation of this language.
      compiler: Compiler
        Grants access to global compilation state.
    """
    super(MetaLanguage, self).__init__()

    # field id: str
    #   The representation of this class safe to use everywhere (variable
    #   names, method names, class names, file names, etc).
    self._id = id_

    # field name: str
    #   The representation of this class used in human-readable contexts.
    self._name = name

    # field bases: dict
    #   The BaseLanguage instances supported by this MetaLanguage, keyed
    #   by each language's id, name and suffixes.
    self._bases = {}

    # field list: list of BaseLanguage
    #   BaseLanguage instances in registration order.
    self._list = []

    # field schema_construct: Construct
    #   The schema associated with this meta language; lazily established
    #   the first time schema() is called.
    self._schema_construct = None

    # field the_context: Context
    #   The Context within which the schema was evaluated; lazily set when
    #   schema() is first called.
    #   TODO(wmh): Think about this some more ... kinda hacked up in order
    #   to implement compile_* in BaseLanguage classes.
    self._the_context = None

    # field bootstrap: Context
    #   A Context built from bootstrapped meta constructs, used to parse
    #   the schema files holding canonical construct definitions.
    self._bootstrap = Context(
      None, auto.bootstrap.BootstrapMeta(GenericConstruct))

    # field compiler: Compiler
    #   Holding a Compiler here gives access to important global state
    #   without passing it down long call chains.
    self._compiler = compiler

  def id(self):
    return self._id

  def name(self):
    return self._name

  def bases(self):
    return self._bases

  def compiler(self):
    return self._compiler

  def __str__(self):
    return 'MetaLanguage %s' % self.name()

  def context(self):
    """The Context the schema was evaluated in, computing it if needed."""
    if self._the_context is None:
      # schema() initializes both _schema_construct and _the_context.
      self.schema()
    return self._the_context

  def baselangNamed(self, id_or_name):
    """Return the specified BaseLanguage (or None if unregistered).

    Args:
      id_or_name: str
    """
    return self._bases.get(id_or_name, None)

  def registerBase(self, base):
    """Register a BaseLanguage under its id, name and every suffix."""
    bases = self._bases
    keys = [base.id(), base.name()]
    keys.extend(base.suffixes())
    for key in keys:
      existing = bases.get(key, None)
      if existing and existing is not base:
        raise meta.compiler.errors.InternalError(
            "Attempt to register base %s with key '%s' already used by %s"
            % (base.id(), key, existing.id()))
      bases[key] = base
    self._list.append(base)

  def srcdir(self):
    """The directory containing files related to this meta language."""
    return self.metaPath('src', 'schema', self.id())

  def schema(self):
    """Obtain my schema, parsing and caching it on first use."""
    cached = self._schema_construct
    if cached:
      return cached

    path = os.path.join(self.srcdir(), 'schema.meta')
    context = self._the_context
    if context is None:
      context = self._bootstrap
    parser = MetaFile(path, context, compiler=self.compiler())
    schema = parser.parseFile()
    context = parser.context()

    # TODO(wmh): This needs to be generalized.  An 'extends' attribute should
    # be added to Schema that specifies the parent MetaLanguage, and that
    # MetaLanguage should be parsed here, and all constructs from it added
    # to this language.  For now, we simply assume that all metalanguages
    # inherit from Meta(Meta).
    parent_id = 'meta'  # schema.attrval('extends')
    if self.id() != parent_id:
      # TODO(wmh): Will have to do more work to avoid circularities too.
      parent = self.compiler().metalangNamed(parent_id)
      if parent:
        # Fold every construct of the parent schema's config into ours and
        # rebuild the context from the merged config.
        config = schema.attr('config')
        for construct in parent.schema().attrval('config'):
          config.registerConstruct(construct)
        context = Context(None, config.value())
    self._the_context = context
    self._schema_construct = schema
    return schema


class MetaMeta(MetaLanguage):
  """The core Meta language."""

  def __init__(self, compiler):
    """Initializer.

    Args:
      compiler: Compiler
    """
    # No base languages are registered here: technically this language has
    # none, although conceptually every other MetaLanguage instance can be
    # viewed as a base language of this meta meta language.
    super(MetaMeta, self).__init__('meta', 'Meta', compiler)


class MetaOopl(MetaLanguage):
  """The Oopl meta language, supporting several object-oriented bases."""

  def __init__(self, compiler):
    """Initializer.

    Args:
      compiler: Compiler
    """
    super(MetaOopl, self).__init__('oopl', 'Oopl', compiler)
    self.registerBase(OoplCpp(self))
    self.registerBase(OoplJava(self))
    self.registerBase(OoplPython(self))
    self.registerBase(OoplPerl(self))
    self.registerBase(OoplJavascript(self))

  def func(self):
    # NOTE(review): this looks like leftover debug code -- confirm no caller
    # depends on it before removing.
    # BUG FIX: parenthesized the print so the statement is valid under both
    # Python 2 and Python 3 (single-argument print behaves identically).
    print('hello')


class MetaDoc(MetaLanguage):
  """The doc meta language (no base languages registered yet)."""

  def __init__(self, compiler):
    """Initializer.

    Args:
      compiler: Compiler
    """
    super(MetaDoc, self).__init__('doc', 'Doc', compiler)
    # Base languages to be enabled once implemented:
    #self.registerBase(DocTex(self))
    #self.registerBase(DocHtml(self))


class BaseLanguage(Meta):
  """Subclasses represent specific base languages within a meta language.

  Currently, this hierarchy is singleton (every class has one instance).
  However, there is certain state that would be useful to have access to
  that does not make sense if each BaseLanguage subclass instance is
  unique (for example, the FileEnv instance would be convenient to store
  here).
  """

  # NOTE: regex literals below are raw strings; the patterns are unchanged,
  # but unraw '\s'/'\$' escapes are deprecated in modern Python.
  INDENTING_RE = re.compile(r'(^|\n)\s*>\|')

  # meta field TMPL_RE: re
  #   A regexp for finding variable references within template strings.
  TMPL_RE = re.compile(r'\$\{(>*)([a-zA-Z0-9_-]+)([!<]*)\}(.|\n)?')
  TMPL_RE_NEW = re.compile(r'((?:^|\n) *)?\$\{([a-zA-Z0-9_-]+)\}')

  def metalang(self):
    return self._metalang

  def id(self):
    return self._id

  def name(self):
    return self._name

  def suffixes(self):
    return self._suffixes

  def suffix(self):
    # The first suffix in the list is assumed to be the primary one.
    # TODO(wmh): suffixes will probably need to be generalized to a
    # dict mapping conceptual concept to suffix: i.e.
    #   {'source': 'cc', 'header': 'h'}
    # with "equivalent" suffixes being added as lists:
    #   {'source': ['pm', 'pl']
    return self._suffixes[0]

  def __init__(self, metalang, id_, name, suffixes, config):
    """Initializer.

    Args:
      metalang: MetaLanguage
        The meta language this base language belongs to.
      id_: str
        Identifier-safe representation of the language.
      name: str
        Human-readable representation of the language.
      suffixes: list of str
        File suffixes used by this language (first is primary).
      config: dict
        Maps conceptual keys to base-language-specific values.
    """
    super(BaseLanguage, self).__init__()
    self._metalang = metalang
    self._id = id_
    self._name = name
    self._suffixes = suffixes

    # field config_map: dict
    #   Maps conceptual keys to base-language-specific values.  There will be
    #   a very large number of such key/value pairs, and BaseLanguage subclasses
    #   are required to provide values for all of them.  See the
    #   documentation of BaseLanguageOopl.CONFIG for details on keys.
    self._config_map = config
    # TODO(wmh): Why is this an instance field, rather than just accessing
    # cls.CONFIG?

    # field metafile: MetaFile
    #   It only makes sense to use an env field here if we do NOT make
    #   BaseLanguage instances unique singleons ... if each compilation
    #   has its own BaseLanguage, then we could update _env meaningful
    #   without breaking thread safety.
    self._env = None

  def env(self):
    return self._env

  def envIs(self, value):
    self._env = value

  def consattr(self, construct, attr, missing=EMPTY, default=None,
               extended=True):
    """High level access to construct attributes.

    Within compile_* methods, we often need to obtain attributes of the
    target construct, but using construct.attr(...) directly is cumbersome
    because so many args need to be passed every time. This method is a first
    attempt at removing the burdensomeness. I also have plans for a more a
    robust approach in which we don't need to pass the construct either
    (by storing it in self within compileMeta()) but that implementation is
    more prone to error than this one, so we'll start here first.

    This implementation properly handles baselang-specific attributes.

    Args:
      construct: construct
        The construct whose attributes are to be queried.
      attr: str
        An attribute key on the construct.
      missing: opaque
        The action to perform if attr does not exist in construct.
      default: any
        The default value to use for the value when the attribute doesn't
        exist.
      extended: bool
        If True, search for all baselang-specific variants of the attr,
        the empty attr, and '*'.

    Returns: two-tuple
     [0] Attribute or None
     [1] varies; the attribute value.
    """
    # TODO(wmh): Since we are caching metafile, should we also cache context?
    context = self.env().parser().context()
    selectors = self.allids(empty=True, meta=True) if extended else None
    attribute = construct.xattr(
      attr, missing=missing, context=context, params=selectors, all=False)
    if attribute is None:
      value = default
    elif not isinstance(attribute, Attribute):
      # BUG FIX: message previously referenced nonexistent 'consattrval'.
      raise meta.compiler.errors.Error(
        'In consattr with non-attribute %s' % attribute)
    else:
      value = attribute.value()
    if value is None:
      value = default
    return attribute, value

  def consattributes(self, construct, attrs, missing=EMPTY, default=None,
                     extended=True, output=None):
    """High level access to multiple construct attributes.

    Obtain the attribute and attribute value for various attributes, using
    the same resolution and default value scheme for all.

    Args:
      construct: construct
        The construct whose attributes are to be queried.
      attrs: list of str
        The keys of the attributes desired.
      missing: int
        The action to perform if attr does not exist in construct.
      default: any
        The default value to use for the value when the attribute doesn't
        exist.
      extended: bool
        If True, search for all baselang-specific variants of the attr,
        the empty attr, and '*'.
      output: dict or None
        If present, add results to it, else create and return new dict.

    Returns: dict
      For every $a in attrs, there will be two keys in this dict, $a and
      ${a}_attr.
    """
    if output is None:
      output = {}
    for attr in attrs:
      attribute, value = self.consattr(
        construct, attr, missing=missing, default=default, extended=extended)
      output[attr] = value
      output[attr + '_attr'] = attribute
    return output

  def allids(self, empty=False, meta=False):
    """Obtain all of the ids by which this base language can be identified.

    Args:
      empty: bool
        If True, add the empty string to the result.  Useful when the result
        is being passed to Construct.scope() to establish which block to pick
        and we want to support the unparameterized scope block.
      meta: bool
        If True, include '*' as well

    Returns: list of str
    """
    result = self.suffixes()[:]
    if self.id() not in result:
      result.append(self.id())
    if self.name() not in result:
      result.append(self.name())
    if empty:
      result.append('')
    if meta:
      result.append('*')
    return result

  def config(self, key):
    """Return the base-language-specific value for a conceptual key.

    Raises:
      Error: If key has no value in this language's config map.
    """
    config_map = self._config_map
    if key not in config_map:
      pprint.pprint(config_map)
      raise meta.compiler.errors.Error(
        "BaseLanguage %s does not have a config value for '%s'" %
        (self.name(), key))
    return config_map[key]

  def baseValue(self, value):
    """Convert a meta-level token into a baselang token.

    Args:
      value: str
        The value to convert.

    Returns: str
      The converted value.  if 'value' does not represent a meta-level
      token, it is returned verbatim.
    """
    if value.startswith('{#') and value.endswith('#}'):
      # '{#...#}' escapes: pass the inner text through untranslated.
      result = value[2:-2]
    elif value in self._config_map:
      result = self._config_map[value]
    else:
      result = value
    return result

  def fullname(self):
    return '%s<%s>' % (self._metalang.name(), self.name())

  def __str__(self):
    return 'BaseLanguage %s' % self.name()

  def rawField(self, field_name, context):
    """Return the language and context specific raw field name.

    Args:
      field_name: str
        The field name to convert.
      context: Context
        The context within which to convert.
    """
    # TODO(wmh): How to make this base-language-specific? Or do we need to?
    return '%s%s%s' % (
      context.token('field_prefix'), field_name, context.token('field_suffix'))

  def metaConstruct(self, construct):
    """Return the Construct that defines the given construct.

    For example, if 'construct' has kind 'class', this method returns
    the 'class' construct.

    Args:
      construct: Construct

    Returns: Construct
    """
    return self.metalang().schema().attr('config').cons(construct.kind())

  def getTemplate(self, context, construct, selector=None):
    """Obtain a template specification.

    Various constructs have user-provided templates associated with them.
    For example, when generating a method, Meta finds the closest
    Construct instance in the config stack of name 'method'.  Construct
    instances have a 'template' attribute, and we can ask for the value
    of 'template<python>' to obtain the template to use for compiling
    methods.  Since 'method' constructs can be defined in any 'config'
    block, the user is in control of what gets generated at every level.

    Raises:
      Error: If no template exists for the selector.

    Args:
      context: Context
        Meh.
      construct: Construct
        The construct for which a template is desired.
      selector: str
        The selector associated with the template.  This is usually a
        base language followed by additional text.  Examples include
        'python', 'java,test', 'perl-get-opt', etc.
    """
    # Obtain the template as a string.
    meta_construct = self.metaConstruct(construct)
    if not selector:
      selector = self.id()
    template_attr = meta_construct.attr('template<%s>' % selector)
    if template_attr is None:
      raise meta.compiler.errors.Error(
        'Failed to find template<%s> within %s' % (
          selector, meta_construct.kindid()))
    # Strip the '>|' indentation markers while preserving line structure.
    template = re.sub(
      BaseLanguage.INDENTING_RE, r'\1', template_attr.asStr(context))
    return template

  def getBody(self, context, scope, preamble=None, postamble=None,
              super_call=None, baserem=None, onempty=None):
    """Obtain the base-language representation of lines in scope.

    Args:
      context: Context
      scope: SimpleBlockAttribute
        Does not need to be an actual scope ... any simple block.
      preamble: str
        Newline-separated lines to prepend to the body.
      postamble: str
        Newline-separated lines to append to the body.
      super_call: str
        If set, replaces the META_SUPER() marker in scope lines.
      baserem: str
        What to convert meta-level comments to.
      onempty: list or None
        The list of lines to add if scope is empty.

    Returns: list of str
    """
    body = preamble.split('\n') if preamble else []
    if scope:
      for line in scope.value():
        if super_call:
          line = line.replace('META_' + 'SUPER()', super_call)
        # TODO(wmh): line_comment is currently only for comments at the
        # beginning of the line, not those at the end.  As such, if one
        # has a line like:
        #     if value = 10:    /# this needs to be improved.
        # the /# will not be properly converted to baserem.
        line = context.re('line_comment').sub(r'\1' + baserem, line)
        body.append(line)
    if postamble:
      body.extend(postamble.split('\n'))
    if not body and onempty:
      body = onempty[:]
    return body

  def instantiateTemplate(self, context, construct, varset, selector=None,
                          template=None, debug=False):
    """Obtain the instantiated template for the specified construct.

    Every construct that can be compiled has a baselang-specific 'template'
    string that describes how the construct is formatted in the baselang in
    question.  This template contains a variety of variable substitution
    indicators, of the form '${var}', which are to be replaced with values
    passed into this method (in the 'varset' param)

    Raises:
      Error: If 'varset' does not contain all of the variables in the template.

    Args:
      context: Context
        Establishes syntax.
      construct: Construct
        The construct to instantiate the template of.
      varset: VarSet
        Maps variable references to value/attribute tuples.  The variables
        are to be replaced with the values, with association made between
        line indices and attribute line numbers.
      selector: str or None
        If None, uses the baselang id as the selector.
      template: str or None
        If not specified (the norm), the template is obtained from the
        appropriate 'template' attribute of the schema construct defining
        'construct'.
      debug: bool

    Returns: MetaSegment
      A wrapper around the collection of instantiated lines from a template.
    """
    # Obtain the multi-line template.
    # TODO(wmh): If simple block attribute values were multi-line strings
    # instead of lists of Lines, this would be much more efficient.
    if template is None:
      template = self.getTemplate(context, construct, selector=selector)

    # As part of the effort to support both exact replication and
    # canonicalization of data, we require that templates always end with
    # a newline. We remove this newline so that a spurious line is not
    # added when we split on newlines.
    if template[-1] != '\n':
      raise meta.compiler.errors.InvariantViolated(
        'Templates must end with newline')
    template = template[:-1]

    # Provide some default values.
    # TODO(wmh): These are language specific, and should be avoided here, or
    # generalized.
    fullid = construct.fullid()
    kind = construct.kind()
    if kind not in varset:
      varset.addVar(kind, construct.id(), construct.primary())
    if 'self' not in varset:
      varset.addVar('self', 'self')

    # The lines in the instantiated template.
    lines = ['']

    # mapping is a list of tuple that specify baselang to meta line
    # correspondences.  See MetaSegment._mapping docs for details.
    mapping = []

    # We create the MetaSegment that wraps lines and mapping.
    segment = MetaSegment(lines, mapping=mapping)

    # Split the template on variable references according to TMPL_RE_NEW.
    # The re contains 2 groups, so the result of the split consists of one
    # element representing the text before the first variable, followed by
    # N chunks of 3 elements (the 2 groups in the regexp and the text
    # between one var and the next).
    pieces = self.__class__.TMPL_RE_NEW.split(template)

    # Add the text before the first variable.
    pretext = pieces.pop(0)
    segment.extendFromInterpolationData(pretext.split('\n'), debug=debug)

    # Invariant: |pieces| % 3 = 0
    chunk_size = 3  # number of groups in TMPL_RE_NEW plus one
    if len(pieces) % chunk_size != 0:
      # BUG FIX: message previously said 'multiple of 4' although chunk_size
      # is 3.
      raise meta.compiler.errors.Error(
        'Parsing of template failed to yield multiple of 3')

    # At all times, the text associated with a variable starts at the end
    # of lines[-1], not on a new line.  This means that empty variables do
    # not introduce spurious newlines.
    pi = 0
    while pi < len(pieces):

      # dent: str or None
      #   If None, the variable is not the first var on a line with optional
      #   preceeding whitespace.  If not None, dent[0] == '\n' and dent[1:]
      #   is the indentation to insert before each line in the multi-line
      #   value associated with the variable.
      # var: str
      #   The name of the variable to be interpolated
      # text: str
      #   The text that appears after the interpolated variable before
      #   the next variable.  This is to be inserted verbatim.
      dent, var, text = pieces[pi:pi+chunk_size]

      if debug:
        # BUG FIX: parenthesized for Python 2/3 print compatibility
        # (single-argument form behaves identically in both).
        print('%s${%s} --> "%s"' % (
          dent[1:] if dent else '', var, text.replace('\n', '\\n')))

      # If dent has a newline in it, we need to add a new line to lines
      if dent and dent[0] == '\n':
        dent = dent[1:]
        lines.append(dent)

      # Obtain the information about the variable to be interpolated.
      value, attribute, elem_delim, line_width = varset.get(var)

      #if var == 'modifiers':
      #  # Note that this is after the '\n' has been stripped from dent (see above)
      #  print '**** HERE with dent="%s" var=%s text="%s" value="%s" attribute=%s' % (
      #    dent, var, text, value, attribute)

      if value is None:
        raise meta.compiler.errors.Error(
          "Missing variable '%s' for %s template:\n%s\nvarset is:\n%s" %
          (var, construct.kind(), template, varset.asStr(indent='  ')))

      # The value is either a string or a list of str|MetaSegment.
      if isinstance(value, list):
        if not elem_delim or elem_delim == '\n':
          # list-valued variable interpolation.  The first element of 'value'
          # must be a string (not a MetaSegment). This is verified in
          # MetaSegment.extendFromInterpolationData().
          pass
        else:
          # We have a non-standard elem_delim, which means we are to form
          # a collection of lines from the list-valued value.  There are
          # various subtleties to be aware of:
          #  - If the variable appears at position N of a template line,
          #    we want to indent this many spaces in for every line added
          #    while processing value
          #  - If delim has trailing spaces, we need to clean them up when
          #    the delim ends up being the last value on a line
          #  - If a particular element exceeds the max width all on its own,
          #    we have to insert it.
          #  - The text that appears at the end of the last line in the
          #    result (i.e. first line in 'text') must be taken into account
          #    when performing line wrapping logic.
          #  - The construct in question may be indented arbitrarily after
          #    the fact, so it is up to the caller to properly set the
          #    max line width to take this into account based on the caller's
          #    knowledge of the absolute indentation that will occur.
          value_as_lines = []
          if line_width is None:
            line_width = 80
          text_after_value = text.split('\n')[0]

          # We establish where we are in the current line of output ... we
          # want to align all lines generated by 'value' so they all indent
          # to this point.  We wrap to new lines if the current line length
          # exceeds line_width.
          first_width = len(lines[-1])
          current_width = first_width
          value_as_lines.append('')
          nv = len(value)
          for i, val in enumerate(value, start=1):
            if '\n' in val:
              # If a value has newlines, we honor them, but indent each line
              # according to first_width
              vallines = val.split('\n')
              value_as_lines[-1] += vallines[0]
              for valline in vallines[1:]:
                value_as_lines.append((' ' * first_width) + valline)
            else:
              # The value does not have newlines, so we do normal wrapping.
              if i == nv:
                valplus = val
                vpsz = len(valplus) + len(text_after_value)
              else:
                valplus = val + elem_delim
                vpsz = len(valplus)

              if current_width + vpsz > line_width:
                # Cannot fit the current element on the current line, so we
                # advance to next line.  In case 'elem_delim' has trailing
                # spaces in it, we rstrip the current line.
                #
                # NOTE: The check that value_as_lines[-1] is not empty is to
                # avoid an infinite loop in situations where valplus exceeds the
                # max width without anything else being present.
                value_as_lines[-1] = value_as_lines[-1].rstrip()
                value_as_lines.append(valplus)
                current_width = first_width + vpsz
              else:
                # The current element fits on the current line (or is the only
                # element and is so large it overflows by itself)
                value_as_lines[-1] += valplus
                current_width += vpsz
          value = value_as_lines
          dent = ' ' * first_width

      else:
        # Simple variable interpolation.  However, the value may be multi-lined
        value = value.split('\n')

      if attribute:
        metaline = attribute.line()
        if attribute.isBlock():
          metaline += 1
      else:
        metaline = 0
      mapping.append((len(lines) - 1, metaline, fullid + ':' + var))

      # The following code is meant to properly deal with the following:
      #  |>  ${comment}
      #  |>  ${scope}
      # when ${comment} is empty, the entire line containing ${comment} should
      # be deleted (this is a useful feature!).  However, the current
      # implementation produces undesirable results for the following:
      #    |>
      #    |>${modifiers}${method}
      # which becomes
      #    |>${method}
      # if ${modifiers} is empty when we want it to be
      #    |>
      #    |>${method}
      # I thought that adding another test for whether text has a newline, or
      # some such (to narrow it down to apply only when a single variable
      # resides on a line) would work, but it doesn't appear to be. Need to
      # write some unittests to isolate this situation.
      if not value or (len(value) == 1 and not value[0]):
        if not lines[-1].strip():
          # We have a variable whose value is empty, and the variable
          # reference is on a line by itself.
          # TODO(wmh): This doesn't properly deal with something like
          #   >|  ${var1} ... ${var2}
          # when $var1 is empty, because when processing var1, it will
          # look like we have an empty line (so we delete it), but this
          # leaves var2 placed at the end of the previous line which is
          # not the intention in this situation.
          lines.pop()
      else:
        segment.extendFromInterpolationData(
          value, indent=dent or '', debug=debug)

      # Now add any text following the variable before the next variable.
      segment.extendFromInterpolationData(text.split('\n'), debug=debug)

      pi += chunk_size

    if not lines[-1]:
      # TODO(wmh): This has been added because a spurious newline was being
      # added when generating code from 'native' constructs. There is a newline
      # at the end of the last line in the native scope, and
      # instantiateTemplate() requires that the template itself end with a
      # newline.  Puzzingly, adding this code does not appear to have produced
      # any newline shoftages, which I had expected.
      #
      # Can we clean this up somehow?
      lines.pop()

    return segment


class BaseLanguageOopl(BaseLanguage):
  """Superclass of all BaseLanguage classes.

  HOW TO DEFINE A NEW Meta(Oopl) BASE LANGUAGE

  Suppose we want to add support for a language called Blue to Meta(Oopl)
   - create a subclass OoplBlue of BaseLanguageOopl

   - define a CONFIG dict class variable in this class that maps conceptual
     keys to base-lang-specific values.  See other languages for the
     keys that need to be mapped.
       - TODO(wmh): Document the set of keys in CONFIG

   - define a KEYWORDS class variable (list of str) in this class that
     enumerates all of the reserved keywords of the base languages.

   - Define an initializer in the new class that invokes the
     parent, specifying:
      1) metalang
      2) id (must be a legal file name and basic identifier)
      3) name (human readable, but no spaces)
      4) suffixes (list of common suffixes used by this language)
      5) config (dict)

    - Add a line to MetaOopl.__init__
        self.registerBase(OoplBlue);
      TODO(wmh): Support subclassing of MetaOopl and allowing users to
      control which classes are used to instantiate everything


    - Define a OoplBlue.CONFIG dict, and copy over some other
      Oopl*.CONFIG dict, updating values appropriately.

    - Extend queens.meta to have a Meta(Blue) implementation
       % cd $META/src/kernel/ex
       # edit queens.meta and add scope<blue> and run<blue> blocks, then compile
         % metac -b blue -c queens.meta
      Also edit the Makefile and add a 'blue' target (and associated target
      for .meta/oopl/blue/queens.bl)

    - Let the exceptions and errors in the following lead the way:
       % cd $META/src/kernel/ex
       % make blue

       - If you encounter an error of the form
            BaseLanguage Blue does not have a config value for '<some_key>'
         add the '<some_key>' key to OoplBlue.CONFIG

       - Implement OoplBlue._augmentVarset(), which will be used to provide
         blue-specific overrides for var/value pairs added to VarSet instances
         in generic compile_* methods defined in BaseLanguageOopl.

       - For every NotImplementedError, implement the associated method
           - compile_field()
           - formatParams()
              - use the service method BaseLanguageOopl._extractParams() to do
                the extraction of data from params.
           - metaTypeToBase()
           - _formatImports()

        - For those template methods with default implementations, verify
          that the default is working, and if not, subclass as
          appropriate. For example, a default implementation of docstr()
          exists in BaseLanguageOopl, but python replaces it entirely.

        - In $META/src/schema/oopl/schema.meta, add template<blue> and
          template<blue,test> attributes to constructs namespace, class, method,
          initializer, field, etc.
  """

  # Subclasses override this with a list of keywords in that language (i.e.
  # words that are reserved and cannot be used as identifiers).
  KEYWORDS = []

  # Subclasses override this with baselang-specific values. Keys are:
  #   self: str
  #     What is used to reference the current object within instance methods
  #   cls: str
  #     What is used to reference the current class within class methods. If
  #     the language does not support class methods, this should be None
  #   null: str
  #     What is used to initialize a pointer value to "not a pointer"
  #   true: str
  #     The literal true token
  #   false: str
  #     The literal false token
  #   rem: str
  #     The text before a one-line comment
  #   rem_start: str
  #     The text before a multi-line comment
  #   rem_end: str
  #     The text after a multi-line comment
  #   empty_scope_segment: list of str
  #     What to insert into a stream when an empty scope is encountered.
  #   class_primary: bool
  #     If True, there is one class per file, and these per-class files
  #     are intended to be primarily visible to the end user.  If False,
  #     the per-class files are just intermediary files used to create an
  #     aggregation into a namespace file.
  #   initializer_name: str
  #     The name of the default initializer.  Use the empty string to have
  #     it be the name of the class.
  CONFIG = {}

  def compileMisc(self):
    """Hook for miscellaneous compilation work; the default is a no-op.

    Subclasses may override to perform baselang-specific work — presumably
    invoked once per compilation (TODO confirm caller/timing; not visible
    from this file chunk).
    """
    pass

  def compile_File(self, metafile, construct, config):
    """Compile a file.

    This default implementation might work for any language that does one of
    the following:
      a) Creates files for each class
      b) Creates files for each namespace

    Any language that doesn't do one of the above will need to override
    this implementation.  Languages that do one of the above may still need
    to override in order to initialize streams, etc.
    """
    stream_set = metafile.streams()
    stream_set.initStreams('imports', 'classes')
    # Code appearing between classes (native blocks, etc.) goes into the
    # 'class-preamble' stream and is handled by the following class (or by
    # the namespace, in the postamble case).
    stream_set.initStreams('class-preamble')
    # Experimental: inter-class code may instead be routed through the
    # 'class-postamble' stream and handled by the current class.
    stream_set.initStreams('class-postamble')

    # A File instance represents a meta file.  The default behaviour is
    # simply to compile every construct found in its 'scope' attribute.
    for scope_child in construct.attrval('scope'):
      scope_child.compileMeta(metafile, self, config)

  def compile_namespace(self, metafile, construct, config):
    """A default implementation of namespace compilation.

    Args:
      metafile: MetaFile
      construct: Construct
        A namespace construct.
      config: dict
        Controls certain implementation details:
         create: bool
           If True, create streams that do not exist.  It is usually best
           to keep this False and ensure that streams are properly
           initialized by whatever compile_* method will end up using
           the stream data.
    """
    Log.info('compile_namespace(%s)', construct.kindid())
    Log.indent()

    sep_class = config.get('sep_class', False)

    context = metafile.context()

    # The 'comment' of the namespace becomes the docstr for the module.
    module_text = self.docstr(context, construct, indent=0)[0] + '\n\n'

    # Namespace scope is complex.  The various constructs found within a
    # namespace write into the following streams:
    #
    #   imports:
    #     The collection of import commands for the class.
    #   test-imports:
    #     The collection of import commands to add to the unittest file for
    #     the class.
    #   namespaces:
    #     A simple list of namespaces found.
    #   test-namespaces:
    #     A simple list of namespaces found.
    streams = metafile.streams()
    streams.initStreams('imports')
    if sep_class:
      streams.initStreams('imports-defn')

    # Compile each 'tests' attribute, which will write into the test-* streams.
    #
    # TODO(wmh): Should we support lexical lookup for 'tests'?  Doesn't really
    # make sense here, but how do we make it intuitively obvious to users
    # which attributes, in which contexts, do lexical LOOKUP and which do not?
    test_attr, test = self.consattr(construct, 'tests', missing=EMPTY)
    test_config = copy.deepcopy(config)
    test_config['test'] = True
    test_config['service-class'] = True
    for child in test:
      child.compileMeta(metafile, self, test_config)

    # Compile each 'scope' construct, which will write into the 'imports',
    # and 'classes' streams.
    for child in construct.attrval('scope'):
      child.compileMeta(metafile, self, config)

    # NOTE(review): source_order and test_order are currently unused, and the
    # second call passes source_order=None rather than the source_order just
    # computed — confirm whether that was intended.  The calls are kept for
    # their stream-writing side effects.
    source_order = self._compileNamespace(
      metafile, construct, test=False, comment=module_text.rstrip())
    # set source_order scope:
    #   msg _ on self send _compileNamespace args:
    #     arg metafile;
    #     arg construct;
    #     kwd test = False;
    #     kwd comment scope:
    #       to module_text send rstrip;

    test_order = self._compileNamespace(
      metafile, construct, source_order=None, test=True, comment='')

    Log.undent()

  def compile_category(self, metafile, construct, config):
    """A default implementation of category compilation.

    Note that category constructs are expected only within classes (although
    they can be arbitrarily deeply nested within one another within a class).

    Args:
      metafile: MetaFile
      construct: Construct
        A category construct.
      config: dict
        Passed through unchanged when compiling each child construct.
    """
    Log.info('compile_category(%s)', construct.kindid())
    Log.indent()

    # The 'comment' of the category becomes a list of lines inserted into
    # both the 'methods' and 'test-methods' streams after a category
    # delimiter line.
    clines = construct.attrval('comment', defattr=EMPTY)
    streams = metafile.streams()
    lines = ['', '# ' + '-' * 70]
    for line in clines:
      lines.append('# ' + line.replace('/# ', ''))
    for line in lines:
      streams.addLine('methods', line)
      streams.addLine('test-methods', line)

    # Compile each 'scope' construct.
    for child in construct.attrval('scope'):
      child.compileMeta(metafile, self, config)

    Log.undent()

  def compile_class(self, metafile, construct, config):
    """Compile a 'class' construct into a base language.

    Args:
      metafile: MetaFile
        Provides the parsing context and the stream collection written to.
      construct: Construct
        The 'class' construct to compile.
      config: dict
        Compilation options.  Recognized keys include 'test' (bool: we are
        compiling a unittest class) and 'sep_class' (bool: declaration and
        definition live in separate files).

    Side-Effects:
      Writes lines/segments into several streams of metafile; see the inline
      comments for the specific streams involved.
    """
    # If config['test'] is True:
    #   - we are compiling a unittest class
    #   - we write to the following streams:
    #      - 'test-namespaces': the namespace of the test class
    #      - 'test-imports': zero or more imports needed by the test class
    #      - 'test-classes': python code for the test class
    # Else:
    #   - we are compiling an actual class
    #   - we write to the following streams:
    #      - 'imports': zero or more imports needed by the class
    #      - 'classes': python code for the class
    #      - 'test-classes': python code for the associated test class
    context = metafile.context()
    name = construct.id()
    sep_class = config.get('sep_class', False)

    # We first establish whether this class should be compiled for the
    # current base language or not. If the scope attribute is not
    # parameterized, or <*>, or is <baselang> for this baselang, then
    # we are to process it. If none of these are true, and the class
    # is marked as span=general, we are to create a stub class based
    # on some other baselang definition (preferred order is C++, Java,
    # Python, Perl, Javascript)
    scope, _ = self.consattr(construct, 'scope', missing=None)
    if not scope:
      span_attr, span = self.consattr(construct, 'span', missing=LOOKUP)
      if span == 'general':
        # Use an empty complex block to represent this missing scope.
        # TODO(wmh): How to ensure that scope is treated as const ... don't
        # want it modified!
        scope = ComplexBlockAttribute.Empty
      else:
        # We are not to implement this class for this baselang.
        metafile.info(
          'Not compiling %s in %s (no scope<%s> and span=%s)' % (
            construct.fullkindid(), self.name(), self.id(), span),
          attr=construct.primary())
        return

    # Determine if we are compiling a test class or a real class.
    #
    # TODO(wmh): Currently determining whether we are in a real class or
    # unittest class based on config values.  There is a 'location'
    # attribute on 'class' that could be used for the same purpose and is
    # currently unused.  Delete it? Or move to using it?  Pros and cons?
    test_prefix = 'test-'
    is_test_class = config.get('test', False)
    stream_prefix = test_prefix if is_test_class else ''

    # Class scope is complex.  The various constructs found within a class
    # write into the following streams:
    #
    #   fields:
    #     A stream that initializes the state of the object based on
    #     field definitions.
    #   accessors:
    #     A stream containing accessor methods for fields.
    #   methods:
    #     A stream containing all non-initializer method definitions,
    #     including field accessors.  They appear in the same order that
    #     the constructs appear in the scope.
    #
    # However, if this is a test class (is_test_class is True), data is written
    # into 'test-fields' and 'test-methods' instead.
    streams = metafile.streams()
    streams.initStreams('fields', 'accessors', 'methods')
    if sep_class:
      streams.initStreams('fields-defn', 'accessors-defn', 'methods-defn')

    # Handle namespace issues:
    #  - There are (unfortunately) multiple places to find namespaces
    #     1) If a class has an explicit 'within' attribute, it is used.
    #     2) If a class has an ancestor construct that is a 'namespace',
    #        it is used.
    #     3) Otherwise, the namespace 'main' is used.  TODO(wmh): This should
    #        probably be baselang-specific.
    namespace = 'main'
    namespace_attr = construct.attr('within', default=None, context=context)
    if namespace_attr:
      namespace = namespace_attr.value()
    else:
      namespace_cons = construct.findAncestor('namespace')
      if namespace_cons:
        namespace = namespace_cons.id()

    # Process associations.
    #   Each class specifies the list of associations it has by adding them
    #   to the 'imports' stream.  For languages that are namespace-based,
    #   class-specific imports are merged into a single namespace set in
    #   compile_namespace.
    import_list = []
    test_import_list = []
    assocs_attr, assocs = self.consattr(construct, 'associations', missing=EMPTY)

    if assocs_attr:
      my_limit = 'limit<%s>' % self.id()
      for assoc in assocs:
        # TODO(wmh): The current mechanism for limiting an assoc to
        # specific languages is a bit cumbersome, so it will probably
        # change. Because the parser does not currently allow primary
        # keys to have params, and the association value is currently
        # stored in the primary key value, there is no convenient way to
        # say "this only applies to a specific base language". I've
        # introduced a 'limit' secondary attribute, and added
        # Construct.attributesMatching() which finds, in this
        # instance, all attributes starting with 'limit'. If any such
        # attributes exist, and the specific variant for this base
        # language isn't in the set, we do not process this assoc.
        #
        # Note that this is a useful idea for allowing multiple languages to
        # be selected. Will be more useful when the parser supports optional
        # attribute values (since the value is irrelevant here).
        #
        # Note that if we continue to allow many different variants of a
        # baselang to appear in attribute parameters, it makes for too much
        # ambiguity (heuristic vs algorithmic).  We could store the canonical
        # language when we parse parameters, but that implies knowledge of
        # the "type" of parameters (am exploring the possibility of them just
        # being arbitrary values).
        limits = assoc.attributesMatching('^limit', context=context)
        if limits and my_limit not in limits:
          # This import doesn't apply to this baselang
          continue
        location_attr, location = self.consattr(
          assoc, 'location', missing=LOOKUP)
        kind_attr, kind = self.consattr(assoc, 'kind', missing=LOOKUP)
        assoc_spec = str(kind) + '##' + assoc.id()
        if is_test_class:
          # Remember that is_test_class True means we are compiling an actual
          # class within a test scope.  If we are compiling a non-test class,
          # it results in code being generated both in the source file and
          # the associated unittesting file.
          streams.addLine('imports', assoc_spec)
          test_import_list.append(assoc_spec)
        else:
          # Assocs can appear in the class code, the unittesting class code,
          # or in both.
          if location in ('code', 'codetest'):
            streams.addLine('imports', assoc_spec)
            import_list.append(assoc_spec)
          if location in ('unittest', 'codetest'):
            streams.addLine('test-imports', assoc_spec)
            test_import_list.append(assoc_spec)
    # We always include certain assocs in the test class
    for assoc_clsname in (
      'meta.testing.TestCase',
      '%s.%s' % (namespace, name),
    ):
      test_assoc_spec = 'cls##' + assoc_clsname
      streams.addLine('test-imports', test_assoc_spec)
      test_import_list.append(test_assoc_spec)

    # If classes in this base language have separate files for interface and
    # implementation, we add an import to the implementation file that imports
    # the interface file.
    if sep_class:
      streams.addLine('imports-defn', 'cls##%s.%s' % (namespace, name))

    # Compile the 'tests' block:
    #   If there is a tests block associated with this class, it defines
    #   service methods to be added to the unit test class. The most common
    #   such methods are setUp() and tearDown(), but numerous service methods
    #   useful in unit tests can be defined here.
    #
    #   Note that we process the 'tests' block before the 'scope' block,
    #   because we want the service methods (and any other service code)
    #   written to the 'test-methods' stream *before* we define the unit test
    #   methods themselves (in case the unit test methods rely on the service
    #   code in a lexical manner). Unit testing code is added to the
    #   'test-methods' stream (both the service code discussed above, and the
    #   unit test methods themselves, which are written when we compile the
    #   'tests' block of each 'method' in compile_method().
    #
    #   TODO(wmh): We currently assume every test class is the child of a
    #   module-specific 'TestCase' class (which must be defined in the 'tests'
    #   block of the 'class').  We will want a way to generalize this.
    test_block, _ = self.consattr(construct, 'tests', missing=EMPTY)
    if is_test_class:
      # It is erroneous to specify a 'tests' attribute on a test class itself.
      metafile.warning("Ignoring 'tests' attribute of test class %s" % name)
    else:
      test_config = copy.deepcopy(config)
      test_config['test'] = True
      test_config['service-method'] = True
      for child in test_block.value():
        child.compileMeta(metafile, self, test_config)

    # Compile the 'run' block:
    #   Generate an implicit __Meta__Run method.
    run_attr, run = self.consattr(construct, 'run', missing=None)
    if run_attr:
      # Create a '__Meta__Run' method.
      if not scope:
        raise meta.compiler.errors.Error(
          'run but no scope ... fix how this is reported')
      self.createMetaRunMethod(run_attr, scope)

    # Compile the scope.
    #   The constructs in class scope (field, method, initializer, etc.) all
    #   know to write to the 'methods' and 'fields' streams.  This is true
    #   even if config['test'] is True ... we are compiling a full class so
    #   we use the normal streams.
    #
    #   When sep_class is True, constructs within class scope will also write
    #   to the methods-defn and fields-defn streams.
    if scope:
      sconfig = copy.deepcopy(config)
      if 'test' in sconfig:
        del sconfig['test']
      for child in scope.value():
        child.compileMeta(metafile, self, sconfig)

    # Write a segment to the 'classes' and/or 'test-classes' streams.  If
    # this is a test class, we only write a 'test-class' segment, but if we
    # are compiling a real class, we add a segment to both.
    #
    # NOTE: There is some subtlety here worth noting.  When compiling a
    # test class, the method segments are written to the 'test-methods' stream,
    # not the 'methods' stream.  This isn't strictly necessary ... we could
    # write to the 'methods' stream, but that would require us to distinguish
    # between a File-level test compilation and a class-level test compilation
    # (which may still be necessary).
    #
    # Note that the following invocation of compileClass applies to both
    # source classes and test classes (test classes are compiled using the
    # same control flow as source classes, just marked as tests with
    # config['test'] == True). Thus, this call is NOT just for source
    # classes.

    if is_test_class:
      # Test class
      self._compileClass(
        metafile, construct, namespace, import_list=test_import_list,
        test=False, sep_class=sep_class, dotest=True)
    else:
      # Core class
      self._compileClass(
        metafile, construct, namespace, import_list=import_list,
        test=False, sep_class=sep_class, dotest=False)
      if scope:
        # If the class itself does not have a 'scope' attribute, it is probably
        # an Exception subclass ... we don't bother creating a test class.
        #
        # TODO(wmh): The current implementation is a bit too coarse-grained,
        # as it also filters out actual test classes that happen to be empty.
        self._compileClass(
          metafile, construct, namespace, import_list=test_import_list,
          test=True, sep_class=sep_class, dotest=False)

  def _compile_executable(self, metafile, construct, config, initializer=False):
    """An abstraction of the work needed for 'method' and 'initializer'.

    Args:
      metafile: MetaFile
        Provides access to parser, compiler and baselang.
      construct: Construct
        The method or initializer to compile.
      config: dict
        Compilation options.  Recognized keys include 'debug', 'sep_class',
        'test', 'service-class' and 'service-method' (all bool).
      initializer: bool
        If True, compile this as an initializer, otherwise as a method.
    """
    context = metafile.context()
    debug = config.get('debug', False)
    sep_class = config.get('sep_class', False)
    baserem = self.config('rem')

    name = construct.id()
    basel = self.id()

    scope, _ = self.consattr(construct, 'scope', missing=None)

    # If config['test'] is True:
    #  - we are compiling a method within a service unittest class (i.e. we are
    #    NOT compiling an actual method).
    #  - we write to the following streams:
    #     - test-methods: a segment representing the python for the method.
    # Else:
    #  - we are compiling a real method, which has both 'scope' and 'test'
    #    attributes.
    #  - we write to the following streams:
    #     - methods: a segment representing the python for the method
    #     - test-methods: zero or more segments representing the unittest(s)
    #       for the method.
    class_construct = construct.findAncestor('class')
    is_test_method = config.get('test', False)
    is_service_class = config.get('service-class', False)
    is_service_method = config.get('service-method', is_service_class)
    stream_prefix = 'test-' if is_test_method else ''

    # NOTE(review): 'visibility' is not referenced below; the call is kept in
    # case the LOOKUP resolution has side effects — confirm before removing.
    visibility_attr, visibility = self.consattr(
      construct, 'visibility', missing=LOOKUP)

    # Establish variables that depend on whether we are compiling an
    # initializer or method.
    if initializer:
      inheritance = 'post_extend'
      level = 'instance'
      presence = 'concrete'
      modifiers = ''
      basetype = None
    else:
      inheritance = construct.attrval(
        'inheritance', defattr=LOOKUP, context=context)
      presence = construct.attrval('presence', defattr=LOOKUP, context=context)
      # The 'level' feature attribute establishes whether the method is
      # defined on the class or its meta-class. In languages with support for
      # both in the same class, the methods are defined where appropriate for
      # the language.
      level = construct.attrval('level', defattr=LOOKUP, context=context)
      type_attr, metatype = self.consattr(construct, 'returns', missing=LOOKUP)
      if metatype and metatype.isValid():
        basetype = self.metaTypeToBase(metatype)
      else:
        raise meta.compiler.errors.RequiredAttributeValue(
          'Attribute %s.%s is required' % (construct.kind(), 'type'))

    body = None   # None means currently unassigned

    if debug:
      print('Here compiling %s (%s)' % (name, level))

    # Establish class and method name, and parent method args
    class_name = class_construct.id()
    #print '%s.%s: %s (tm=%s, sm=%s, sc=%s)' % (class_name, name, pprint.pformat(config), is_test_method, is_service_method, is_service_class)
    if is_test_method and not is_service_class:
      class_name += 'Test'
    if initializer:
      method_name = self.config('initializer_name') or class_name
      pargs = construct.attr('pargs', default=None, context=context)
      if pargs:
        parent_args = [pargs.valueStr(strip=True)]
      else:
        parent_args = []
    else:
      method_name = name
      pargs = construct.attr('pargs', default=None, context=context)
      if pargs:
        # We preferably want a list of each parameter, but since we don't
        # have that, we just create a list of one element containing the
        # entire set of args.
        parent_args = [pargs.value()]
      else:
        # If no pargs attribute exists, we assume that we pass the exact
        # same args to the parent as are defined for this method.
        parent_args = self.formatParams(construct, metafile, level=None)[0]

    # Feature attribute 'inheritance'
    preamble = ''
    postamble = ''
    if level == 'static':
      if inheritance != 'no_extend':
        raise meta.compiler.errors.Error(
          'static methods cannot have inheritance = %s != no_extend' %
          inheritance)
    else:
      super_call = self._superSyntax(
        construct, class_name, method_name, parent_args)

      if inheritance == 'no_extend':
        # We do not set preamble
        pass
      elif inheritance == 'pre_extend':
        postamble = super_call
      elif inheritance == 'post_extend':
        preamble = super_call
      elif inheritance == 'super_extend':
        raise meta.compiler.errors.Error(
          'Not yet supporting inheritance super_extend for %s' % basel)
      elif inheritance == 'sub_extend':
        raise meta.compiler.errors.Error(
          'Not yet supporting inheritance sub_extend for %s' % basel)
      else:
        raise meta.compiler.errors.InternalError('Unknown inheritance %s' % inheritance)

    # Feature attribute 'presence'
    # Implement feature presence concrete
    if presence == 'concrete':
      if not scope:
        # TODO(wmh): This kind of error checking should happen during/after
        # parsing, before compilation.
        metafile.error(
          'method %s is not marked as abstract, but is missing a scope' % method_name,
          line=construct.primary().line())
    # Implement feature presence abstract
    elif presence == 'abstract':
      if scope:
        metafile.error(
          'method %s marked as abstract but given a scope' % method_name)
      else:
        # Do we want to add preamble/postamble?
        body = ['raise NotImplementedError']
    else:
      raise meta.compiler.errors.InternalError('Unknown presence %s' % presence)

    # We first establish whether this method should be compiled for this base
    # language. The fact that we are at method level means that the class
    # itself should be compiled to baselang, but not necessarily the method
    # itself. If scope<$baselang> or scope<*> exists, then we are definitely to
    # create it. If no such scope exists, we normally skip compilation of this
    # method entirely, but if the 'span' attribute of the construct is
    # 'general', we create a stub method, and if the 'presence' attribute
    # of the construct is 'abstract' we create a special abstract method.
    if scope is None and body is None:
      span_attr, span = self.consattr(construct, 'span', missing=LOOKUP)
      if span == 'general':
        # We are to create a stub method (we create a dummy scope that
        # returns a dummy value of the appropriate return type.
        print('Not yet creating stub method for %s in %s' % (
          construct.fullkindid(), self.name()))
        # TODO(wmh): Compute a default value based on the return type.
        # Returning None is good for any pointer type, but we should return
        # 0 for an int return, 0.0 for a real return, etc.
        body = ['return None']
      else:
        # We are not to implement this class for this baselang.
        metafile.info(
          'Not compiling %s in %s (no scope<%s> and span=%s)' % (
            construct.fullkindid(), self.name(), self.id(), span),
          attr=construct.primary())
        return

    # Establish the values to insert into the method definition template
    params_list, typecheck, doclines, param_attr = self.formatParams(
      construct, metafile, level=level)
    varset = VarSet()
    varset.addVar('method', method_name)
    varset.addVar('typecheck', typecheck)
    varset.addVar(
      'params', params_list, attribute=param_attr, delim=', ', width=80)
    varset.addVar('modifiers', '%(modifiers)s')
    default_comment_lines = ['Initializer.'] if initializer else None
    comment_str, comment = self.docstr(
      context, construct, indent=0, add_params=True,
      default_lines=default_comment_lines)
    varset.addVar('comment', comment_str, comment)
    if body is None:
      body = self.getBody(
        context, scope, preamble=preamble, postamble=postamble, baserem=baserem)
    varset.addVar('scope', body, scope)
    varset.addVar('type', basetype)

    # Instantiate the method definition template with values and add to the
    # 'methods' stream (and, if this is sep_class, to the 'methods-defn' stream)
    if sep_class:
      # The method declaration and definition reside in separate files.
      #  - For such languages there should be two method and initializer
      #    templates, template<$baselang-decl> and template<$baselang-defn>
      self._augmentVarset(construct, varset, 'method-decl', locals())
      segment = self.instantiateTemplate(
        context, construct, varset, selector=self.id() + '-decl')
      metafile.streams().addSegment(stream_prefix + 'methods', segment)

      self._augmentVarset(construct, varset, 'method-defn', locals())
      segment = self.instantiateTemplate(
        context, construct, varset, selector=self.id() + '-defn')
      metafile.streams().addSegment(stream_prefix + 'methods-defn', segment)

    else:
      # The method declaration and definition reside in the same place.
      #  - For such languages, there should be a single method and initializer
      #    template, template<$baselang>
      self._augmentVarset(construct, varset, 'method', locals())
      segment = self.instantiateTemplate(context, construct, varset)
      metafile.streams().addSegment(stream_prefix + 'methods', segment)

    if not is_test_method:
      # TODO(wmh): Support multiple unit test methods.  One solution would be
      # to have 'test<python>', 'test<python=Extra1>', 'test<python=Extra2>',
      # where 'Extra1', 'Extra2', etc. are arbitrary ids that can be appended
      # to test names.  However, a much better solution is to have a 'tests'
      # block that is complex-valued, with a unittest construct for defining
      # methods. Whether we keep 'test' or not is an open question.
      #
      # TODO(wmh): Provide a facility for returning all attributes within a
      # construct with a given name and given prefix (or regexp!). Should
      # also support 'test', 'test<Extra1>', 'test<Extra2>' etc for situations
      # where base language is implicit.
      test_attr, _ = self.consattr(construct, 'test', missing=EMPTY)
      test_body = self.getBody(context, test_attr, baserem=baserem)
      empty_test = not test_body
      if empty_test:
        # An empty 'test' attribute still yields a trivial (pass-only) test
        # method, so the test body is never empty from here on.
        test_body = ['pass']
      testset = VarSet()
      testset.addVar('params', self.config('self'), attribute=None)
      testset.addVar('comment', '', attribute=None)
      # We add a special line to the test body that introduces the 'test'
      # variable as a synonym for 'self'.  Within a meta method definition
      # the 'scope' block uses 'self', so having the 'test' block also use
      # 'self' can be confusing.  Using 'test' instead makes more sense.
      if not empty_test:
        test_body = ['test = self'] + test_body
      # TODO(wmh): By adding this extra line, we will need to adjust the
      # line-mapping by one.
      testset.addVar('scope', test_body, attribute=test_attr)
      testset.addVar('type', 'void')
      testset.addVar('modifiers', 'public')

      # Call subclass hook to allow varset modifications.
      # NOTE(review): this passes 'varset' (the source method's VarSet), not
      # 'testset' — that looks suspicious given the 'test-method' selector;
      # confirm whether 'testset' was intended before changing it.
      self._augmentVarset(construct, varset, 'test-method', locals())

      test_segment = self.instantiateTemplate(
        context, construct, testset, selector='%s,test' % basel)
      metafile.streams().addSegment('test-methods', test_segment)

  def compile_method(self, metafile, construct, config):
    """Compile a 'method' construct (delegates to _compile_executable)."""
    self._compile_executable(metafile, construct, config, initializer=False)

  def compile_initializer(self, metafile, construct, config):
    """Compile an 'initializer' construct (delegates to _compile_executable)."""
    self._compile_executable(metafile, construct, config, initializer=True)

  def compile_field(self, metafile, construct, config):
    """Compile a 'field' construct.

    There is no generic implementation; baselanguage subclasses must override.
    """
    raise NotImplementedError

  def compile_var(self, metafile, construct, config):
    print 'Here in %s compiling %s into %s' % (
      self, construct.kindid(), self.name())

  def compile_native(self, metafile, construct, config):
    """Compile a 'native' construct (verbatim base-language code).

    The native text is comment-rewritten for the base language, wrapped in a
    trivial template, and routed to a stream chosen by the kind of the
    (nearest non-category) parent construct.

    Args:
      metafile: MetaFile
        Provides the context and output streams.
      construct: Construct
        The native construct being compiled.
      config: dict
        Configuration information; the 'test' key selects test streams.
    """
    # Although how we format the native code doesn't depend on which lexical
    # scope we are at, where we actually write the result does depend on who
    # our parent construct is.
    pyrem = self.config('rem')
    context = metafile.context()
    parent = construct.parentConstruct()
    scope, _ = self.consattr(construct, 'scope', missing=EMPTY)
    scope_str = scope.asStr(context)

    # Rewrite line comments in the native text to use the base language's
    # comment token ('rem' config value).
    varset = VarSet()
    varset.addVar(
      'text',
      context.re('line_comment').sub(r'\1' + pyrem, scope_str),
      scope)
    # NOTE(review): 'location' and 'primary' (below) are computed but never
    # used in this method — possibly vestigial; confirm before removing.
    location = construct.attrval('location', defattr=LOOKUP, context=context)
    stream_prefix = 'test-' if config.get('test', False) else ''

    native_segment = self.instantiateTemplate(
      context, construct, varset, template='\n${text}\n')

    # Skip over 'category' ancestors; routing is decided by the first
    # non-category parent kind.
    kind = parent.kind()
    while kind == 'category':
      parent = parent.parentConstruct()
      kind = parent.kind()
    
    primary = construct.primary()
    streams = metafile.streams()
    if kind == 'class':
      streams.addSegment(stream_prefix + 'methods', native_segment)
    elif kind == 'method' or kind == 'initializer':
      # This will only happen if we are in a scope<*> child and are parsing
      # complex statement-level constructs.
      # TODO(wmh): Generalize this to support any statement-level construct ...
      # as long as 'method' is in our ancestor set, we want to do this.
      raise meta.compiler.errors.Error(
        'complex method native code not yet supported')
    elif kind == 'field':
      raise meta.compiler.errors.Error(
        'Not yet supporting native blocks in field constructs')
    elif kind == 'File' or kind == 'namespace':
      # Inter-class native code is written to the special 'class-preamble'
      # stream. Each class is responsible for checking this stream for content
      # and adding it to the code they generate (and removing the code from
      # the class-preamble stream). The 'namespace' code is also responsible
      # for checking this after all classes have been processed to catch any
      # postamble.
      #
      # TODO(wmh): The above implementation means that the first class in a
      # namespace captures all the non-class code before the first class
      # definition in the namespace, and that special handling of code after
      # the last class is added.  It would be conceptually (and implementation)
      # cleaner to provide a special "namespace initialization" stream to
      # write non-class code that appears before any classes are defined,
      # then have classes maintain the non-class code that appears AFTER them
      # before the next class.
      #
      # NOTE(review): the same segment is added to BOTH streams here; unless
      # exactly one of them is consumed-and-cleared downstream, the native
      # code could be emitted twice — confirm against the stream consumers.
      streams.addSegment(
        stream_prefix + 'class-preamble', native_segment)
      streams.addSegment(
        stream_prefix + 'class-postamble', native_segment)
    else:
      # TODO(wmh): Replace this with a proper error!
      raise meta.compiler.errors.Error(
        'Unknown parent %s for native code' % kind)

  # ----------------------------------------------------------------------
  # Template methods used by compile_* to be overridden in subclasses

  def _augmentConstruct(self, metafile, construct, config):
    """Invoked before the construct-specific compile_* method.

    This template method allows individual baselanguages to perform any
    modifications of the parse tree necessary before rendering (for example,
    some baselanguages might require certain implicit methods be added,
    etc.)

    Args:
      metafile: MetaFile
        The file whose construct is being compiled.
      construct: Construct
        The construct to potentially modify
      config: dict
        Configuration information.

    Returns: dict or None
      A modified version of config, or None (meaning no modification of
      config needed).  The base implementation is a no-op (returns None).
    """
    pass

  def _augmentVarset(self, construct, varset, activity, data):
    """Modify the varset for a given construct.

    This method is invoked by the default implementations of various compile_*
    methods as a means of providing subclasses a degree of customization without
    requiring a full overriding of the methods themselves. Since the varsets are
    the culmination of what is needed to instantate a template representing
    baselanguage code, by providing a hook into base-language specific
    functionality before we instantiate the varset, subclasses should have a fair
    amount of flexibility in how they customize things.

    Args:
      construct: Construct
        The construct being compiled.
      varset: VarSet
        The varset that has been formed, ready for use in instantiating a template.
      activity: str
        A conceptual indicator of what activity is being performed.  Examples:
         'class': compiling a class
         'method': compiling a user-defined method (in either source or test class)
         'test-method': compiling a auto-generated test-method
      data: dict
        A mapping of variable to value, for variables a subclass implementation
        might find useful.

    Raises:
      meta.compiler.errors.Error: always, in this base implementation — any
        activity reaching here was unhandled by every subclass in the chain.
    """
    # Subclasses should use the following idiom:
    #  if activity == '...':
    #    ...
    #  elif activity == '...':
    #    ...
    #  else:
    #    super($class, self)._augmentVarset(construct, varset, activity, data)
    raise meta.compiler.errors.Error('Unrecognized activity %s' % activity)

  def compileBaseCode(self, metafile, construct=None):
    """Perform the actions needed to compile base-language code.

    Args:
      metafile: MetaFile
        The metafile whose generated base code is to be compiled.
      construct: Construct or None
        Identifies the level at which to perform compilation. If None, it
        means perform the most general compilation possible for the metafile
        in question.

    Returns: bool
      True on success, False if any errors occurred.
      NOTE(review): this base implementation is a no-op and returns None;
      subclasses are expected to honor the documented bool contract.
    """
    pass

  def _superSyntax(self, construct, class_name, method_name, args):
    """Return the syntax used to invoke the parent definition of method.

    Args:
      construct: Construct
        The construct (method or initializer) needing the super syntax.
      class_name: str
        Name of class
      method_name: str
        Name of method
      args: list of str
        The args to pass to the super call.

    Returns: str
      Valid base-language syntax for invoke a parent method.
    """
    raise NotImplementedError

  def docstr(self, context, construct, indent=0, add_params=False,
             default_lines=None):
    """Format the comment attribute of a construct into a base-language repr.

    Args:
      context: Context
      construct: Construct
      indent: int
      add_params: bool
        If True, we are generating a method docstr, and are to add a 'Args:'
        section to the doc string for each  parameter, and a 'Returns:' section
        for the return value.
      default_lines: list of str or None
        The list of lines to use as the docstr if one doesn't exist.
        None means don't add one.

    Returns: two-tuple
      [0] str
        Note that this value never has a trailing newline (and callers rely on
        this fact).
      [1] Attribute (the 'comment' attribute from which docstr was obtained)
    """
    comment, _ = self.consattr(construct, 'comment')
    if not comment:
      return '', comment

    text = comment.asStr(
      context, strip_comments=True, indent=indent,
      default_lines=default_lines)
    if not text:
      return '', comment

    # TODO: add_params is accepted but Args:/Returns: generation is not yet
    # implemented here.
    prefix = self.config('rem') + ' '
    stripped = text.strip()
    lead = text[:indent]
    if '\n' in stripped:
      # Multi-line comment: re-prefix every continuation line with the
      # base-language comment token.
      old_sep = '\n' + (' ' * indent)
      new_sep = old_sep + prefix
      return lead + prefix + stripped.replace(old_sep, new_sep), comment
    # One-line comments should not have a newline.
    return lead + prefix + stripped, comment

  def formatParams(self, construct, metafile, invocation=False, level=None):
    """Obtain the base-language-specific syntax for parameters from construct.

    Args:
      construct: Construct
      metafile: MetaFile
      invocation: bool
        If True, format args for invocation, not declaration.  This
        means the receiver is not added at the beginning, and
        default values are not included, just the var names.
      level: str or None
        One of 'instance', 'static' or 'meta', or None.  If None, it
        means we should NOT add the level-dependent receiver arg to the
        args list (this is useful, for example, when formatting a call to
        super().method(args)).  This is usually NOT None though, and the fact
        that this is a keyword arg should not let one think that the default
        value is usually the correct one ... only in rare circumstances is it.

    Returns: tuple
      [0] str (the parameter listing itself)
      [1] list of str (type checking code to insert at the beginning of the
          method to validate args, assign defaults, etc.).
      [2] array of lines representing formatted documentation for all args.
      [3] the attribute to use when reporting location of generated code.
    """
    raise NotImplementedError

  def _formatImports(self, imports, formatted=False):
    """Create base-language import statements given a list of
    special meta-level import specifications.

    Args:
      imports: list of str
        The imports to format.  Normally, this is a baselang-independent
        format encoding relevant information suitable for passing to
        BaseLanguageOopl._parseEncodedImports(), but if formatted==True,
        each line is a valid import statement in the baselang in question.
      formatted: bool
        Controls how imports are interpreted. See 'imports' above.
    Returns: str
      A multi-line list of base-language import statements.
    """
    raise NotImplementedError

  def baseTypeToMeta(self, basetype, metac):
    """Convert a baselang type to a meta type.

    Subclasses must override; there is no generic implementation.

    Args:
      basetype: str
        The type to convert.
      metac: Compiler
        The Compiler instance that stores the typemap.

    Returns: str
    """
    raise NotImplementedError

  def metaTypeToBase(self, metatype):
    """Convert a metatype to its base-language equivalent.

    Subclasses must override; there is no generic implementation.

    Args:
      metatype: Type
        The type to convert.

    Returns: str
    """
    raise NotImplementedError

  def groupBaseFiles(self, filelist):
    """Group and sort a list of baselanguage files.

    Subclasses must override; there is no generic implementation.

    Args:
      filelist: list of str
        The files to sort.

    Returns: list of list of str
      Each element is a group of paths from filelist that are to be displayed
      together (usually on the same row where possible).
    """
    raise NotImplementedError

  # ----------------------------------------------------------------------
  # Service methods used by compile_*

  def _compileNamespace(self, metafile, construct, source_order=None,
                        test=False, comment=''):
    """Compile the source (test=False) or test (test=True) code.

    Args:
      metafile: MetaFile
        Bundles all sorts of useful objects.
      construct: Construct
        The namespace construct being compiled.
      source_order: list of str or None
        If test is False, this will be None.
        If test is True, this is the order of source classes within
        this module.
      test: bool
        True if generating test classes.
      comment: str
        The namespace-level comment.

    Returns: list of str
      The order in which the classes in this namespace were rendered
    """
    namespace = construct.id()
    streams = metafile.streams()
    context = metafile.context()
    meta_file = metafile.filename()
    local_order = None

    if self.config('class_primary'):
      # If the language has classes as primary entities, there is no
      # file created as part of namespace compilation itself (although
      # each class will have created one or more files that encode
      # namespace information).  Examples include: C++, Java, Perl
      pass

    else:
      # Languages that are NOT 'class_primary' are 'namespace-primary', which
      # means that all classes in the same namespace reside in the same
      # file.  Examples include: Python
      baselang_suffix = '.' + self.suffix()
      # NOTE(review): within this else-branch class_primary is known False,
      # so this conditional always yields '.'; kept for symmetry with the
      # identical expression in _compileClass.
      ns_prefix = '' if self.config('class_primary') else '.'
      if test:
        ns_suffix = '_test'
        stream_prefix = 'test-'
        selector = '%s,test' % self.id()
        dest_suffix = '_test' + baselang_suffix
      else:
        ns_suffix = ''
        stream_prefix = ''
        selector = ''
        dest_suffix = baselang_suffix

      # Establish the text to write to the namespace file.
      varset = VarSet()
      varset.addVar('comment', comment)
      varset.addVar('namespace', construct.id())

      # The order in which classes appear within this metafile (for this
      # namespace) is important. This ordering is available in the 'classes'
      # stream, augmented by source_order if provided.
      local_order = streams.stream(stream_prefix + 'classes')
      if source_order:
        local_order = local_order + source_order
      # TODO(wmh): Must obtain the ordering for the other modules by parsing
      # the namespace order file!
      poset = [local_order]

      # Classes have been written to separate files.  We obtain MetaSegment
      # instances for each file.  The order in which they are processed is
      # important, and dictated by order + source_order
      path = metafile.metapath(
        self, subpath=self.subpath(
          namespace, ns_prefix=ns_prefix, ns_suffix=ns_suffix))
      class_segments = self._loadNamespaceClassStream(
        path, baselang_suffix, poset)

      # Imports exist in the first segment of class_segments.
      varset.addVar('imports', '')

      # Any constructs found within the namespace but not within a class
      # (i.e. class-level native blocks, etc.) are added to the
      # 'class-preamble' stream so that the next class can process them. But
      # when this native code occurs after the last class, it is the
      # responsibility of namespace to handle it as postamble.
      varset.addVar(
        'postamble',
        streams.stream(stream_prefix + 'class-preamble', clear=True))

      # Allow subclasses to modify varset
      self._augmentVarset(construct, varset, 'namespace', locals())

      varset.addVar('classes', class_segments)
      segment = self.instantiateTemplate(
        context, construct, varset, selector=selector)

      # Write the module file.
      path = metafile.metapath(
        self, subpath=self.subpath(namespace, suffix=dest_suffix), mkdirs=True)

      # TODO(wmh): We need to add special support here or in 'subpath()' for
      # the situation where a namespace is being used as both a package and
      # a module.  That is, if both a/b.py and a/b/c.py exist, then we need
      # to write a/b.py to a/b/__init__.py instead of a/b.py.
      segment.serialize(metafile, path, meta_file)

    # Now that the baselang source code associated with everything in this
    # namespace has been generated, we are ready to perform baselang-level
    # compilation.
    if not test:
      self.compileBaseCode(metafile, construct=construct)

    return local_order

  def _loadNamespaceClassStream(self, namespace_dir, suffix, order):
    """Create MetaSegment instances for namespace_dir/*.<suffix>.

    Args:
      namespace_dir: str
        The directory containing the per-class files to accumulate.
      suffix: str
        The suffix to search for.
      order: list of list of str
        Partial ordering on which classes need to be generated before
        which others.  The order of processing of order[i] and order[j]
        is arbitrary, but order[i][m] must be processed before order[i][n]
        for m < n.

    Return: list of MetaSegment
    """
    # We need to replace the special 'import' block in each file with an
    # empty string (and capture the import specifications within that
    # deleted block) and properly update the line number mapping too.
    imports_re = re.compile(
      '# Imports for class \S+\n(.*)\n# End imports for class \S+\n', re.S)

    import_lines = []
    segment_map = {}
    if IO.exists(namespace_dir):
      # If a meta file consists only of native code, there won't be any
      # files here (or at least as of 2015/04/19 ... this may change as the
      # code evolves, but for now we check for that situation).
      for basename in IO.listdir(namespace_dir):
        cname, suffix2 = os.path.splitext(basename)
        if suffix2 == suffix:
          path = os.path.join(namespace_dir, basename)
          mapbasename = '.' + basename.replace(suffix, '.map')
          mappath = os.path.join(namespace_dir, mapbasename)
          if not IO.exists(mappath):
            raise meta.compiler.errors.Error('Failed to find %s' % mappath)
          segment, imports = MetaSegment.NewFromFile(
            path, mappath, extract_re=imports_re)
          if imports is None:
            print 'WARNING: Failed to find imports for %s' % basename
          else:
            for imp in imports:
              if imp:
                import_lines.append(imp)
            pass
          if segment:
            segment_map[cname] = segment

    segments = []
    for clist in order:
      for cname in clist:
        if cname in segment_map:
          segments.append(segment_map.pop(cname))
    # Any classes not specified in order can be processed in any order.
    segments.extend(segment_map.values())

    import_lines = self._formatImports(import_lines, formatted=True).split('\n')
    import_segment = MetaSegment(import_lines)
    segments.insert(0, import_segment)

    return segments

  def _compileClass(self, metafile, next_construct, namespace,
                    test=False, dotest=False, sep_class=False,
                    import_list=None):
    """Compile the source (test=False) or test (test=True) code for construct.

    Args:
      metafile: MetaFile
        Bundles all sorts of useful objects.
      next_construct: Construct
        The class construct being compiled.
      namespace: str
        The namespace into which the class is being compiled.  Used to
        determine the path to the file to write to.
      test: bool
        True if generating the test class.  The current implementation in
        terms of 'test' and 'dotest' is more convoluted than it needs to be
        (needs to be restructured to something more intuitive).  This method
        is invoked once for every source and test class encountered, with
        test=False, and dotest True if the class is a test class, False
        otherwise.  It is invoked a second time for non-test classes with
        'test' set to True and dotest set to False.
      dotest: bool
        See 'test' above.
      sep_class: bool
        True when the baselanguage keeps class declarations and definitions
        in separate files (e.g. C++ .h/.cc) — presumed from usage below;
        confirm against callers.
      import_list: list of str or None
        The collection of imports to include for this class.
    """
    # TODO(wmh): Currently, we compile classes as they are seen. In order to
    # handle non-class code between classes, we have each class process the
    # non-class code that appears BEFORE it after the previous class (and
    # handle non-class code after the last class within compile_namespace).
    # However, because the code that appears before the first class definition
    # within a namespace is special (and definitely not attached to the first
    # class), it makes much more sense to treat the initial non-class code
    # specially, then have each class process the non-class code AFTER it
    # before the next class definition. This, however, requires us to delay
    # compilation of classes ... when compileClass() is invoked on a
    # particular class construct, most of the compilation actually occurs on
    # the PREVIOUS class construct passed to this method (because when we see
    # this new class construct, we know that all of the non-class code between
    # it and the previous class will have been added to the previous class'
    # class-postamble stream.
    construct = next_construct

    streams = metafile.streams()
    context = metafile.context()

    # It is useful to know where the namespace came from (either the 'within'
    # attr on construct, or because of a lexically scoped 'namespace' ancestor
    # construct) so that we can more accurately report errors (mapping meta to
    # base line numbers)
    #
    # TODO(wmh): Fix this.  Either within attr or namespace primary attr.
    namespace_attr = None

    empty_class_segment = self.config('empty_scope_segment')

    meta_file = metafile.filename()
    class_name = construct.id()
    ns_prefix = '' if self.config('class_primary') else '.'

    if self.config('class_primary'):
      ns_suffix = ''
    else:
      # For languages in which classes are not primary, all classes within
      # a namespace are merged together into one file.  All unittest classes
      # are merged into a different file (with a _test.py suffix).
      ns_suffix = '_test' if test or dotest else ''

    baselang_suffix = '.' + self.suffix()
    if test:
      class_name += 'Test'
      stream_prefix = 'test-'
      stream_prefix2 = 'test-'
      selector = '%s,test' % self.id()
      parent_attr, parent = self.consattr(construct, 'testparent', missing=LOOKUP)
      comment_str, comment_attr = ('', None)
      scope_attr = None
      class_attr, _ = self.consattr(construct, 'test', missing=None)
    else:
      stream_prefix = ''
      stream_prefix2 = 'test-' if dotest else ''
      selector = ''
      parent_attr, parent = self.consattr(construct, 'extends', missing=LOOKUP)
      comment_str, comment_attr = self.docstr(
        context, construct, indent=0, add_params=False)
      scope_attr, _ = self.consattr(construct, 'scope', missing=EMPTY)
      class_attr = construct.primary()

    # A class always has its parent class as an association.  We add the
    # parent to the appropriate import list.
    if '.' in parent:
      imp_stream = 'test-imports' if test else 'imports'
      imp_spec = 'cls##%s' % parent
      streams.addLine(imp_stream, imp_spec)
      import_list.append(imp_spec)
      # TODO(wmh): Handle multiple parent classes.
      # TODO(wmh): Is it necessary to add to streams, or is import_list sufficient?

    # Gather the class body from the per-construct streams populated by the
    # compile_* methods that ran before this one.
    field_segments = streams.stream(stream_prefix + 'fields')
    accessor_segments = streams.stream(stream_prefix + 'accessors')
    method_segments = streams.stream(stream_prefix + 'methods')
    scope_segments = field_segments + accessor_segments + method_segments
    if not scope_segments:
      scope_segments = empty_class_segment

    if sep_class:
      field_defn_segments = streams.stream(stream_prefix + 'fields-defn')
      accessor_defn_segments = streams.stream(stream_prefix + 'accessors-defn')
      method_defn_segments = streams.stream(stream_prefix + 'methods-defn')
      scope_defn_segments = (
        field_defn_segments + accessor_defn_segments + method_defn_segments)

    # Instantiate the class template.
    varset = VarSet()
    varset.addVar('class', class_name, class_attr)
    varset.addVar('parent', parent, parent_attr)
    varset.addVar('comment', comment_str, comment_attr)
    varset.addVar('scope', scope_segments, scope_attr)
    # Subclasses modify modifiers in _augmentVarset as appropriate.
    varset.addVar('modifiers', '')
    # The namespace is the meta-level namespace.  Subclasses can use
    # this value to generate class-specific namespace_spec vars, for
    # example.
    varset.addVar('namespace', namespace, namespace_attr)

    ###
    # We are creating per-class files.  We write the text for this
    # class to disk (along with a .map file).

    # Write the imports
    import_text = self._formatImports(import_list)
    if not self.config('class_primary'):
      import_text = (
        '# Imports for class %s\n%s\n# End imports for class %s' % (
        class_name, import_text, class_name))
    varset.addVar('imports', import_text)

    # We process any inter-class constructs that occurred between myself and
    # the previous class.  The stream is cleared after we process the code.
    varset.addVar(
      'preamble',
      streams.stream(stream_prefix + 'class-preamble', clear=True))

    # Allow subclasses to modify the varset (by calling the template method
    # _augmentVarset()).
    self._augmentVarset(construct, varset, 'class', locals())

    # Obtain the instantiation
    segment = self.instantiateTemplate(
      context, construct, varset, selector=selector)

    # Serialize the class to disk.
    path = metafile.metapath(
      self,
      subpath=self.subpath(
        namespace, subpaths=class_name, suffix=baselang_suffix, ns_prefix=ns_prefix,
        ns_suffix=ns_suffix),
      mkdirs=True)
    segment.serialize(metafile, path, meta_file, details=True)

    # Deal with sep_class
    if sep_class:
      # This baselanguage has two streams where most baselanguages have
      # one.  The primary stream represents class (and sub-construct)
      # declarations, and the new secondary stream represents class
      # (and sub-construct) definitions.
      defn_import_list = streams.stream('imports-defn')
      defn_import_text = self._formatImports(defn_import_list)
      defn_preamble = ''
      defn_comment = ''
      defn_scope = scope_defn_segments
      # NOTE(review): 'namespace_spec' and 'end_namespace' are not added by
      # this method; they are presumably added by the subclass in
      # _augmentVarset('class') above — confirm for each sep_class baselang.
      defn_namespace_spec, defn_namespace_attr, _, _ = varset.get(
        'namespace_spec')
      defn_namespace_end, defn_namespace_end_attr, _, _ = varset.get(
        'end_namespace')

      defn_varset = VarSet()
      defn_varset.addVar('class', class_name, class_attr)
      defn_varset.addVar('imports', defn_import_text)
      defn_varset.addVar('preamble', defn_preamble)
      defn_varset.addVar('comment', defn_comment)
      defn_varset.addVar('scope', defn_scope)
      defn_varset.addVar(
        'namespace_spec', defn_namespace_spec, defn_namespace_attr)
      defn_varset.addVar(
        'end_namespace', defn_namespace_end, defn_namespace_end_attr)

      defn_segment = self.instantiateTemplate(
        context, construct, defn_varset, selector=self.id() + '-defn')
      # TODO(wmh): Define a template method for this stuff, or find some
      # other way to avoid putting baselang logic in this method.
      if self.id() == 'cpp':
        assert path.endswith('.h')
        defn_path = path.replace('.h', '.cc')
        defn_segment.serialize(metafile, defn_path, meta_file, details=True)

      else:
        raise meta.compiler.errors.Error(
          'Unknown sep_class baselang %s' % self.id())

    # The 'classes' stream is a simple list of class names, establishing the
    # order of appearance. The compile_namespace method uses this to establish
    # how to stitch together the individual class files.
    streams.addLine(stream_prefix2 + 'classes', class_name)

  def _compileAccessor(self, parser, construct, scope, acctype, varset,
                       default, preamble=None):
    """Generate code for an accessor.

    Args:
      parser: MetaFile
        Used to obtain streams and context.
      construct: Construct
        The field construct for which accessor is to be generated.  If
        the field has a scope, and that scope has an 'acctype' construct,
        its body is used, else default is.
      scope: Attribute
        The relevant scope attribute of construct.  Although it could be
        obtained from construct, it has almost certainly already been obtained
        in the caller, and is expensive to get from first principles needlessly.
      acctype: str
        One of 'get', 'set' or 'ref'
      varset: VarSet
        The collection of variables to be instantiated into the accessor
        template defined on the 'field' construct. The 'scope' variable is
        set within this method.
      default: list of str
        The default implementation for this accessor.  Will be ignored if
        there is an explicit accessor of the given type specified in the field.
      preamble: list of str or None
        Lines to insert before the actual body.  These lines are NOT replaced
        by accessor-explicit scope specifications, so they should contain only
        things that implement meta mandates (type checking, visibility
        enforcement in languages without native support, etc.)

    Returns: MetaSegment
    """
    context = parser.context()

    # The code representing the accessor defaults to 'default', but if there
    # is an explicit accessor, its scope body overrides.
    code = default
    accessor = scope.cons(acctype, default=None)
    if accessor:
      acc_scope = accessor.scope(context=context, default=None)
      if acc_scope:
        print 'EXPLICIT: %s.%s = %s' % (construct.id(), accessor.id(), acc_scope)
        code = acc_scope.value()

    # The full collection of lines making up the accessor includes the preamble
    if preamble is None:
      preamble = []
    varset.addVar('scope', preamble + code)

    # Instantiate the accessor-specific template and publish the result on
    # the 'accessors' stream.
    template = self.getTemplate(
      context, construct, selector=self.id() + '-' + acctype)
    segment = self.instantiateTemplate(
      context, construct, varset, template=template)
    parser.streams().addSegment('accessors', segment)
    return segment

  def _extractParams(self, construct):
    """Extract data from 'params' of construct.

    Args:
      construct: Construct
        The construct to extact params data from.

    Returns: two-tuple:
     [0] list of dict
      One dict per parameter, with keys:
        name: name of param
        varg: True if this is a vararg param
        type: baselang type of param
        metatype: metalang type of param
        default: the default value ... if it doesn't exist, required.
        comment: the unformatted comment text for the param
        param: the Attribute representing the param
     [1] Attribute (the 'param' attribute)
    """
    params_attr, params = self.consattr(construct, 'params', missing=EMPTY)
    extracted = []
    for param in params:
      _, metatype = self.consattr(param, 'type', missing=LOOKUP)
      kind_attr, kind = self.consattr(param, 'kind', missing=LOOKUP)
      _, comments = self.consattr(param, 'comment')
      # TODO: should we be using docstr() instead of the raw 'comments' value?
      varg = kind_attr and kind == 'vararg'
      entry = {
        'name': param.id(),
        'varg': varg,
        'type': self.metaTypeToBase(metatype),
        'metatype': metatype,
        'comment': comments,
        'param': param,
      }
      default_attr, _ = self.consattr(param, 'default', missing=None)
      if default_attr:
        # Varargs cannot carry defaults; everything else records one (the
        # 'default' key is absent for required params).
        if varg:
          raise meta.compiler.errors.Error(
            'vararg %s cannot have a default' % param.kindid())
        # fix up defval (wrap in quotes?)
        entry['default'] = self.baseValue(default_attr.valueStr())
      extracted.append(entry)
    return extracted, params_attr

  def createMethod(self, name, features=None, params=None, body=None,
                   returns='void', parent_block=None, termcode=15, line=0):
    """Create a new Method construct.

    Args:
      name: str
        The name (id) of the method.
      features: dict or None
        Maps feature key to feature value.
      params: list of dict or None
        Each dict contains
          var: str (the name of the argument)
          type: str (the meta-level type as a string)
          features: dict
            Maps 'var' feature keys to values.
      body: list of str or None
        The lines of text representing the body of the method.
      returns: str
        The meta-level return type of the method as a string.
      parent_block: ComplexBlockAttribute or None
        If present, which block to add the new method to.
      termcode: int
        The termination code to pass to GenericConstruct.New().
      line: int
        The line number to associate with all attributes of the new method.

    Returns: GenericConstruct of kind 'method'
    """
    # TODO(wmh): This can be generalized to make it easy to create an
    # arbitrary construct instance, no?  Any param is legal if it matches an
    # attribute of the desired construct type, and we can use
    # MetaFile.CreateNewAttribute() to create the appropriate Attribute
    # instances.  Could be very powerful!
    pos = 0
    # Normalize the optional collection args so iteration below is safe.
    if params is None:
      params = []
    if body is None:
      body = []
    if features is None:
      # BUG FIX: features was documented as 'dict or None' but a None value
      # previously crashed when iterated below.
      features = {}

    params_list = []
    for param in params:
      param_features = []
      if 'features' in param:
        # BUG FIX: this previously indexed params['features'] (the enclosing
        # list), raising TypeError whenever a param supplied features.
        for featkey, featval in param['features'].items():
          param_features.append(
            FeatureAttribute(featkey, featval, line=line, pos=pos))
      secondaries = [TypeAttribute('type', param['type'], line=line, pos=pos)]
      var = GenericConstruct.New(
        'var', param['var'], feature=param_features, secondary=secondaries,
        line=line, pos=pos)
      params_list.append(var)
    params_attr = ComplexBlockAttribute(
      'params', params_list, line=line, pos=pos)

    method_features = []
    for featkey, featval in features.items():
      method_features.append(
        FeatureAttribute(featkey, featval, line=line, pos=pos))
    scope_attr = SimpleBlockAttribute('scope', body, line=line, pos=pos)
    returns_attr = TypeAttribute('returns', returns, line=line, pos=pos)
    construct = GenericConstruct.New(
      'method', name, primary_type=IdAttribute,
      feature=method_features,
      secondary=[params_attr, scope_attr, returns_attr],
      termcode=termcode, line=line, pos=pos)
    if parent_block:
      parent_block.registerConstruct(construct)
    return construct

  def createMetaRunMethod(self, run, scope):
    """Generate a '_Meta_Run' method construct from a 'run' attribute.

    Args:
      run: Attribute
        A 'run' attribute within a class.
      scope: Attribute
        A 'scope' attribute within a class, to which the '_Meta_Run'
        method is added.

    Returns: construct
    """
    # This is base-language agnostic, hence its location in this file.
    line = run.line()
    pos = 0
    # Single parameter: 'args', a vector of strings.
    args_var = GenericConstruct.New(
      'var', 'args',
      secondary=[TypeAttribute('type', '&#vec<@str>', line=line, pos=pos)],
      line=line, pos=pos)
    secondaries = [
      ComplexBlockAttribute('params', [args_var], line=line, pos=pos),
      SimpleBlockAttribute('scope', run.value(), line=line, pos=pos),
      TypeAttribute('returns', 'void', line=line, pos=pos),
    ]
    return scope.newChild(
      'method', '_Meta_Run',
      feature=[FeatureAttribute('level', 'static', line=line, pos=pos)],
      secondary=secondaries,
      line=0, pos=0,  # TODO(wmh): Fix line and pos - from 'run' attribute!
    )

  # ----------------------------------------------------------------------
  # Miscellaneous methods.

  def _parseEncodedImports(self, imports):
    """Parse the stream of imports written by compile_class().

    Args:
      imports: str
        Multiline text, one line for each association discovered during
        compilation.  The encoding is rather esoteric, and meant to be
        entirely an internal thing.  Each line of input should have the
        following syntax:
           <kind> '##' <namespace_spec>
        where
          <kind>
             is of the feature values of the 'kind' attribute of 'assoc',
          <namespace_spec> is a word of '.'-delimited parts, where 0 to 2
             of the dots may instead be '!'.

    Returns: dict of dicts
      outer keys are
        'core': represents 'simple' imports with no dots
        'full': represents explicit full imports
        'complex': represents 'from a.b import c'
      inner keys are:
        'kind': the kind of import
        'base': the initial part of the import
        'sub': optional (the 'c' part in 'from a.b import c')
        'full': base and sub and cls separated by '.'
        'alias': optional (the alias for the import)
        'cls': optional (the class name, for class imports)
    """
    result = {
      'core': [],
      'full': [],
      'complex': []
    }
    cls_re = re.compile(r'[.!]([^.!]+)$')

    for kspec in imports:
      item = {}
      kparts = kspec.split('##')
      if len(kparts) < 2:
        raise meta.compiler.errors.Error(
          'Invalid imports: %s\n%s' % (kspec, imports))
      kind = kparts[0]
      item['kind'] = kind
      spec = kparts[1]
      item['orig'] = spec
      if len(kparts) > 2:
        item['alias'] = kparts[2]
      if kind == 'cls':
        # We are to ignore the last component of the spec for purposes of
        # importation.
        cls_match = cls_re.search(spec)
        if cls_match:
          spec = cls_re.sub('', spec)
          item['cls'] = cls_match.group(1)
      imp = spec.replace('!', '.')
      parts = imp.split('.')
      sub = None
      if len(parts) == 1:
        base = imp
        key = 'core'
      else:
        bparts = spec.split('!')
        if len(bparts) == 1:
          base = spec
          key = 'full'
        else:
          base = bparts[0]
          sub = bparts[1]
          key = 'complex'
      item['base'] = base
      if sub:
        item['sub'] = sub
        full = base + '.' + sub
      else:
        full = base
      if kind == 'cls':
        full += '.' + item.get('cls', 'FIX_WHY_CLS_MISSING')
      item['full'] = full
      result[key].append(item)

    for key, items in result.iteritems():
      items.sort(key=lambda v: v['base'])

    return result

  def subpath(self, namespace, subpaths=None, suffix='', prefix='',
              ns_prefix='', ns_suffix=''):
    """Create a subpath from a namespace and other args.

    The namespace is split on '.', its last component is decorated with
    ns_prefix/ns_suffix, any subpath components are appended, and the last
    component of the combined path is decorated with prefix/suffix.

    Args:
      namespace: str
        The namespace dictating the basic location of the subfile.
      subpaths: str or list of str or None
        Subpaths after the namespace ('/'-delimited when given as a str).
      prefix: str
        A prefix to add to the final part of the path.
      suffix: str
        A suffix to add to the final part of the path.
      ns_prefix: str
        A prefix to add to the final part of the namespace.
      ns_suffix: str
        A suffix to add to the final part of the namespace.

    Returns: str
    """
    segments = namespace.split('.')
    # Decorate the namespace tail before appending subpath components,
    # since those components (if any) become the new tail.
    segments[-1] = '%s%s%s' % (ns_prefix, segments[-1], ns_suffix)
    if subpaths:
      extra = subpaths.split('/') if isinstance(subpaths, str) else list(subpaths)
      segments += extra
    segments[-1] = '%s%s%s' % (prefix, segments[-1], suffix)
    return os.path.normpath(os.path.join(*segments))

  def _blockText(self, attr, context, indent=0, default=''):
    """Render a block attribute as multi-line text.

    Args:
      attr: SimpleBlockAttribute
        The attribute to obtain text from.
      context: Context
        The context within which to generate the text.
      indent: int
        The amount of space to place at the front of each line.
      default: str
        What to use as text if the block is empty.

    Returns: str
    """
    rendered = attr.asStr(context, indent=indent)
    if not rendered:
      rendered = default
    # Rewrite the meta 'remark' token at line starts into '#'.
    remark_token = context.token('remark')
    return re.sub(r'(\n\s*)' + remark_token, r'\1#', rendered)


class OoplCpp(BaseLanguageOopl):
  """All C++-specific information needed by meta (see module class hierarchy)."""

  # Base-language configuration; passed to the superclass in __init__ and
  # read back via self.config(key) (e.g. self.config('rem')).
  CONFIG = {
    'self': 'this',
    'cls': None,

    'null': 'NULL',
    'true': 'true',
    'false': 'false',

    # Comment syntax: single-line remark, and block-remark delimiters.
    'rem': '//',
    'rem_start': '/*',
    'rem_end': '*/',

    'empty_scope_segment': [],

    'class_primary': True,
    'initializer_name': '',  # means name of class
  }

  # Maps a meta-level type name to its C++ spelling.  Sized numeric types map
  # bit-width to spelling; %(T1)s/%(T2)s are placeholders for template
  # parameters.
  METATYPES = {
    'bool': 'bool',
    'char': 'char',
    'int': {
      8: 'byte',
      16: 'short',
      32: 'int',
      64: 'longlong',
    },
    # NOTE(review): unsigned widths are one bit narrower (7/15/31/63) yet
    # map to the same names as the signed entries -- confirm intended.
    'uint': {
      7: 'byte',
      15: 'short',
      31: 'int',
      63: 'long',
    },
    'real': {
      32: 'float',
      64: 'double',
    },
    'istr': 'std::string', # TODO(wmh): Change this to meta::lib::InternedString
    'str': 'std::string',
    'vec': 'std::vector<%(T1)s>',
    'map': 'std::map<%(T1)s,%(T2)s>',
  }

  # Reserved words of C++ that generated identifiers must avoid.
  # http://en.cppreference.com/w/cpp/keyword
  KEYWORDS = [
    'alignas', 'alignof', 'and', 'and_eq', 'asm', 'auto', 'bitand', 'bitor',
    'bool', 'break', 'case', 'catch', 'char', 'char16_t', 'char32_t', 'class',
    'compl', 'const', 'constexpr', 'const_cast', 'continue', 'decltype',
    'default', 'delete', 'do', 'double', 'dynamic_cast', 'else', 'enum',
    'explicit', 'export', 'extern', 'false', 'float', 'for', 'friend',
    'goto', 'if', 'inline', 'int', 'long', 'mutable', 'namespace', 'new',
    'noexcept', 'not', 'not_eq', 'nullptr', 'operator', 'or', 'or_eq',
    'private', 'protected', 'public', 'register', 'reinterpret_cast',
    'return', 'short', 'signed', 'sizeof', 'static', 'static_assert',
    'static_cast', 'struct', 'switch', 'template', 'this', 'thread_local',
    'throw', 'true', 'try', 'typedef', 'typeid', 'typename', 'union',
    'unsigned', 'using', 'virtual', 'void', 'volatile', 'wchar_t', 'while',
    'xor', 'xor_eq'
  ]

  # A C++ executable (and associated object files and library files) differs
  # based on the degree of each of the following desired:
  #  - debugging
  #  - optimization
  #  - inlining
  #  - profiling
  #
  # For each of the above, we define various levels:
  #   0 = off
  #   1 = low
  #   2 = avg
  #   3 = high
  #   4 = max
  #
  # The COMPILER variable maps each dimension and level to the
  # compiler-specific flag that implements the desired level of the given
  # dimension.  If a value does not exist at a given level, it takes on
  # the value of the level one less.
  #
  # TODO(wmh): This should be stored in a user-modifiable file instead of
  # here in code.  See $METAROOT/src/templates/compiler.txt for a start at
  # this.
  COMPILER = {

    # http://gcc.gnu.org/onlinedocs/gcc/Debugging-Options.html
    'debug': {
      'off':  {'clang': '', 'g++': ''},
      'low':  {'clang': '-g', 'g++': '-g'},
    },

    # http://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
    'optimize': {
      'off':  {'clang': '-O0', 'g++': '-O0'},
      'low':  {'clang': '-O1', 'g++': '-O1'},
      'avg':  {'clang': '-O2', 'g++': '-O2'},
      'high': {'clang': '-O3', 'g++': '-O3'},
      'max':  {'clang': '-O4', 'g++': '-O4'},
    },
    'profile': {
      'off':  {'clang': '', 'g++': ''},
      'low':  {'clang': '-fprofile-generate', 'g++': '-fprofile-generate'},
    },
    'inline': {
      'off':  {'clang': '', 'g++': ''},
    },

    # These do not affect the binaries or shared libraries created.
    'warn': {
      'off':  {'clang': '', 'g++': ''},
      'low':  {'clang': '-Wall', 'g++': '-Wall'},  # fix this
      'avg':  {'clang': '-Wall', 'g++': '-Wall'},
      'high': {'clang': '-Weverything', 'g++': '-Wall'},
    },
  }

  def __init__(self, metalang):
    """Register this language as 'cpp' with its extensions and config.

    Args:
      metalang: MetaLanguage
        The meta language this base language serves.
    """
    super(OoplCpp, self).__init__(
      metalang, 'cpp', 'C++', ['h', 'cc', 'cpp', 'c++'], OoplCpp.CONFIG)

  def compile_field(self, metafile, construct, config):
    """Compile a field construct into a C++ member plus accessors.

    Writes the raw member declaration to the 'fields' stream, then generates
    three accessors (getter, setter, reffer) by delegating to
    self._compileAccessor() for each.

    Args:
      metafile: MetaFile
        Supplies the output streams, context and warning machinery.
      construct: Construct
        The 'field' construct being compiled.
      config: dict
        Configuration information (not referenced by this implementation).
    """
    # TODO(wmh): This is almost identical to Java's implementation.  Let's
    # combine them!

    # In C++, the templates for construct field have the following vars:
    #  field: the meta-level name of the field
    #  scope: the complete implementation of the accessor.

    streams = metafile.streams()
    context = metafile.context()

    class_construct = construct.findAncestor('class')
    class_name = class_construct.id()  # NOTE(review): currently unused here
    field_name = construct.id()
    baserem = self.config('rem')  # NOTE(review): currently unused here

    # The 'select' feature attribute determines which accessors to generate.
    select_attr, select = self.consattr(construct, 'select', missing=LOOKUP)

    # The 'visibility' feature attribute determines the visibility of
    # these accessor methods (and the visibility of the field itself).
    visibility_attr, visibility = self.consattr(
      construct, 'visibility', missing=LOOKUP)

    # The 'level' feature attribute establishes whether the field and
    # accessors are defined on the class or its meta-class.  In languages
    # with support for both in the same class, the methods are defined
    # where appropriate for the language.
    level_attr, level = self.consattr(construct, 'level', missing=LOOKUP)

    # The 'status' feature attribute specifies whether the field is optional.
    # If so, the accessor code changes significantly, in order to support
    # this semantics.  An optional field must always be visibility private.
    status_attr, status = self.consattr(construct, 'status', missing=LOOKUP)

    # The 'nature' feature attribute specifies whether the accessors for
    # the field should be inlined or not.
    nature_attr, nature = self.consattr(construct, 'nature', missing=LOOKUP)

    # The 'type' secondary attribute defines the type of the field.
    type_attr, metatype = self.consattr(construct, 'type', missing=LOOKUP)
    basetype = self.metaTypeToBase(metatype)

    # The 'default' secondary attribute provides a default value (implicit
    # initialization during constructor invocation, required value for
    # optional fields).
    #
    # TODO(wmh): Verify that default can never take on the value None unless
    # it doesn't exist.
    #
    # TODO(wmh): For now, we do not perform any lookup on this attribute, but
    # there are obvious problems in being inconsistent about how various
    # attributes are treated.
    default_attr, default = self.consattr(construct, 'default', missing=None)

    # The 'scope' secondary attribute defines accessors and controls the
    # visibility of those accessors more so than the 'visibility' feature can.
    scope_attr, scope = self.consattr(construct, 'scope', missing=EMPTY)

    # Establish the modifiers: 'acc_modifiers' for the accessor methods,
    # 'defn_modifiers' for the member declaration itself.
    acc_modifiers = []
    defn_modifiers = []
    if level == 'static':
      acc_modifiers.append('static')
      defn_modifiers.append('static')
    elif level == 'meta':
      # We need to write this field to the meta-class instead.
      print 'Not yet supporting meta-level fields in C++.'
    if nature == 'separate':
      metafile.warning(
        'Non-inline accessors in C++ are currently still placed in '
        'header files')
    else:
      acc_modifiers.append('inline')

    # Setup the varset used to instantiate accessor templates.
    primary = construct.attr('field')
    if select == 'raw' and visibility == 'public':
      # Publicly-raw fields keep the user-visible name; otherwise a mangled
      # name is produced via rawField().
      rawfield = construct.id()
    else:
      rawfield = self.rawField(construct.id(), context)
    varset = VarSet()
    varset.addVar('field', primary.value())
    varset.addVar('rawfield', rawfield)

    # The setter is passed a value to set the field to.  In C++, we do not
    # want this to be const, but we do want to pass object types by reference,
    # yes?
    argtype = basetype
    if '@' in metatype.prefix() and metatype.base() not in Type.PRIMITIVE:
      argtype += '&'
    varset.addVar('param_type', argtype)

    # Generate the actual field definition.  Remember that the attributes of
    # the field construct refer to the high-level meta concept of field,
    # not to the low-level base-language implementation.  Visibility of the
    # meta-level field does not correspond to that of the baselang field ..
    # usually the underlying field is always private (one should always
    # access via the accessors).
    #
    # TODO(wmh): For now, this is not under a template.  Should it be?
    fvis = 'public:' if visibility == 'raw' else 'protected:'
    field_defn = [fvis] + defn_modifiers + [basetype, rawfield]
    if default is not None:
      field_defn.extend(['=', default]);
    streams.addLine('fields', ' '.join(field_defn) + ';')

    # GETTER
    if True:  # block exists only to give each accessor a parallel shape
      # TODO(wmh): In most base languages we only generate the accessors if
      # specified by the select, but that doesn't work ... all accessors
      # should always be declared in all base languages, just with proper
      # visibility. The problem here is that perl and python don't have proper
      # visibility constraints, and implementing them is very expensive
      # relative to what we want an accessor to be. Should we prefix the names
      # with _ for those languages?
      preamble = []
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in C++')
      else:
        code = ['return this->%s;' % rawfield]

      # Object ('@'-prefixed) types are returned by const reference.
      return_type = 'const %s' % basetype
      if '@' in  metatype.prefix():
        return_type += '&'

      # NOTE(review): the getter checks visibility == 'pubget' while the
      # setter/reffer check 'public' -- confirm this asymmetry is intended.
      get_modifiers = [
        ('public' if visibility == 'pubget' else 'protected') + ':',
      ] + acc_modifiers + [return_type]
      varset.addVar('modifiers', ' '.join(get_modifiers))
      varset.addVar('getter', field_name)

      self._compileAccessor(
        metafile, construct, scope_attr, 'get', varset, code, preamble=preamble)

    # SETTER
    if True:
      preamble = []
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in C++')
      else:
        code = ['this->%s = value;' % rawfield]

      return_type = 'void'

      set_modifiers = [
        ('public' if visibility == 'public' else 'protected') + ':'
      ] + acc_modifiers + [return_type]
      varset.addVar('modifiers', ' '.join(set_modifiers))
      varset.addVar('setter', field_name + 'Is')

      self._compileAccessor(
        metafile, construct, scope_attr, 'set', varset, code, preamble=preamble)

    # REFFER (returns a mutable reference to the underlying member)
    if True:
      preamble = []
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in C++')
      else:
        code = ['return this->%s;' % rawfield]

      return_type = basetype + '&'

      ref_modifiers = [
        ('public' if visibility == 'public' else 'protected') + ':',
      ] + acc_modifiers + [return_type]
      varset.addVar('modifiers', ' '.join(ref_modifiers))
      varset.addVar('reffer', field_name + 'Ref')

      self._compileAccessor(
        metafile, construct, scope_attr, 'ref', varset, code, preamble=preamble)

    # TODO(wmh): Add code for generating test methods for accessors.

  # ----------------------------------------------------------------------
  # Template method instantiations.

  def _augmentConstruct(self, metafile, construct, config):
    """Invoked before the construct-specific compile_* method.

    This template method allows individual baselanguages to perform any
    modifications of the parse tree necessary before rendering (for example,
    some baselanguages might require certain implicit methods be added,
    etc.)

    For C++, 'namespace' constructs get a deep-copied config with
    'sep_class' enabled (header/source split), and a Makefile is written
    into each directory from the namespace directory up toward the root
    output directory.

    Args:
      metafile: MetaFile
        Supplies compiler resources and output paths.
      construct: Construct
        The construct to potentially modify
      config: dict
        Configuration information.

    Returns: dict or None
      A modified version of config, or None (meaning no modification of
      config needed).
    """
    kind = construct.kind()
    result = None

    if kind == 'namespace':
      namespace = construct.id()
      # In C++, classes and all the constructs within classes result in data
      # being written to two files (header and source), rather than there
      # being a single repository of data.  The generic compile_* methods in
      # BaseLanguageOopl look to the 'sep_class' config value to determine
      # whether to set up this two-stream implementation.
      result = copy.deepcopy(config)
      result['sep_class'] = True

      # Ensure that there is a Makefile in each namespace directory
      # in the namespace inheritance hierarchy.
      makefile_template_path = metafile.compiler().resource('makefile-template')
      fp = IO.reader(makefile_template_path)
      try:
        makefile_contents = fp.read()
      finally:
        IO.close(fp)
      root_dir = metafile.metapath(self)
      namespace_dir = metafile.metapath(self, subpath=self.subpath(namespace))
      if not IO.exists(namespace_dir):
        IO.makedirs(namespace_dir, 0755)
      # NOTE(review): the loop assumes namespace_dir lies beneath root_dir;
      # if it did not, walking up via dirname would never hit root_dir and
      # the loop would not terminate -- confirm metapath() guarantees this.
      while namespace_dir != root_dir:
        makefile_path = os.path.join(namespace_dir, 'Makefile')
        fp = IO.writer(makefile_path)
        try:
          fp.write(makefile_contents)
        finally:
          IO.close(fp)
        logging.info(
          'Wrote %d bytes to %s', len(makefile_contents), makefile_path)
        namespace_dir = os.path.dirname(namespace_dir)

    return result

  def _augmentVarset(self, construct, varset, activity, data):
    """Modify the varset for a given construct.

    This method is invoked by the default implementations of various compile_*
    methods as a means of providing subclasses a degree of customization
    without requiring a full overriding of the methods themselves. Since the
    varsets are the culmination of what is needed to instantiate a template
    representing baselanguage code, by providing a hook into base-language
    specific functionality before we instantiate the varset, subclasses
    should have a fair amount of flexibility in how they customize things.

    Args:
      construct: Construct
        The construct being compiled.
      varset: VarSet
        The varset that has been formed, ready for use in instantiating a
        template.
      activity: str
        A conceptual indicator of what activity is being performed.  Examples:
         'class': compiling a class
         'method': compiling a user-defined method (in source or test class)
         'test-method': compiling an auto-generated test-method
      data: dict
        A mapping of variable to value, for variables a subclass
        implementation might find useful.

    Raises:
      meta.compiler.errors.InternalError: for an unrecognized data['level']
        during 'method-decl' handling.
    """
    if activity == 'namespace':
      pass

    elif activity == 'class' or activity == 'class-by-namespace':
      # In C++, when defining parents, one can specify whether to inherit
      # public, protected or private.  The current simple string value of
      # 'extends' means we either need to use special syntax to indicate this,
      # or we make 'extends' a complex-block-valued attribute and introduce
      # a 'cls' construct or some such that allows us to provide more info.
      # This same 'cls' construct would be used for the 'interface' construct
      # as well, but some issues:
      #   - there is usually only one element in the 'extends' block (single
      #     inheritance is most common)
      #   - there is only one additional piece of information associated with
      #     'cls'.
      # Not sure it is worth it, but I don't like special hacky syntax either.
      spec_list = []
      _, extends = self.consattr(construct, 'extends', missing=LOOKUP)
      if extends and extends != '""':
        spec_list.append('public ' + extends.replace('.', '::'))
      _, implements = self.consattr(construct, 'implements', missing=LOOKUP)
      # implements is a LiteralList; fetch its data once (previously the
      # result was bound to an unused local and data() was called again).
      for impl in implements.data():
        spec_list.append('public ' + impl.replace('.', '::'))
      parent_spec = ' : ' + ', '.join(spec_list) if spec_list else ''
      varset.addVar('parent_spec', parent_spec)

      # TODO(wmh): How to implement class visibility in C++?  The lookup is
      # kept for its validation side effects; 'modifiers' is never populated
      # yet, so the addVar below is currently dead.
      modifiers = []
      _, visibility = self.consattr(construct, 'visibility', missing=LOOKUP)
      if modifiers:
        varset.addVar(
          'modifiers', ' '.join(modifiers) + ' ', construct.primary())

      # Define $namespace_spec, which identifies which namespace the class
      # resides within.
      namespace, namespace_attr, delim, width = varset.get('namespace')

      namespace_lines = []
      namespace_ends = []
      indent = ''
      for part in namespace.split('.'):
        namespace_lines.append(indent + 'namespace ' + part + ' {')
        namespace_ends.append(indent + '}')
        # Disable this if we don't want indentation (since we won't be
        # indenting the class definition itself ... or should we?)
        indent += '  '
      namespace_ends.reverse()

      # TODO(wmh): If we added support to BaseLanguage.instantiateTemplate()
      # for simple replacement requests like:
      #     ${namespace/pattern/replacement}
      # we could avoid various of these language-specific hardcodings by
      # giving the person writing the template more power.
      varset.addVar('namespace_under', namespace.replace('.', '_'))
      varset.addVar(
        'namespace_spec', namespace_lines, namespace_attr)
      varset.addVar('end_namespace', namespace_ends, namespace_attr)

    elif activity == 'method-decl':
      mods = []

      visibility = data['visibility']
      # Currently, meta visibilities map to C++ visibilities. TODO(wmh): This
      # will not necessarily always be true. How to set things up so that when
      # an additional feature value is added to a particular attribute, the
      # code for various languages does not end up breaking? For example, I'm
      # blindly copying meta visibility into Java visibility, but if I add a
      # new visibility type to meta, this will break java for those types.
      mods.append(visibility + ':')

      level = data['level']
      if level == 'instance':
        # nothing needed for this default kind
        pass
      elif level == 'static':
        mods.append('static')
      elif level == 'meta':
        # Meta methods are written to a meta class, and are treated as instance
        # variables there, so no kind modifier needed.
        pass
      else:
        raise meta.compiler.errors.InternalError('Invalid level %s' % level)
      varset.interpolate('modifiers', {'modifiers': ' '.join(mods)})

    elif activity == 'method-defn':
      # Qualify the method name with its class for out-of-class definitions;
      # initializers take the class name itself.
      class_construct = construct.findAncestor('class')
      class_name = class_construct.id()
      method_name = (
        class_name if construct.kind() == 'initializer' else construct.id())
      varset.addVar('method', '%s::%s' % (class_name, method_name))

    elif activity == 'test-method':
      # TODO(wmh): Debug tracing; remove once test-method support is done.
      print('****** HERE')

    else:
      # BUG FIX: this previously read super(OoplJava, self) -- a copy/paste
      # error from the Java backend that would fail at runtime inside OoplCpp.
      super(OoplCpp, self)._augmentVarset(construct, varset, activity, data)

  def compileBaseCode(self, metafile, construct=None):
    """Perform the actions needed to compile base-language code.

    Args:
      metafile: MetaFile
        The file whose generated C++ code is to be compiled.
      construct: Construct or None
        Identifies the level at which to perform compilation. If None, it
        means perform the most general compilation possible for the metafile
        in question.  In C++ only the construct-is-None case does any work
        (see the comment below).

    Returns: None
      NOTE(review): the docstring previously claimed a bool success flag and
      documented a nonexistent 'test' parameter; this implementation returns
      no value, so callers must not rely on a boolean result.
    """
    # In C++, we do not perform any per-construct compilation, because any
    # change in any C++ source or header file requires not only compilation
    # of the class itself, but also the shared library representing the
    # namespace within which it belongs, and the parent namespace, etc.
    #
    # The Makefile associated with each namespace directory knows how to
    # perform this recursive library compilation, so when this method is
    # invoked with construct None, we simply obtain the set of top-level
    # namespaces affiliated with the meta files.
    if construct is None:
      namespace_path = metafile.metapath(self, mkdirs=True)

      # We establish the set of user-controlled levels (across various
      # compilation dimensions) and their associated compiler-specific
      # flag correlates.  These will be used to establish values for the
      # following Makefile variables:
      #   CXX            = the C++ compiler (usually clang or g++)
      #   OBJ_SUBDIR     = some encoding of platform and dimension levels,
      #                    suitable for use as a directory subpath.
      #   LEVEL_CFLAGS   = dimension-specific flags to be sent to the compiler
      #   LEVEL_LDFLAGS  = dimension-specific flags to be sent to the linker
      level_ldflags = []
      # NOTE(review): profiling is enabled unconditionally here -- confirm it
      # should instead depend on the requested 'profile' level (see COMPILER).
      level_ldflags.append('-fprofile-generate')
      # Every namespace that this namespace is associated with needs to be
      # linked with it.  But this means we should be performing a dependency
      # analysis amongst all namespaces to ensure that we compile the
      # 'lowest-level' ones first, or else we risk not finding a required
      # lib, or using an old version.  Note that this is only necessary in
      # OSX ... unix doesn't require these libs be included.
      level_ldflags.append('-lmeta.root')

      cvars = {}
      cvars['CXX'] = 'clang'
      cvars['LEVEL_LDFLAGS'] = ' '.join(level_ldflags)

      # We obtain the list of all top-level namespaces within the FileEnv
      # currently associated with baselang, and invoke _compileCode() on
      # each of them.
      #
      # TODO(wmh): We should be able to parallelize compilation of
      # namespaces as long as they aren't parent/child.
      for child in IO.listdir(namespace_path):
        root_ns_dir = os.path.join(namespace_path, child)
        if not os.path.isdir(root_ns_dir):
          print 'ignoring %s (not a dir)' % root_ns_dir
          continue
        if root_ns_dir.endswith('_test'):
          # TODO(wmh): Fix this ... test classes should not be in a separate
          # directory from base classes.
          print 'ignoring %s' % root_ns_dir
          continue
        self._compileCode(root_ns_dir, cvars=cvars)

  def _superSyntax(self, construct, class_name, method_name, args):
    """Return the syntax used to invoke the parent definition of method.

    Args:
      construct: Construct
        The construct (method or initializer) needing the super syntax.
      class_name: str
        Name of class
      method_name: str
        Name of method
      args: list of str
        The args to pass to the super call.

    Returns: str
      Valid base-language syntax for invoking a parent method.  Currently
      always the empty string for C++ (see below).
    """
    # Note that in C++, the superSyntax is not placed within the method
    # body, but rather is added to the initializer-list before the body.
    # As such, the implementation of how super-invocations are made needs
    # to be revamped.  Instead of embedding them into preamble and postamble,
    # which gets embodied into scope, we need to define pre_super and
    # post_super varset vars and change the templates of at least
    # initializers in all classes to use these new vars.

    # For now, we return empty string here so that C++ doesn't generate
    # invalid code always (it just won't properly call non-default base class
    # initializers).
    return ''

    # NOTE(review): the code below is intentionally unreachable -- it is the
    # eventual implementation, preserved for when the initializer-list
    # template machinery described above exists.
    class_construct = construct.parentConstruct()
    parent_attr, parent = self.consattr(
      class_construct, 'extends', missing=LOOKUP)
    return '%s(%s)' % (parent.replace('.', '::'), ', '.join(args))

  def formatParams(self, construct, metafile, invocation=False, level=None):
    """Format the base-language parameter listing for a construct.

    Args:
      construct: Construct
      metafile: MetaFile
      invocation: bool
        If True, format args for invocation, not declaration.  This
        means the receiver is not added at the beginning, and
        default values are not included, just the var names.
      level: str or None
        One of 'instance', 'static' or 'meta', or None.

    Returns: tuple
      [0] list of str (the formatted parameters themselves)
      [1] list of str (type checking code to insert at the beginning of the
          method to validate args, assign defaults, etc.).
      [2] array of lines representing formatted documentation for all args.
      [3] the attribute to use when reporting location of generated code.
    """
    # TODO(wmh): This is the same implementation as Java ... remove code
    # redundancy.
    typechecks = []  # no type-checking code is ever generated here
    doclines = []

    info_list, params = self._extractParams(construct)
    formatted = []
    for entry in info_list:
      pieces = [entry['type'], entry['name']]
      if 'default' in entry:
        pieces += ['=', entry['default']]
      formatted.append(' '.join(pieces))

    if params.value():
      attribute = params
    else:
      attribute = construct.primary()
    return formatted, typechecks, doclines, attribute

  def _formatImports(self, imports, formatted=False):
    """Create base-language import statements given a list of
    special meta-level import specifications.

    Args:
      imports: list of str
        The imports to format.  Normally, this is a baselang-independent
        format encoding relevant information suitable for passing to
        BaseLanguageOopl._parseEncodedImports(), but if formatted==True,
        each line is a valid import statement in the baselang in question.
      formatted: bool
        Controls how imports are interpreted. See 'imports' above.
    Returns: str
      A multi-line list of C++ #include statements.
    """
    lines = []
    data = self._parseEncodedImports(imports)
    merged = sorted(
      data['core'] + data['complex'] + data['full'], key=lambda v: v['base'])
    for item in merged:
      kind = item['kind']
      subpath = item['full'].replace('.', '/')
      if kind == 'cls':
        line = '#include "%s.h"' % subpath
      elif kind == 'lib':
        line = '#include "%s"' % subpath
      elif kind == 'stdlib':
        line = '#include <%s>' % subpath
      else:
        # Previously an unrecognized kind either raised NameError or silently
        # reused the previous iteration's line; fail explicitly instead.
        raise meta.compiler.errors.InternalError(
          'Unknown import kind %r for %s' % (kind, item['full']))
      lines.append(line)
    return '\n'.join(sorted(set(lines)))

  def metaTypeToBase(self, metatype):
    """Convert a metatype to its base-language (C++) equivalent.

    Args:
      metatype: Type
        The type to convert.

    Returns: str
    """
    if not metatype.isValid():
      # Invalid types may be verbatim base-language escapes of the form
      # {#...#}, in which underscores stand in for spaces.
      result = metatype.raw()
      if result.startswith('{#') and result.endswith('#}'):
        result = result[2:-2].replace('_', ' ')
    else:
      raw = metatype.raw()
      base = metatype.base()
      params = metatype.params()

      # Map template parameters T1..Tn to their base-language equivalents
      # so METATYPES templates can interpolate them.
      param_map = {}
      if params:
        for i, param_metatype in enumerate(params, start=1):
          param_map['T%d' % i] = self.metaTypeToBase(param_metatype)

      metatypes = self.METATYPES
      if base in metatypes:
        # primitive or native type
        basetype = metatypes[base]
        if isinstance(basetype, dict):
          # This type expects an integer arg (a bit width); round up to the
          # next supported width.
          arg = int(params[0].raw()) if params else 32
          while arg not in basetype:
            arg += 1
            if arg > 64:
              # NOTE: message previously said 'java'; this is the C++
              # converter.
              raise meta.compiler.errors.Error(
                'No C++ type for meta %s' % raw)
          result = basetype[arg] % param_map
        else:
          result = basetype % param_map
      else:
        # class type - @, & and a single '*' all map to the raw class type.
        result = base

      # We've established the base type (in result). Now we add in the
      # prefixes.
      prefs = metatype.prefix() or ''
      has_ref = False
      obj_const = False

      if '@' in prefs:
        # There can only ever be one @ in the prefixes for a metatype, and
        # it is always at the end except for an optional '#'.  A copy ('@')
        # prefix has no spelling in the C++ type, so it is simply stripped.
        prefs = prefs.replace('@', '')
      if '&' in prefs:
        # There can only ever be one & in the prefixes for a metatype.
        has_ref = True
        prefs = prefs.replace('&', '')
      if prefs and prefs[-1] == '#':
        obj_const = True
        prefs = prefs[:-1]

      # Now prefs consists of any number of #* and/or *
      parts = []
      if obj_const:
        parts.append('const ')
      parts.append(result)
      parts.append(prefs.replace('*#', 'const*'))
      if has_ref:
        parts.append('&')

      result = ''.join(parts)

    return result

  def groupBaseFiles(self, filelist):
    """Group and sort a list of baselanguage files.

    Args:
      filelist: list of str
        The files to sort.

    Returns: list of list of str
      Each element is a group of paths from filelist that are to be displayed
      together (usually on the same row where possible).
    """
    # We want to group file.py and file_test.py into a group: strip the
    # final extension and use the remaining path as the grouping key.
    groups = collections.OrderedDict()
    pre = re.compile(r'((?:.*/)?\S+?)\.[^.]+$')
    for path in sorted(filelist):
      match = pre.match(path)
      if match:
        groups.setdefault(match.group(1), []).append(path)
      else:
        groups[path] = [path]
    result = []
    # Only the grouped paths are needed here, not the keys.
    for sublist in groups.values():
      if sublist[0].endswith('.cc'):
        # Show headers before implementations (.h before .cc).
        result.append(list(reversed(sublist)))
      else:
        result.append(sublist)
    return result

  # ----------------------------------------------------------------------
  # Service methods

  def _compileCode(self, namespace_dir, cvars=None):
    """Compile all code needed to create a shared library for a directory.

    Args:
      namespace_dir: str
        A directory representing a namespace of C++ code.
      cvars: dict or None
        The list of compilation-related variables whose defaults are to be
        overridden.  The Makefile assigned to each namespace directory in
        C++ assumes the following variables will be overridden:
          CXX           --> compiler to use (clang or g++)
          OBJ_SUBDIR    --> some encoding of all the flags that affect binaries
          LEVEL_CFLAGS  --> the flags to be sent to the compiler related to
                            user-controlled levels across various dimensions.
          LEVEL_LDFLAGS --> the flags to be sent to the linker related to
                            user-controlled levels across various dimensions.
    """
    args = ['make']
    if cvars:
      for key, value in cvars.iteritems():
        args.append('%s=%s' % (key, value))
    args.append('recursive-lib')
    p = subprocess.Popen(
      args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=namespace_dir)
    print '=' * 70
    print '%% cd %s' % namespace_dir
    print '%% %s' % ' '.join(args)
    stdout, stderr = p.communicate()
    print 'Status : %d' % p.returncode
    print '-' * 70
    for line in stdout.split('\n'):
      print line


class OoplJava(BaseLanguageOopl):

  CONFIG = {
    'self': 'self',
    'cls': None,
    # TODO(wmh): Should the meta-to-base mapping be a subdict within
    # config, something else entirely, etc.?
    'null': 'null',
    'true': 'true',
    'false': 'false',

    'rem': '//',
    'rem_start': '/*',
    'rem_end': '*/',

    'empty_scope_segment': [],

    'class_primary': True,
    'initializer_name': '',  # means name of class
  }

  METATYPES = {
    'bool': ('boolean', 'Boolean'),
    'char': ('char', 'Character'),
    'int': {
      8: ('byte', 'Byte'),
      16: ('short', 'Short'),
      32: ('int', 'Integer'),
      64: ('long', 'Long')
    },
    'uint': {
      7: ('byte', 'Byte'),
      15: ('short', 'Short'),
      31: ('int', 'Integer'),
      63: ('long', 'Long')
    },
    'real': {
      32: ('float', 'Float'),
      64: ('double', 'Double'),
    },
    'str': ('String',),
    'vec': ('Vector',),
    'map': ('HashMap',),
  }

  # http://en.wikipedia.org/wiki/List_of_Java_keywords
  KEYWORDS = [
    'abstract', 'assert', 'boolean', 'break', 'byte', 'case', 'catch', 'char',
    'class', 'const', 'continue', 'default', 'do', 'double', 'else', 'enum',
    'extends', 'final', 'finally', 'float', 'for', 'goto', 'if', 'implements',
    'import', 'instanceof', 'int', 'interface', 'long', 'native', 'new',
    'package', 'private', 'protected', 'public', 'return', 'short', 'static',
    'strictfp', 'super', 'switch', 'synchronized', 'this', 'throw', 'throws',
    'transient', 'try', 'void', 'volatile', 'while']

  def __init__(self, metalang):
    super(OoplJava, self).__init__(
      metalang, 'java', 'Java', ['java'], OoplJava.CONFIG)

  def compile_field(self, metafile, construct, config):
    """Compile a field construct.

    Args:
      metafile: MetaFile
      construct: Construct
      config: dict
    """
    # In java, the templates for construct field have the following vars:
    #  field: the meta-level name of the field
    #  scope: the complete implementation of the accessor.

    streams = metafile.streams()
    context = metafile.context()

    class_construct = construct.findAncestor('class')
    class_name = class_construct.id()
    baserem = self.config('rem')

    # The 'select' feature attribute determines which accessors to generate,
    select_attr, select = self.consattr(construct, 'select', missing=LOOKUP)

    # The 'visibility' feature attribute determines the visibility of
    # these accessor methods (and the visibility of the field itself),
    visibility_attr, visibility = self.consattr(
      construct, 'visibility', missing=LOOKUP)

    # The 'level' feature attribute establishes whether the field and
    # accessors are defined on the class or its meta-class.  In languages
    # with support for both in the same class, the methods are defined
    # where appropriate for the language.
    level_attr, level = self.consattr(construct, 'level', missing=LOOKUP)

    # The 'status' feature attribute specifies whether the field is optional.
    # If so, the accessor code changes significantly, in order to support
    # this semantics.  An optional field must always be visibility private.
    status_attr, status = self.consattr(construct, 'status', missing=LOOKUP)

    # The 'type' secondary attribute defines the type of the field.
    type_attr, metatype = self.consattr(construct, 'type', missing=LOOKUP)
    basetype = self.metaTypeToBase(metatype)

    # The 'default' secondary attribute provides a default value (implicit
    # initialization during constructor invocation, required value for
    # optional fields.
    #
    # TODO(wmh): Verify that default can never taken on the value None unless
    # it doesn't exist.
    #
    # TODO(wmh): For now, we do not perform any lookup on this attribute, but
    # there are obvious problems in being inconsistent about how various
    # attributes are treated.
    default_attr, default = self.consattr(construct, 'default', missing=None)

    # The 'scope' secondary attribute defines accessors and controls the
    # visibility of those accessors more so than the 'visibility' feature can.
    scope_attr, scope = self.consattr(construct, 'scope', missing=EMPTY)

    # Establish the modifiers
    modifiers = []
    if level == 'static':
      modifiers.append('static')
    elif level == 'meta':
      # We need to write this field to the meta-class instead.
      print 'Not yet supporting meta-level fields in Java.'

    # Setup the varset used to instantiate accessor templates
    primary = construct.attr('field')
    rawfield = self.rawField(construct.id(), context)
    if select == 'raw' and visibility == 'public':
      rawfield = construct.id()
    varset = VarSet()
    varset.addVar('field', primary.value())
    varset.addVar('rawfield', rawfield)

    # The setter is passed an value to set the field to.  In Java, should
    # this be some modification of basetype, or is basetype sufficient?
    # Should 'final' be added to non-primitive types? What if the basetype
    # has 'final' in it ... should it be removed?
    varset.addVar('param_type', basetype)

    # Generate the actual field definition.  Remember that the attributes of
    # the field construct refer to the high-level meta concept of field,
    # not to the low-level base-language impleemntation.  Visibility of the
    # meta-level field does not correspond to that of the baselang field ..
    # usually the unerlying field is always private (one should always access
    # via the accessors.
    #
    # TODO(wmh): For now, this is not under a template.  Should it be?
    fvis = 'public' if visibility == 'raw' else 'protected'
    fmods = [fvis] + modifiers + [basetype, rawfield]
    if default is not None:
      fmods.extend(['=', default]);
    streams.addLine('fields', ' '.join(fmods) + ';')

    if 'get' in select:
      preamble = []
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in Java')
      else:
        code = ['return this.%s;' % rawfield]

      get_modifiers = [
        'public' if visibility in ('pubget', 'public') else 'protected'
      ] + modifiers
      get_modifiers.append(basetype)
      varset.addVar('modifiers', ' '.join(get_modifiers))

      self._compileAccessor(
        metafile, construct, scope_attr, 'get', varset, code, preamble=preamble)

    if 'set' in select:
      preamble = []
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in Java')
      else:
        code = ['this.%s = value;' % rawfield]

      set_modifiers = [
        'public' if visibility == 'public' else 'protected'] + modifiers
      set_modifiers.append('void')
      varset.addVar('modifiers', ' '.join(set_modifiers))

      self._compileAccessor(
        metafile, construct, scope_attr, 'set', varset, code, preamble=preamble)

    if 'ref' in select:
      preamble = []
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in Java')
      else:
        code = ['return this.%s;' % rawfield]

      ref_modifiers = [
        'public' if visibility == 'public' else 'protected'] + modifiers
      # TODO(wmh): This does not provide reference semantics.  Need to look
      # into whether this is possible in Java, and if so, how.
      ref_modifiers.append(basetype)
      varset.addVar('modifiers', ' '.join(ref_modifiers))

      self._compileAccessor(
        metafile, construct, scope_attr, 'ref', varset, code, preamble=preamble)

    # TODO(wmh): Add code for generating test methods for accessors.

  # ----------------------------------------------------------------------
  # Template method instantations.

  def _augmentConstruct(self, metafile, construct, config):
    """Invoked before the construct-specific compile_* method.

    This template method allows individual baselanguages to perform any
    modifications of the parse tree necessary before rendering (for example,
    some baselanguages might require certain implicit methods be added,
    etc.)

    Args:
      construct: Construct
        The construct to potentially modify
      config: dict
        Configuration information.

    Returns: dict or None
      A modified version of config, or None (meaning no modification of
      config needed).
    """

    if construct.kind() == 'class':
      # In Java, we also want to write a 'Main' method into any class that
      # has a run block (in addition to the _Meta_Run' method that is
      # generated.  The Main() method will simply call _Meta_Run().
      run_attr, _ = self.consattr(construct, 'run', missing=None)
      if run_attr:
        scope_attr, _ = self.consattr(construct, 'scope', missing=None)
        main_method = scope_attr.cons('Main', default=None)
        if not main_method:
          method = self.createMethod(
           'main',
           features={'visibility': 'public', 'level': 'static'},
           params=[{'var': 'args', 'type': '{#String[]#}'}],
           body=[
             'Vector vargs = new Vector();',
             'for (int i = 0; i < args.length; ++i) {',
             '  vargs.add(args[i]);',
             '}',
             '%s._Meta_Run(vargs);' % construct.id(),
           ],
           parent_block=scope_attr)

  def _augmentVarset(self, construct, varset, activity, data):
    """Modify the varset for a given construct.

    This method is invoked by the default implementations of various compile_*
    methods as a means of providing subclasses a degree of customization without
    requiring a full overriding of the methods themselves. Since the varsets are
    the culmination of what is needed to instantate a template representing
    baselanguage code, by providing a hook into base-language specific
    functionality before we instantiate the varset, subclasses should have a fair
    amount of flexibility in how they customize things.

    Args:
      construct: Construct
        The construct being compiled.
      varset: VarSet
        The varset that has been formed, ready for use in instantiating a template.
      activity: str
        A conceptual indicator of what activity is being performed.  Examples:
         'class': compiling a class
         'method': compiling a user-defined method (in either source or test class)
         'test-method': compiling a auto-generated test-method
      data: dict
        A mapping of variable to value, for variables a subclass implementation
        might find useful.
    """
    if activity == 'namespace':
      pass

    elif activity == 'class' or activity == 'class-by-namespace':
      # Define $parent_spec, which identifiers the parent and interfaces.
      spec = ''
      _, extends = self.consattr(construct, 'extends', missing=LOOKUP)
      if extends:
        spec += ' extends ' + extends
      _, implements = self.consattr(construct, 'implements', missing=LOOKUP)
      # implements is a LiteralList.
      implements_list = implements.data()
      if implements_list:
        spec += ' implements ' + ', '.join(implements)
      varset.addVar('parent_spec', spec)

      # Define $modifiers (visibility and abstract)
      _, abstract = self.consattr(construct, 'abstract', missing=LOOKUP)
      _, visibility = self.consattr(construct, 'visibility', missing=LOOKUP)
      modifiers = []
      if abstract and abstract == 'abstract':
        modifiers.append('abstract')
      if visibility:
        # TODO(wmh): Dumping meta-level value into base language without any
        # filtering is problematic wrt future extension.  Should we be
        # checking for expected values?   What if we maintained a dictionary
        # for each feature attribute mapping attribute meta-level feature value
        # to base-lang feature value?  A missing element would be detected and
        # reported as an error.
        modifiers.append(visibility)
      if modifiers:
        varset.addVar(
          'modifiers', ' '.join(modifiers) + ' ', construct.primary())

      # Define $namespace_spec, which identifies which namespace the class
      # resides within.
      namespace, namespace_attr, delim, width = varset.get('namespace')
      varset.addVar('namespace_spec', 'package %s;' % namespace, namespace_attr)

    elif activity == 'method':
      mods = []

      visibility = data['visibility']
      # Currently, meta visibilities map to java visibilities. TODO(wmh): This
      # will not necessarily always be true. How to set things up so that when an
      # additional feature value is added to a particular attribute, the code for
      # various languages does not end up breaking? For example, I'm blindly
      # copying meta visibility into Java visibility, but if I add a new
      # visibility type to meta, this will break java for those types.
      mods.append(visibility)

      level = data['level']
      if level == 'instance':
        # nothing needed for this default kind
        pass
      elif level == 'static':
        mods.append('static')
      elif level == 'meta':
        # Meta methods are written to a meta class, and are treated as instance
        # variables there, so no kind modifier needed.
        pass
      else:
        raise meta.compiler.errors.InternalError('Invalid level %s' % level)
      varset.interpolate('modifiers', {'modifiers': ' '.join(mods)})

    elif activity == 'test-method':
      print '****** HERE'

    else:
      super(OoplJava, self)._augmentVarset(construct, varset, activity, data)

  def _superSyntax(self, construct, class_name, method_name, args):
    """Return the syntax used to invoke the parent definition of method.

    Args:
      construct: Construct
        The construct (method or initializer) needing the super syntax.
      class_name: str
        Name of class
      method_name: str
        Name of method
      args: list of str
        The args to pass to the super call.

    Returns: str
      Valid base-language syntax for invoke a parent method.
    """
    return 'super(%s);' % ', '.join(args)

  def formatParams(self, construct, metafile, invocation=False, level=None):
    """Obtain the python java for parameters from construct.

    Args:
      construct: Construct
      metafile: MetaFile
      invocation: bool
        If True, format args for invocation, not declaration.  This
        means the receiver is not added at the beginning, and
        default values are not included, just the var names.
      level: str or None
        One of 'instance', 'static' or 'meta', or None.

    Returns: tuple
      [0] str (the parameter listing itself)
      [1] list of str (type checking code to insert at the beginning of the
          method to validate args, assign defaults, etc.).
      [2] array of lines representing formatted documentation for all args.
      [3] the attribute to use when reporting location of generated code.
    """
    args = []
    typechecks = []  # always empty for Java
    doclines = []

    info_list, params = self._extractParams(construct)
    for info in info_list:
      arg_parts = [info['type'], info['name']]
      if 'default' in info:
        arg_parts.extend(['=', info['default']])
      args.append(' '.join(arg_parts))
    attribute = params if params.value() else construct.primary()

    return args, typechecks, doclines, attribute

  def _formatImports(self, imports, formatted=False):
    """Create base-language import statements given a list of
    special meta-level import specifications.

    Args:
      imports: list of str
        The imports to format.  Normally, this is a baselang-independent
        format encoding relevant information suitable for passing to
        BaseLanguageOopl._parseEncodedImports(), but if formatted==True,
        each line is a valid import statement in the baselang in question.
      formatted: bool
        Controls how imports are interpreted. See 'imports' above.
    Returns: str
      A multi-line list of python import statements.
    """
    lines = []
    data = self._parseEncodedImports(imports)
    merged = sorted(
      data['core'] + data['complex'] + data['full'], key=lambda v: v['base'])
    for item in merged:
      line = 'import ' + item['base']
      if 'sub' in item:
        line += '.' + item['sub']
      if 'cls' in item:
        line += '.' + item['cls']
      else:
        line += '.*'
      line += ';'
      lines.append(line)
    return '\n'.join(sorted(set(lines)))

  def metaTypeToBase(self, metatype):
    """Convert a metatype to its base-language equivalent.

    Args:
      metatype: Type
        The type to convert.

    Returns: str
    """
    if not metatype.isValid():
      result = metatype.raw()
      if result.startswith('{#') and result.endswith('#}'):
        result = result[2:-2].replace('_', ' ')
    else:
      pmap = {'@': 0, '*': 1, '&': 2}
      m = self.METATYPES
      prefixes = list(metatype.prefix() or '')
      n = len(prefixes)
      raw = metatype.raw()
      base = metatype.base()
      params = metatype.params()
      ptref_const = False
      obj_const = False
      if n and prefixes[-1] == '#':
        obj_const = True
        prefixes.pop()
        n -= 1
      if n and prefixes[0] == '#':
        ptref_const = True
        prefixes.pop(0)
        n -= 1
      if n > 1:
        raise meta.compiler.errors.Error('Unable to convert %s to Java' % raw)
      else:
        if n == 1:
          mark = prefixes[0]
        else:
          # n == 0: The meta type is missing explicit indicator ... the default
          # we use depends on whether the base type is primitive or not.
          mark = '@' if base in Type.PRIMITIVE else '*'
        idx = pmap[mark]
      if base in m:
        # primitive or native type
        mb = m[base]
        if isinstance(mb, dict):
          # This type expects an integer arg
          arg = int(params[0]) if params else 32
          while arg not in mb:
            arg += 1
            if arg > 64:
              raise meta.compiler.errors.Error('No java type for meta %s' % raw)
          mb = mb[arg]
        result = mb[min(len(mb)-1, idx)]
      else:
        # class type - @, & and a single '*' all map to the raw class type.
        result = base
      if ptref_const:
        result = 'final ' + result
      if obj_const:
        # Java has no clean way to deal with this ... do we wrap the class
        # in a special readonly delegator?
        pass
    return result

  def groupBaseFiles(self, filelist):
    """Group and sort a list of baselanguage files.

    Args:
      filelist: list of str
        The files to sort.

    Returns: list of list of str
      Each element is a group of paths from filelist that are to be displayed
      together (usually on the same row where possible).
    """
    # We want to group path/File.java and path/FileTest.java into a group.
    groups = collections.OrderedDict()
    pre = re.compile('(.*?)((?:Test)?\.java)$')
    for path in sorted(filelist):
      match = pre.match(path)
      if match:
        basepath, suffix = match.groups()
        groups.setdefault(basepath, []).append(path)
      else:
        groups[path] = [path]
    result = []
    for group, sublist in groups.iteritems():
      result.append(sublist)
    return result


class OoplPython(BaseLanguageOopl):
  """The Python Base Language.

  Python is different than most other languages in that it does not usually
  follow a one-class-per-file paradigm. Instead, related classes are grouped
  into modules, and one often works at the level of modules instead of
  (or in addition to) the level of classes.

  The following enumerates various ways we can implement meta semantics in
  Python, with pros (+), cons (-) and notes (~).

  (1) Produce a single python file for each class and be done with it.
       - Python modules make importation very easy, and forcing a
         one-class-per-module paradigm on python would break a useful
         feature.

  (2) Produce a single python file for each class initially (appropriately
      located in the file hierarchy based on its namespace), then merge
      all classes in the same module into a single module file (either
      via a simple 'cat', or something more intelligent that places all
      imports at the top, etc.).
       + we can maintain module support in python
       + having the per-class files may come in handy at some point.
       - merging the per-class files into a module file will get messy,
         especially if we want to provide support for module-level
         templates.
       - dealing with multiple import commands and style guides will get
         messy.
       - how do we know what order to add the classes making up a module
         into the module file?  For those classes within a given .meta
         file, we can rely on the order specified therein, but across
         multiple .meta files, what is the order?  Especially given that
         we won't always have all the .meta files, and will instead need
         to rely on the simple fact "all files in this directory are to
         be formed into a single module".
       - where do the non-class top-level constructs get placed in the
         per-class files?
       ~ we could use some special __init__.py magic to import all
         per-class files into a module.  This wouldn't give the user
         a readable low-level python module file, but it may (or may not)
         make module-level file creation easier.

  (3) Assume that each .meta file describes the classes belonging to the
      same module.
       - this would mean requiring all classes in the same *.meta file to
         have the same namespace, something that is not necessary in
         any other base language, and which limits the power of Meta for
         those base languages.
       + would make module-level file creation quite easy to implement

  (4) Require that, if one wants to compile into python, all classes
      in the same namespace (aka module) must be placed in the same .meta
      file (although classes from multiple namespaces may reside in the
      same such file).
       - this limits the flexibility of Meta for Python (cannot define
         classes within the same module in different .meta files)
       - this limits all other base languages in that they must define all
         classes from the same namespace in the same .meta file when
         without Python this constraint wouldn't be necessary.  Although
         it makes sense to define all classes from the same namespace in
         the same file, and this will be a common idiom, we don't want to
         *require* this, as sometimes a class from one namespace is more
         usefully stored elsewhere ... or not.
       ~ the fact that meta files can store multiple classes begs the
         question of how one finds class definitions in Meta files.  It
         may very well make sense to say that a file in $root/a/b/c.meta is
         defining classes in namespace a.b.c. In which case 3) above would
         be the strategy to use.  On the other hand, one can simply store
         all .meta files in the same directory (or directory hierarchy)
         and perform a simple
           % grep 'class <name>' $(find . -name '*.meta')
         to find where a class is defined, so maybe discoverability isn't
         an issue (Meta will almost certainly need to create a "manifest"
         mapping full-path class names to meta source files (and vice-versa)
         anyways, so discoverability once again isn't an issue).

  (5) Respect the namespace of each class found in a *.meta file, and
      write the data for classes belonging to a module to the appropriate
      file as part of compilation:
       - This requires whole-program compilation (or at least access to
         whole program meta-info). Suppose we only parse a single .meta
         file that defines three classes in the same namespace ... but
         there is another .meta file elsewhere that defines another five
         classes in the same namespace. By only compiling the one .meta
         file, we will either end up not having 5 classes (if we empty the
         module file before writing), or will need to do python-level
         parsing of the module file to determine which classes to replace
         and which to keep. This latter strategy is at least viable (the
         former is not), but is rather messy.
       - If multiple .meta files specify classes in the same module, how
         do we know what order the classes should be added to the module
         in?  We would need to provide additional syntax just for Python
         to deal with this.

  (6) Introduce a 'namespace' construct to Meta.  There are two variants
      of this approach:
       (a) Require all class definitions for a particular namespace to
           reside lexically within the namespace.
       (b) Allow classes to lexically appear anywhere and indicate which
           namespace they belong to via a 'within' secondary attribute.
      For both
       + Logical place exists to put module-level comments (define a
         'comment' attribute on 'namespace'), module-level unit testing
         support (define a 'test' attribute on 'namespace'), etc.
      For (a)
       - This gets cumbersome when we have nested namespaces
       + Makes it easy to determine which classes belong to which namespace,
         as they are all lexically present.
      For (b)
       - This makes finding all classes that reside in a particular
         namespace complicated ... whole world analysis is required.

  We are currently implementing (6)(a).  For now, we assume that the File
  always contains namespace constructs (no free-floating classes, no
  classes whose 'within' attribute doesn't match the namespace it is
  lexically within).  We will explore how to generalize this to (6)(b)
  in the future.
  """

  # Language configuration handed to BaseLanguageOopl.__init__ (see __init__
  # below): maps generic meta-level settings to their Python spellings.
  CONFIG = {
    'self': 'self',
    'cls': 'cls',
    # TODO(wmh): Should the meta-to-base mapping be a subdict within
    # config, something else entirely, etc.?
    'null': 'None',
    'true': 'True',
    'false': 'False',

    # Comment syntax: Python only has line comments; no block-comment
    # delimiters exist.
    'rem': '#',
    'rem_start': None,
    'rem_end': None,

    # Python requires 'pass' in an otherwise-empty scope.
    'empty_scope_segment': ['pass'],
    'class_primary': False,
    'initializer_name': '__init__',
  }

  # In python, which lacks static typing, this type mapping isn't as important
  # as in languages with static typing, but it is a useful reminder of how
  # things map.
  #
  # NOTE(review): several entries look copied from another baselanguage rather
  # than being Python types: 'str' maps to 'std::string' and 'vec'/'map' map
  # to C++ std::vector/std::map templates, while the int/uint entries use
  # Java-like names ('byte', 'short', 'longlong').  Confirm whether these are
  # intentional placeholders or copy-paste residue from OoplCpp/OoplJava.
  METATYPES = {
    'bool': 'bool',
    'char': 'str',
    'int': {
      8: 'byte',
      16: 'short',
      32: 'int',
      64: 'longlong',
    },
    'uint': {
      7: 'byte',
      15: 'short',
      31: 'int',
      63: 'long',
    },
    'real': {
      32: 'float',
      64: 'double',
    },
    'istr': 'str',
    '&#str': 'str',
    '*#str': 'str',
    'str': 'std::string',
    'vec': 'std::vector<%(T1)s>',
    'map': 'std::map<%(T1)s,%(T2)s>',
  }


  # See 'import keyword; keyword.kwlist'
  # NOTE(review): this is the Python 2 keyword list ('exec' and 'print' are
  # keywords; 'nonlocal', 'async' and 'await' are absent).
  KEYWORDS = [
    'and', 'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif',
    'else', 'except', 'exec', 'finally', 'for', 'from', 'global', 'if',
    'import', 'in', 'is', 'lambda', 'not', 'or', 'pass', 'print', 'raise',
    'return', 'try', 'while', 'with', 'yield']

  def __init__(self, metalang):
    """Register Python as a base language.

    Args:
      metalang: MetaLanguage
        The meta language this base language is attached to.
    """
    # Key, display name, source-file extensions, and the config dict above.
    super(OoplPython, self).__init__(
      metalang, 'python', 'Python', ['py'], OoplPython.CONFIG)

  def compileMisc(self, metafile):
    """Hook for miscellaneous compilation steps; currently a stub.

    Args:
      metafile: MetaFile
        The file being compiled (unused here for now).
    """
    # NOTE(review): debug placeholder output — this prints on every call.
    print 'Is there anything to do in compileMisc() for python?'

  def compile_field(self, metafile, construct, config):
    """Compile a field construct into Python fields and accessors.

    Depending on the 'select' feature attribute, generates get/set/ref
    accessor methods via self._compileAccessor().  Fields at level 'meta'
    are additionally written directly to the 'methods' stream as
    class-level assignments.

    Args:
      metafile: MetaFile
        Provides the parse context and the output streams written to.
      construct: Construct
        The 'field' construct being compiled.
      config: dict
        Configuration information (not referenced in this method).

    Raises:
      meta.compiler.errors.RequiredAttributeValue: if the 'type' attribute
        is missing (re-raised after printing the location).
      meta.compiler.errors.InternalError: if 'level' is not one of
        'instance', 'meta' or 'static'.
      meta.compiler.errors.Error: if the field is optional (not yet
        supported).
    """
    context = metafile.context()
    class_construct = construct.findAncestor('class')
    # NOTE(review): class_name and pyrem are computed but never used below.
    class_name = class_construct.id()
    pyrem = self.config('rem')

    # The 'select' feature attribute determines which accessors to generate,
    select = construct.attrval('select', defattr=LOOKUP, context=context)
    # The 'visibility' feature attribute determines the visibility of
    # these accessor methods (and the visibility of the field itself),
    visibility = construct.attrval(
      'visibility', defattr=LOOKUP, context=context)
    # The 'level' feature attribute establishes whether the field and
    # accessors are defined on the class or its meta-class.  In languages
    # with support for both in the same class, the methods are defined
    # where appropriate for the language.
    level = construct.attrval('level', defattr=LOOKUP, context=context)
    # The 'status' feature attribute specifies whether the field is optional.
    # If so, the accessor code changes significantly, in order to support
    # this semantics.  An optional field must always be visibility private.
    status = construct.attrval('status', defattr=LOOKUP, context=context)
    # The 'mutability' feature attribute specifies whether the field can be
    # modified.  Do not currently remember why this is necessary, as the type
    # should be sufficient for this ...
    mutability = construct.attrval('mutability', defattr=LOOKUP, context=context)
    # The 'type' secondary attribute specifies the meta type of the field.
    try:
      type_ = construct.attrval('type', defattr=LOOKUP, context=context)
    except meta.compiler.errors.RequiredAttributeValue:
      # Report where the failure happened before propagating the error.
      primary = construct.primary()
      print '****** HERE with primary %s at line %d' % (
        primary.keyStr(), primary.line())
      raise

    # The 'default' secondary attribute specifies the initial value of the
    # field (or, if optional, what to return when the field doesn't exist).
    # It is often not present, but if status is 'optional', it is required.
    default = construct.attrval('default', defattr=LOOKUP, context=context)
    # Map meta-level literals (null/true/false) to their Python spellings.
    default = self.CONFIG.get(default, default)
    # default = self.consattr(construct, 'default', missing=None)

    streams = metafile.streams()
    # NOTE(review): 'primary' is reassigned here but not used afterwards.
    primary = construct.attr('field')
    # TODO(wmh): The difference between field and _field should be under user
    # control.  Currently, it is dictated by context['field_prefix'] and
    # context['field_suffix'], so as long as those values can be specified by
    # the user we are good.  An alternative (probably not as good) would be to
    # have a template<python-field> attached to the field construct.
    rawfield = self.rawField(construct.id(), context)
    if select == 'raw' and visibility == 'public':
      # Public raw fields keep the bare field name (no prefix/suffix).
      rawfield = construct.id()

    if level == 'instance':
      # Instance fields are initialized in __init__.
      # Receiver is 'self'
      decorator = ''
      receiver = self.config('self')
    elif level == 'meta':
      # Meta fields are written verbatim to the method stream.
      # Accessors have a @classmethod decorator
      # Receiver is 'cls'
      decorator = '@classmethod'
      receiver = self.config('cls')
      comments = [
        '',
        '# field %s: %s' % (rawfield, type_),
      ]
      comment, _ = self.consattr(construct, 'comment')
      if comment:
        for ln in comment.value():
          # Normalize the meta comment prefix into plain '#' comment lines.
          ln = ln.replace('/# ', '#   ')
          if not ln.startswith('# '):
            ln = '#   ' + ln
          comments.append(ln)
      for line in comments:
        streams.addLine('methods', line)
      streams.addLine('methods', '%s = %s' % (rawfield, default))

    elif level == 'static':
      # Static fields are written verbatim to the method stream.
      # Accessors have a @staticmethod decorator
      # Receiver is the name of the class in which this field is defined.
      decorator = '@staticmethod'
      receiver = class_construct.id()
    else:
      raise meta.compiler.errors.InternalError('Invalid level %s' % level)

    typechecking = False  # TODO(wmh): Add to config or some such?
    protected_typecheck = 'meta.isProtected(self)'
    private_typecheck = 'meta.isPrivate(self)'
    vistype_map = None

    # TODO(wmh): It may not even be worth implementing visibility
    # verification in Meta(Python). We might be able to just compile
    # the stubs into Java or C++ and rely on those compilers to detect
    # visibility violations, no? Need to verify that this is actually
    # true in a program consisting only of stubs ... I don't think it will.
    if typechecking:
      # must define all possible visibilities for all possible acctypes.
      # This allows us to be more efficient about accessing, and reporting
      # errors when something is missing.
      # NOTE(review): keys here are 'pubget'/'protected'/'private'; confirm
      # they match the values 'visibility' can actually take (a plain
      # 'public' value would raise KeyError in the lookups below).
      vistype_map = {
        'get': {
          'pubget': '',
          'protected': protected_typecheck,
          'private': private_typecheck,
         },
        'set': {
          'pubget': protected_typecheck,
          'protected': protected_typecheck,
          'private': private_typecheck,
         },
        'ref': {
          'pubget': protected_typecheck,
          'protected': protected_typecheck,
          'private': private_typecheck,
         },
      }

    varset = VarSet()
    varset.addVar('decorator', decorator)
    varset.addVar('receiver', receiver)
    varset.addVar('rawfield', rawfield)
    varset.addVar('typecheck', '')

    # Obtain the scope
    # TODO(wmh): Replace all calls to construct.scope() with self.consattr()
    scope = construct.scope(context=context, default=EMPTY)
    meta_construct = self.metaConstruct(construct)

    #! select raw 100%
    #! select get 100%
    #! select set 100%
    #! select ref 100%
    #! select getset 100%
    #! select getref 100%
    #! select setref 100%
    #! select getsetref 100%
    if 'get' in select:
      preamble = []
      # vistype_map is only populated when typechecking is enabled above.
      typecheck = vistype_map['get'][visibility] if typechecking else None
      if typecheck:
        preamble.append(typecheck)
      if status == 'optional':
        default = self.consattr(construct, 'default', missing=None)
        if default is None:
          raise meta.compiler.errors.Error(
            'Must provide a default value for optional fields')
        # NOTE(review): 'code' below is dead — the unconditional raise that
        # follows makes optional fields unsupported for now.
        code = [
          "return self.__dict__.get('%s', %s)" % (rawfield, default)]
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields')
      else:
        code = ['return self.%s' % rawfield]
      self._compileAccessor(
        metafile, construct, scope, 'get', varset, code, preamble=preamble)

    if 'set' in select:
      preamble = []
      typecheck = vistype_map['set'][visibility] if typechecking else None
      if typecheck:
        preamble.append(typecheck)
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in Python')
      else:
        code = ['self.%s = value' % rawfield]
      self._compileAccessor(
        metafile, construct, scope, 'set', varset, code, preamble=preamble)

    if 'ref' in select:
      preamble = []
      typecheck = vistype_map['ref'][visibility] if typechecking else None
      if typecheck:
        preamble.append(typecheck)
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in Python')
      else:
        code = ['return self.%s' % rawfield]
      self._compileAccessor(
        metafile, construct, scope, 'ref', varset, code, preamble=preamble)

    # TODO(wmh): Add code for generating test methods for accessors.

  # ----------------------------------------------------------------------
  # Template method instantations.

  def _augmentConstruct(self, metafile, construct, config):
    """Invoked before the construct-specific compile_* method.

    This template method allows individual baselanguages to perform any
    modifications of the parse tree necessary before rendering (for example,
    some baselanguages might require certain implicit methods be added,
    etc.)

    For Python, the only work done here is for 'namespace' constructs:
    ensuring that the directories and __init__.py files implied by the
    namespace exist on disk (see the inline discussion below).

    Args:
      metafile: MetaFile
        ...
      construct: Construct
        The construct to potentially modify
      config: dict
        Configuration information.

    Returns: dict or None
      A modified version of config, or None (meaning no modification of
      config needed).  This implementation always falls through and
      returns None.
    """
    kind = construct.kind()
    if kind == 'namespace':
      # In python, we need to ensure that __init__.py files exist in every
      # directory to be treated as a python package. Note that Meta namespaces
      # represent BOTH python packages and python modules, and we need a means
      # of identifying whether a particular namespace represents a package
      # (i.e. the namespace has sub-namespaces within it) or a module (the
      # namespace has no sub-namespaces and is terminal). We only want to create
      # __init__.py files for packages, not modules.
      #
      # We do this as follows.
      #  - When we see a namespace (for example, a.b), we assume it represents
      #    a module.  As such, we ensure that a/__init__.py exists, but assume
      #    that we will be creating a/b.py, not a/b/*.
      #  - If a subsequent invocation of this method gives us the namespace
      #    a.b.c, we have proof that a.b is a package, not a module, and we
      #    ensure that both a/__init__.py and a/b/__init__.py exist, but
      #    we do NOT create a/b/c or a/b/c/__init__.py (until we see a
      #    namespace a.b.c.d).
      #  - If a/b.py exists and we see namespace a.b.c, we have a situation
      #    where a.b is both a package and a module. We can handle this by
      #    either moving a/b.py to a/b/__init__.py, or symlinking
      #    a/b/__init__.py to a/b.py. Since windows doesn't support symlinking,
      #    the former (moving a/b.py to a/b/__init__.py) may be best, but
      #    requires special logic wherever a/b.py was originally written (to
      #    detect situations where a/b/ exists, and to instead write to
      #    a/b/__init__.py in such situations).

      namespace = construct.id()
      topdir = metafile.metapath(self)

      subpaths = namespace.split('.')
      # The terminal segment is assumed to be a module, not a package, so it
      # is deliberately excluded from the directory-creation loop below.
      lastpath = subpaths.pop()

      # We know that all the internal subpaths represent directories, so we
      # create those directories and __init__.py within them.
      tdir = topdir
      for subpath in subpaths:
        tdir = os.path.join(tdir, subpath)
        init_path = os.path.join(tdir, '__init__.py')
        if not IO.exists(tdir):
          # 0700: owner-only permissions (Python 2 octal literal).
          IO.makedirs(tdir, 0700)
        if not IO.exists(init_path):
          pfile = tdir + '.py'
          if IO.exists(pfile):
            # We have a situation where a namespace is being used as both a
            # package and a module.
            # NOTE(review): 'Error' here is unqualified, unlike the
            # meta.compiler.errors.Error used elsewhere in this file —
            # confirm the module-level Error class is in scope.
            raise Error(
              'Not yet handling situations where a namespace is both a package '
              'and a module')
            # NOTE(review): unreachable past the raise above.  The intent:
            # we need to move pfile to init_path, and add logic to wherever
            # .py module files are written such that we write to the
            # dir/subfile/__init__.py file instead of dir/subfile.py when
            # dir/subfile exists as a directory.  See the notes in
            # BaseLanguageOopl._compileNamespace() for details.
          else:
            # We create an empty __init__.py file.
            fp = IO.writer(init_path)
            # Nothing to write, just need the file to exist.
            IO.close(fp)
            Log.info('Wrote %s', init_path)

      # NOTE: We are assuming that 'lastpath' represents a module, so we don't
      # do anything with it for now.  Only if we obtain proof that it is
      # actually a package (by seeing 'lastpath' as an internal part of a longer
      # namespace) will we create a directory for it.

  def _augmentVarset(self, construct, varset, activity, data):
    """Modify the varset for a given construct.

    This method is invoked by the default implementations of various compile_*
    methods as a means of providing subclasses a degree of customization without
    requiring a full overriding of the methods themselves. Since the varsets are
    the culmination of what is needed to instantiate a template representing
    baselanguage code, by providing a hook into base-language specific
    functionality before we instantiate the varset, subclasses should have a fair
    amount of flexibility in how they customize things.

    Args:
      construct: Construct
        The construct being compiled.
      varset: VarSet
        The varset that has been formed, ready for use in instantiating a template.
      activity: str
        A conceptual indicator of what activity is being performed.  Examples:
         'class': compiling a class
         'method': compiling a user-defined method (in either source or test class)
         'test-method': compiling a auto-generated test-method
      data: dict
        A mapping of variable to value, for variables a subclass implementation
        might find useful.

    Raises:
      meta.compiler.errors.InternalError: if a 'method' activity carries an
        unknown 'level' in data.
    """
    if activity == 'namespace':
      pass

    elif activity == 'class' or activity == 'class-by-namespace':
      # NOTE(review): the ' extends '/' implements ' spellings look Java-like;
      # confirm the downstream template expects them for Python classes.
      spec = ''
      _, extends = self.consattr(construct, 'extends', missing=LOOKUP)
      if extends:
        spec += ' extends ' + extends
      _, implements = self.consattr(construct, 'implements', missing=LOOKUP)
      # implements is a LiteralList; data() exposes the underlying list.
      implements_list = implements.data()
      if implements_list:
        # BUG FIX: join the extracted list of names, not the LiteralList
        # wrapper object itself.
        spec += ' implements ' + ', '.join(implements_list)
      varset.addVar('parent_spec', spec)

    elif activity == 'method':
      # Map the method's level to the appropriate Python decorator.
      level = data['level']
      if level == 'instance':
        modifiers = ''
      elif level == 'meta':
        modifiers = '@classmethod'
      elif level == 'static':
        modifiers = '@staticmethod'
      else:
        raise meta.compiler.errors.InternalError(
          'Invalid level %s' % level)
      varset.interpolate('modifiers', {'modifiers': modifiers})

    elif activity == 'test-method':
      pass

    else:
      # Unknown activities are delegated to the generic implementation.
      super(OoplPython, self)._augmentVarset(construct, varset, activity, data)

  def _superSyntax(self, construct, class_name, method_name, args):
    """Return the syntax used to invoke the parent definition of method.

    Args:
      construct: Construct
        The construct (method or initializer) needing the super syntax.
      class_name: str
        Name of class
      method_name: str
        Name of method
      args: list of str
        The args to pass to the super call.

    Returns: str
      Valid base-language syntax for invoke a parent method.
    """
    super_method = 'super(%s, %s).%s' % (
      class_name, self.config('self'), method_name)
    args_str = ', '.join(args)
    # TODO(wmh): Generalize the 76 to "method indentation depth"
    if len(super_method) + len(args_str) + 2 < 76:
      super_call = super_method + '(' + args_str + ')'
    else:
      super_call = super_method + '(' + '\n  ' + args_str + ')'
    return super_call

  def docstr(self, context, construct, indent=0, add_params=False,
             default_lines=None):
    """Format the comment attribute of a construct as a python doc-string.

    Args:
      context: Context
      construct: Construct
      indent: int
        Number of leading spaces the docstring is indented by.
      add_params: bool
        If True, we are generating a method docstr, and are to add a 'Args:'
        section to the doc string for each parameter, and a 'Returns:' section
        for the return value.
      default_lines: list of str or None
        The list of lines to use as the docstr if one doesn't exist.
        None means don't add one.

    Returns: two-tuple
      [0] str
        Note that this value never has a trailing newline (and callers rely on
        this fact).  Empty string if there is no comment attribute or it
        renders empty.
      [1] Attribute (the 'comment' attribute from which docstr was obtained)
    """
    comment, _ = self.consattr(construct, 'comment')
    if comment:
      # We form a string representing documentation for each argument.
      docstr = comment.asStr(
        context, strip_comments=True, indent=indent,
        default_lines=default_lines)
      if docstr:
        # Parse parameters, if we are forming the docstr for a method (controlled
        # by 'add_params')
        arglines = []
        if add_params:
          params = construct.attr(
            'params', default=EMPTY, context=context, param=self.id())
          for param in params.value():
            name = param.id()
            type_ = param.attrval('type', defattr=LOOKUP, context=context)
            arglines.append('%s: %s' % (name, self.metaTypeToBase(type_)))
            arg_comment = param.attrval(
              'comment', defattr=EMPTY, context=context)
            if arg_comment:
              # NOTE(review): l[3:] assumes each comment line carries a
              # three-char comment prefix — confirm against Attribute format.
              try:
                arglines.extend(['  ' + l[3:] for l in arg_comment])
              except TypeError:
                # arg_comment was not iterable line-wise; report and continue.
                print '%s has param %s with %s' % (
                  construct.kindid(), name, str(arg_comment))
          if arglines:
            docstr += '\nArgs:\n' + '\n'.join(['  ' + l for l in arglines])

          return_type = construct.attr('returns', default=None, context=context)
          if return_type:
            if arglines:
              docstr += '\n'
            docstr += '\nReturns: %s\n' % self.metaTypeToBase(
              return_type.value())
            return_comment = construct.attr(
              'returns_', default=None, context=context)
            if return_comment:
              retlines = return_comment.value()
              docstr += '\n'.join(['  ' + l[3:] for l in retlines])

        com_delim = '\"\"\"'
        # docstr[:indent] extracts the leading indentation (assumes docstr
        # begins with at least 'indent' spaces).
        if '\n' not in docstr.strip():
          # One-line comments should not have a newline.
          docstr = docstr[:indent] + com_delim + docstr.strip() + com_delim
        else:
          # Multi-line comments have their end-of-string syntax on a separate
          # line below the comment text.
          dent = docstr[:indent]
          docstr = dent + com_delim + docstr.strip() + '\n' + dent + com_delim
      else:
        docstr = ''
    else:
      docstr = ''
    return docstr, comment

  def formatParams(self, construct, metafile, invocation=False, level=None):
    """Obtain the python syntax for parameters from construct.

    Args:
      construct: Construct
      metafile: MetaFile
      invocation: bool
        If True, format args for invocation, not declaration.  This
        means the receiver is not added at the beginning, and
        default values are not included, just the var names.
      level: str or None
        One of 'instance', 'static' or 'meta', or None.  If None, it
        means we should NOT add the level-dependent receiver arg to the
        args list (this is useful, for example, when formatting a call to
        super().method(args).  This is usually NOT None though, and the fact
        that this is a keyword arg should not let one think that the default
        value is usually the correct one ... only in rare circumstances is it.

    Returns: tuple
      [0] list of str (the individual parameter expressions, e.g.
          ['self', 'x', 'y=3'], ready to be joined by the caller)
      [1] list of str (type checking code to insert at the beginning of the
          method to validate args, assign defaults, etc.).
      [2] array of lines representing formatted documentation for all args.
      [3] the attribute to use when reporting location of generated code.

    Raises:
      meta.compiler.errors.InvalidType: if a vararg's type is not vec/map.
      meta.compiler.errors.Error: if a vararg has a default value, or a
        positional param follows a keyword param.
      meta.compiler.errors.InternalError: if level is invalid.
    """
    # TODO(wmh): Rewrite this using self._extractParams.

    # TODO(wmh): Clean up the partial redundancy between 'invocation' and
    # 'level' == None.

    args = []
    typechecks = []  # NOTE(review): never populated; reserved for future use.
    doclines = []

    context = metafile.context()

    if not invocation:
      # Add initial receiver arg (but only if we aren't generating for
      # invocation).
      if level is None:
        # We are not supposed to add the receiver to the args listing.
        pass
      elif level == 'instance':
        args.append(self.config('self'))
      elif level == 'meta':
        args.append(self.config('cls'))
      elif level == 'static':
        pass
      else:
        raise meta.compiler.errors.InternalError('Invalid level %s' % level)

    # TODO(wmh): Provide type-checking support.
    params = construct.attr(
      'params', default=EMPTY, context=context, param=self.id())
    in_keywords = False
    for param in params.value():
      name = param.id()
      type_ = param.attrval('type', defattr=LOOKUP, context=context)
      type_base = type_.base()
      kind = param.attrval('kind', defattr=LOOKUP, context=context)
      comment = param.attrval('comment', defattr=EMPTY, context=context)

      doclines.append('%s: %s' % (name, type_.raw()))
      if comment:
        doclines.extend(['  ' + l for l in comment])

      # Some initial vararg parsing.
      #   - only two types are currently supported, 'vec' and 'map'
      vararg_str = None
      if kind == 'vararg':
        if type_base == 'vec':
          vararg_str = '*' + name
        elif type_base == 'map':
          vararg_str = '**' + name
        else:
          raise meta.compiler.errors.InvalidType(
            'vararg %s has invalid type %s' %
            (param.kindid(), str(type_)))

      # We want to check whether a 'default' exists or not, before asking
      # for attrval, because there is currently no way to distinguish between
      # an empty default string and a non-existent default.
      defattr = param.attr('default', default=None, context=context)
      if defattr:
        if kind == 'vararg':
          raise meta.compiler.errors.Error(
            'vararg %s cannot have a default' % param.kindid())
        default = self.baseValue(defattr.valueStr())
        # Depending on type, we need to wrap default in single or double quotes,
        # etc.  We really need the Type class defined to do this properly, but
        # for now we hack it up.
        #
        # TODO(wmh): type should by a Type instance, and the conversion code
        # can maybe be written on it (or at least facilitated).
        #
        # NOTE(review): this quoting logic is deliberately disabled via the
        # 'if False' below.
        if False and type_base == 'str':
          if default.find('\"') == -1:
            default = '"%s"' % default
          elif default.find("'") == -1:
            default = "'%s'" % default
          else:
            default = "'%s'" % default.replace("'", "\\'")
        # Indicate that we've started parsing keywords, not positionals
        in_keywords = True

        if invocation:
          args.append(name)
        else:
          args.append(name + '=' + default)
      elif kind == 'vararg':
        # Special case.
        name_str = vararg_str
        args.append(name_str)
      else:
        default = None
        if in_keywords:
          # We encountered a positional arg after a keyword arg.  This should
          # be validated during parsing, not during compilation, but for now
          # we report it here.
          raise meta.compiler.errors.Error(
            'Line %d: Found positional %s after keyword param encountered' %
            (param.primary().line(), name))
        else:
          # NOTE(review): kind == 'vararg' is impossible here (handled by the
          # elif above), so the first branch below is dead code.
          if kind == 'vararg':
            name_str = vararg_str
          else:
            name_str = name
          args.append(name_str)

    # If there is no params attribute, we use the primary attribute instead.
    # TODO(wmh): Should we just use None instead of the primary attribute?
    attribute = params if params.value() else construct.primary()

    return args, typechecks, doclines, attribute

  def _formatImports(self, imports, formatted=False):
    """Create base-language import statements given a list of
    special meta-level import specifications.

    Args:
      imports: list of str
        The imports to format.  Normally, this is a baselang-independent
        format encoding relevant information suitable for passing to
        BaseLanguageOopl._parseEncodedImports(), but if formatted==True,
        each line is a valid import statement in the baselang in question.
      formatted: bool
        Controls how imports are interpreted. See 'imports' above.
    Returns: str
      A multi-line list of python import statements.
    """
    lines = []
    if formatted:
      # imports are already in python format, but may involve redundancy.
      # TODO(wmh): this can contain multiple streams of basic imports and
      # 'from' imports and various other things. we want to partition
      # into "core python modules" and "user space".

      # This sorts all froms above all basic imports.
      partial_sort = sorted(set(imports))
      imp_index = None
      for i in range(0, len(partial_sort)):
        if partial_sort[i].startswith('import '):
          imp_index = i
          break
      if imp_index is None:
        lines = partial_sort
      else:
        lines = partial_sort[imp_index:] + partial_sort[:imp_index]
      import_text = '\n'.join(lines).lstrip()
    else:
      data = self._parseEncodedImports(imports)
      for item in data['core']:
        line = 'import ' + item['base']
        if 'alias' in item:
          line += ' as ' + item['alias']
        lines.append(line)
      merged = sorted(data['complex'] + data['full'], key=lambda v: v['base'])
      if merged:
        lines.append('')
      for item in merged:
        if 'sub' in item:
          line = 'from %s import %s' % (item['base'], item['sub'])
        else:
          line = 'import %s' % item['base']
        if 'alias' in item:
          line += ' as ' + item['alias']
        lines.append(line)
      import_text = '\n'.join(lines)
    return import_text

  # TODO(wmh): Generalize BASETYPES so that it can be used for other languages
  # and this method can be moved up the hierarchy?
  # Maps python base-language type names to their Meta type encodings.
  BASETYPES = {
    'str': '&str',
    'int': '@int',
    'bool': '@bool',
    'boolean': '@bool',
    'float': '@float',
    'double': '@double',
    'list': '*vec',
    'dict': '*map',
  }

  # Inverse mapping (Meta encoding -> python type name).  Note 'bool' and
  # 'boolean' both map to '@bool' above, so whichever key is iterated last
  # wins in the inverse.  Uses .items() rather than the py2-only
  # .iteritems() so this class body also loads under python 3.
  METATYPES = {v: k for (k, v) in BASETYPES.items()}
  # Recognizes composite python type descriptions like 'list of int'.
  BASE_RE = re.compile(r'(list|dict) of (.*)')
  # Recognizes a bare (possibly dotted) identifier.
  SIMPLE_RE = re.compile(r'^[a-zA-Z0-9_.]+$')

  def baseTypeToMeta(self, basetype, metac):
    """Convert a python type to a meta type.

    Args:
      basetype: str
        The type to convert.  Note that python types are informal, so we
        make some heuristics for converting, support explicit user-provided
        type mapping, and report warnings for unknown basetypes.
      metac: Compiler
        The Compiler instance that stores the typemap.

    Returns: str
    """
    # Track how often each base type is encountered.
    metac._typecnt[basetype] = metac._typecnt.get(basetype, 0) + 1

    # TODO(wmh): Isn't it redundant to have both Compiler.typemap() and
    # self.BASETYPES?  Let's pick one or the other.
    result = metac.typemap().get(basetype, self.BASETYPES.get(basetype))
    if not result:
      composite = self.BASE_RE.match(basetype)
      if composite:
        # 'list of X' / 'dict of X': convert the container and recurse on
        # the element type.
        container, element = composite.group(1), composite.group(2)
        result = '%s<%s>' % (
          self.BASETYPES[container], self.baseTypeToMeta(element, metac))
      elif self.SIMPLE_RE.match(basetype):
        # A bare (possibly dotted) identifier ... we assume it is a class,
        # and add a pointer.
        # TODO(wmh): Do we want to add '.' to SIMPLE_RE?
        result = '*' + basetype
      else:
        # TODO(wmh): We are using '{#' and '#}' as base-lang escape syntax
        # in multiple places ... need to merge all references into one, and
        # allow the start/end delimiters to be under user control.
        # Metatypes never contain spaces, so encode them as underscores.
        result = '{#' + basetype.replace(' ', '_') + '#}'
    return result

  def metaTypeToBase(self, metatype):
    """Convert a metatype to its base-language equivalent.

    Args:
      metatype: Type
        The type to convert.

    Returns: str
    """
    raw = metatype.raw()
    if not metatype.isValid():
      # Invalid types pass through untouched, except for the '{#...#}'
      # base-lang escape wrapper, whose payload is unescaped ('_' -> ' ').
      if raw.startswith('{#') and raw.endswith('#}'):
        return raw[2:-2].replace('_', ' ')
      return raw

    core = (metatype.prefix() or '') + (metatype.base() or '?')
    if core.startswith('{#') and core.endswith('#}'):
      return core[2:-2].replace('_', ' ')

    result = self.METATYPES.get(core, None)
    if not result:
      # Unknown core: strip any pointer markers from the raw spelling.
      return raw.lstrip('*')

    params = metatype.params()
    if params:
      # e.g. '*vec<@int>' renders as 'list of int'.
      result += ' of ' + '/'.join(self.metaTypeToBase(p) for p in params)
    return result

  def groupBaseFiles(self, filelist):
    """Group and sort a list of baselanguage files.

    Args:
      filelist: list of str
        The files to sort.

    Returns: list of list of str
      Each element is a group of paths from filelist that are to be displayed
      together (usually on the same row where possible).
    """
    # We want to group file.py and file_test.py into a group: both share the
    # same basepath once the optional '_test' and the '.py' suffix are
    # stripped.  Non-matching paths form singleton groups.
    groups = collections.OrderedDict()
    pre = re.compile(r'(.*?)((?:_test)?\.py)$')
    for path in sorted(filelist):
      match = pre.match(path)
      if match:
        groups.setdefault(match.group(1), []).append(path)
      else:
        groups[path] = [path]
    # .values() (not the py2-only .itervalues()/.iteritems()) works under
    # both python 2 and 3; OrderedDict preserves the sorted insertion order.
    return list(groups.values())

  def _moduleToMeta(self, baselang, output, module_path, indent=''):
    """Convert a python module to Meta source code.

    Args:
      baselang: BaseLanguage
        The base language driving the conversion.
      output: Output
        The lines making up the Meta source code.
      module_path: str
        Path to the module.
      indent: str
        Amount of indentation before each meta source line.
    """
    # TODO(wmh): Fix this!  Need to implement Antlr in python.
    parsed = python.Module.New(module_path)
    parsed.toMeta(self.metalang().compiler(), baselang, output, indent=indent)


class OoplPerl(BaseLanguageOopl):

  CONFIG = {
    'self': 'self',
    'cls': 'cls',
    # TODO(wmh): Should the meta-to-base mapping be a subdict within
    # config, something else entirely, etc.?
    'null': 'undef',
    'true': 'true',     # if we add  "use constant true  => 1;"
    'false': 'false',   # if we add  "use constant false => 0;"

    'rem': '#',
    'rem_start': None,
    'rem_end': None,

    'empty_scope_segment': [],
    'initializer_name': '_meta_init',
    'class_primary': True,
  }

  # See http://learn.perl.org/docs/keywords.html
  KEYWORDS = [
    # Perl syntax
    'CORE', '__DATA__', '__END__', '__FILE__', '__LINE__', '__PACKAGE__',
    'and', 'cmp', 'continue', 'do', 'else', 'elsif', 'eq', 'exp', 'for',
    'foreach', 'ge', 'gt', 'if', 'le', 'lock', 'lt', 'm', 'ne', 'no', 'or',
    'package', 'q', 'qq', 'qr', 'qw', 'qx', 's', 'sub', 'tr', 'unless',
    'until', 'while', 'xor', 'y'

    # Perl functions

    # '-A', '-B', '-C', '-M', '-O', '-R', '-S', '-T', '-W', '-X', '-b', '-c', '-d', '-e', '-f', '-g', '-k', '-l', '-o', '-p', '-r', '-s', '-t', '-u', '-w', '-x', '-z',
    # TODO(wmh): Do we want to be selective about which of these are considered
    # keywords?  Things like 'my' and 'bless' are obviously conceptually
    # thought of by programmers as "reserved words".
    'AUTOLOAD', 'BEGIN', 'CHECK', 'DESTROY', 'END', 'INIT', 'UNITCHECK',

    # Core
    'bless',
    'break',
    'chr',
    'defined',
    'delete',
    'die',
    'each',
    'eof',
    'eval',
    'exec',
    'exists',
    'exit',
    'int',
    'sort',
    'splice',
    'split',
    'substr',
    'undef',
    'wantarray',


    # Math
    'abs',
    'atan2',
    'cos',

    'accept',
    'alarm',

    # Sockets and networks
    'bind',
    'connect',
    'binmode',
    'endgrent',
    'endhostent',
    'endnetent',
    'endprotoent',
    'endpwent',
    'endservent',
    'getgrent',
    'getgrgid',
    'getgrnam',
    'gethostbyaddr',
    'gethostbyname',
    'gethostent',
    'getlogin',
    'getnetbyaddr',
    'getnetbyname',
    'getnetent',
    'getpeername',
    'getpgrp',
    'getppid',
    'getpriority',
    'getprotobyname',
    'getprotobynumber',
    'getprotoent',
    'getpwent',
    'getpwnam',
    'getpwuid',
    'getservbyname',
    'getservbyport',
    'getservent',
    'getsockname',
    'getsockopt',
    'setgrent',
    'sethostent',
    'setnetent	',
    'setpgrp',
    'setpriority',
    'setprotoent',
    'setpwent',
    'setservent',
    'setsockopt',

    # Miscellaneous
    'caller',

    # Files
    'chdir',
    'chmod',
    'chown',
    'chroot',
    'fcntl',
    'fileno',
    'flock',
    'getc',
    'syscall',
    'sysopen',
    'sysread',
    'sysseek',
    'system',
    'syswrite',
    'tell',
    'telldir',
    'close',
    'closedir',

    # String manipulation
    'chomp',
    'chop',

    'crypt',
    'dbmclose',
    'dbmopen',

    'dump',

    'fork',

    'format',
    'formline',

    'glob',
    'gmtime',

    'goto',

    'grep',

    'hex',

    'index',
    'ioctl',
    'join',
    'keys',
    'kill',
    'last',
    'lc',
    'lcfirst',
    'length',
    'link',
    'listen',
    'local',
    'localtime',
    'log',
    'lstat',
    'map',
    'mkdir',
    'msgctl',
    'msgget',
    'msgrcv',
    'msgsnd',
    'my',
    'next',
    'not',
    'oct',
    'open',
    'opendir',
    'ord',
    'our',
    'pack',
    'pipe',
    'pop',
    'pos',
    'print',
    'printf',
    'prototype',
    'push',
    'quotemeta',
    'rand',
    'read',
    'readdir',
    'readline',
    'readlink',
    'readpipe',
    'recv',
    'redo',
    'ref',
    'rename',
    'require',
    'reset',
    'return',
    'reverse',
    'rewinddir',
    'rindex',
    'rmdir',
    'say',
    'scalar',
    'seek',
    'seekdir',
    'select',
    'semctl',
    'semget',
    'semop',
    'send',
    'shift',
    'shmctl',
    'shmget',
    'shmread',
    'shmwrite',
    'shutdown',
    'sin',
    'sleep',
    'socket',
    'socketpair',

    'sprintf',
    'sqrt',
    'srand',
    'stat',
    'state',
    'study',


    'symlink',
    'tie',
    'tied',
    'time',
    'times',
    'truncate',
    'uc',
    'ucfirst',
    'umask',

    'unlink',
    'unpack',
    'unshift',
    'untie',
    'use',
    'utime',
    'values',
    'vec',
    'wait',
    'waitpid',
    'warn',
    'write',
  ]

  def __init__(self, metalang):
    super(OoplPerl, self).__init__(
      metalang, 'perl', 'Perl', ['pm'], OoplPerl.CONFIG)

  def compile_field(self, metafile, construct, config):
    """Compile a field construct.

    Args:
      metafile: MetaFile
      construct: Construct
      config: dict
    """
    # In perl, the templates for construct field have the following vars:
    #  field: the meta-level name of the field
    #  rawfield: the base-level name of the field
    #  accessors: visibility and level of the field, etc.

    streams = metafile.streams()
    context = metafile.context()

    class_construct = construct.findAncestor('class')
    class_name = class_construct.id()
    baserem = self.config('rem')

    # The 'select' feature attribute determines which accessors to generate,
    select_attr, select = self.consattr(construct, 'select', missing=LOOKUP)

    # The 'visibility' feature attribute determines the visibility of
    # these accessor methods (and the visibility of the field itself),
    visibility_attr, visibility = self.consattr(
      construct, 'visibility', missing=LOOKUP)

    # The 'level' feature attribute establishes whether the field and
    # accessors are defined on the class or its meta-class.  In languages
    # with support for both in the same class, the methods are defined
    # where appropriate for the language.
    level_attr, level = self.consattr(construct, 'level', missing=LOOKUP)

    # The 'status' feature attribute specifies whether the field is optional.
    # If so, the accessor code changes significantly, in order to support
    # this semantics.  An optional field must always be visibility private.
    status_attr, status = self.consattr(construct, 'status', missing=LOOKUP)

    # The 'type' secondary attribute defines the type of the field.
    type_attr, metatype = self.consattr(construct, 'type', missing=LOOKUP)
    basetype = self.metaTypeToBase(metatype)

    # The 'default' secondary attribute provides a default value (implicit
    # initialization during constructor invocation, required value for
    # optional fields.
    #
    # TODO(wmh): Verify that default can never taken on the value None unless
    # it doesn't exist.
    #
    # TODO(wmh): For now, we do not perform any lookup on this attribute, but
    # there are obvious problems in being inconsistent about how various
    # attributes are treated.
    default_attr, default = self.consattr(construct, 'default', missing=None)

    # The 'scope' secondary attribute defines accessors and controls the
    # visibility of those accessors more so than the 'visibility' feature can.
    scope_attr, scope = self.consattr(construct, 'scope', missing=EMPTY)

    # Establish the modifiers
    modifiers = []
    if level == 'static':
      modifiers.append('static')
    elif level == 'meta':
      # We need to write this field to the meta-class instead.
      print 'Not yet supporting meta-level fields in Java.'

    # Setup the varset used to instantiate accessor templates
    primary = construct.attr('field')
    rawfield = self.rawField(construct.id(), context)
    if select == 'raw' and visibility == 'public':
      rawfield = construct.id()
    varset = VarSet()
    varset.addVar('field', primary.value())
    varset.addVar('rawfield', rawfield)

    # For now, we aren't using any of the higher-level perl modules for
    # making OO easier.  May do so later, or may just explore doing it all
    # from first principles.  So, no field constructs in base perl, nothing
    # to write to 'fields' stream.

    preamble = []
    if 'get' in select:
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in Perl')
      else:
        code = ["return $_[0]->{'%s'};" % rawfield]
      self._compileAccessor(
        metafile, construct, scope_attr, 'get', varset, code, preamble=preamble)

    if 'set' in select:
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in Perl')
      else:
        code = ["$_[0]->{'%s'} = $_[1];" % rawfield]
      self._compileAccessor(
        metafile, construct, scope_attr, 'set', varset, code, preamble=preamble)

    if 'ref' in select:
      if status == 'optional':
        raise meta.compiler.errors.Error(
          'Not yet supporting optional fields in Perl')
      else:
        code = ["return $_[0]->{'%s'};" % rawfield]
      self._compileAccessor(
        metafile, construct, scope_attr, 'ref', varset, code, preamble=preamble)

    # TODO(wmh): Add code for generating test methods for accessors.

  # ----------------------------------------------------------------------
  # Template method instantations.

  def _augmentVarset(self, construct, varset, activity, data):
    """Modify the varset for a given construct.

    Args:
      construct: Construct
        The construct being compiled.
      varset: VarSet
        The varset that has been formed, ready for use in instantiating a template.
      activity: str
        A conceptual indicator of what activity is being performed.  Examples:
         'class': compiling a class
         'method': compiling a user-defined method (in either source or test class)
         'test-method': compiling a auto-generated test-method
      data: dict
        A mapping of variable to value, for variables a subclass implementation
        might find useful.
    """
    if 'namespace' in data:
      # TODO(wmh): Find an appropriate attribute to pass in here for line
      # mapping purposes?
      varset.addVar('namespace_colon', data['namespace'].replace('.', '::'))

    if activity == 'namespace':
      pass

    elif activity == 'class' or activity == 'class-by-namespace':
      varset.addVar('fqn_class', construct.fullid().replace('.', '::'))
      # Must be fixed up when we start supporting multiple inheritance.
      parent = data['parent'].replace('.', '::')
      parent_spec = "use parent '%s';" % parent if parent != '""' else ''
      varset.addVar('parent_spec', parent_spec)

    elif activity == 'method':
      varset.interpolate('modifiers', {'modifiers': ''})


    elif activity == 'test-method':
      print '****** HERE'

    else:
      super(OoplJava, self)._augmentVarset(construct, varset, activity, data)

  def _superSyntax(self, construct, class_name, method_name, args):
    """Return the syntax used to invoke the parent definition of method.

    Args:
      construct: Construct
        The construct (method or initializer) needing the super syntax.
      class_name: str
        Name of class
      method_name: str
        Name of method
      args: list of str
        The args to pass to the super call.

    Returns: str
      Valid base-language syntax for invoke a parent method.
    """
    super_method = '$%s->SUPER::%s' % (self.config('self'), method_name)
    args_str = ', '.join(args)
    # TODO(wmh): Generalize the 76 to "method indentation depth"
    if len(super_method) + len(args_str) + 2 < 76:
      super_call = super_method + '(' + args_str + ');'
    else:
      super_call = super_method + '(' + '\n  ' + args_str + ');'
    return super_call

  def formatParams(self, construct, metafile, invocation=False, level=None):
    """Obtain the python syntax for parameters from construct.

    Args:
      construct: Construct
      metafile: MetaFile
      invocation: bool
        If True, format args for invocation, not declaration.  This
        means the receiver is not added at the beginning, and
        default values are not included, just the var names.
      level: str or None
        One of 'instance', 'static' or 'meta', or None.  If None, it
        means we should NOT add the level-dependent receiver arg to the
        args list (this is useful, for example, when formatting a call to
        super().method(args).  This is usually NOT None though, and the fact
        that this is a keyword arg should not let one think that the default
        value is usually the correct one ... only in rare circumstances is it.

    Returns: tuple
      [0] str (the parameter listing itself)
      [1] list of str (type checking code to insert at the beginning of the
          method to validate args, assign defaults, etc.).
      [2] array of lines representing formatted documentation for all args.
      [3] the attribute to use when reporting location of generated code.
    """
    # TODO(wmh): Dig up the old code and add it in here.
    typechecks = []
    doclines = []

    info_list, params = self._extractParams(construct)

    varlist = []
    vallist = []
    deflist = []

    receiver = None
    if construct.kind() == 'initializer':
      receiver = self.config('self')
    else:
      level_attr, level = self.consattr(construct, 'level', missing=LOOKUP)
      if level == 'instance':
        receiver = self.config('self')
      elif level == 'meta':
        receiver = self.config('cls')
    if receiver:
      varlist.append('$' + receiver)

    for info in info_list:
      name = info['name']
      metatype = info['metatype']
      typechr = '$'
      if False:  # FIX THIS!
        typechr = '%' if metatype.base() == 'map' else '@'
      varlist.append(typechr + name)
      if 'default' in info:
        default = info['default']
        deflist.append('$%s = %s if (! defined $%s);' % (name, default, name))

    # typechecks is a bit of a misnomer, as this is just doing variable
    # spreading and default-value assignment currently.
    typechecks = ['my (%s) = @_;' % ', '.join(varlist)]
    typechecks.extend(deflist)

    attribute = params if params.value() else construct.primary()
    return [], typechecks, doclines, attribute

  def _formatImports(self, imports, formatted=False):
    """Create base-language import statements given a list of
    special meta-level import specifications.

    Args:
      imports: list of str
        The imports to format.  Normally, this is a baselang-independent
        format encoding relevant information suitable for passing to
        BaseLanguageOopl._parseEncodedImports(), but if formatted==True,
        each line is a valid import statement in the baselang in question.
      formatted: bool
        Controls how imports are interpreted. See 'imports' above.
    Returns: str
      A multi-line list of python import statements.
    """
    lines = []
    data = self._parseEncodedImports(imports)
    merged = data['core'] + data['complex'] + data['full']
    for item in merged:
      line = 'use ' + item['base'].replace('.', '::')
      if 'sub' in item:
        line += '::' + item['sub']
      if 'cls' in item:
        line += '::' + item['cls']
      else:
        # Perl does not have support for this ... how to deal with it???
        print 'ERROR: Perl cannot deal with non-class import %s' % item
        continue
      lines.append(line)
    # Because ';' is lexigraphically before ':', we cannot just add the ';'
    # before sorting.
    return '\n'.join([line + ';' for line in sorted(set(lines))])

  def metaTypeToBase(self, metatype):
    """Convert a metatype to its perl equivalent.

    Args:
      metatype: Type
        The type to convert.

    Returns: str
    """
    # TODO(wmh): Do we need anything more?  Since Perl doesn't have any real
    # typing, using the metatype syntax is as good as any for perl, no?
    # Need to look into how Perl6 is going to deal with typing.
    return metatype.raw()

  def groupBaseFiles(self, filelist):
    """Group and sort a list of baselanguage files.

    Args:
      filelist: list of str
        The files to sort.

    Returns: list of list of str
      Each element is a group of paths from filelist that are to be displayed
      together (usually on the same row where possible).
    """
    # We want to group path/File.pm and path/FileTest.pm into a group.
    groups = collections.OrderedDict()
    pre = re.compile('(.*?)((?:Test)?\.pm)$')
    for path in sorted(filelist):
      match = pre.match(path)
      if match:
        basepath, suffix = match.groups()
        groups.setdefault(basepath, []).append(path)
      else:
        groups[path] = [path]
    result = []
    for group, sublist in groups.iteritems():
      result.append(sublist)
    return result


class OoplJavascript(BaseLanguageOopl):

  CONFIG = {
    'self': 'this',
    'cls': 'cls',
    # TODO(wmh): Should the meta-to-base mapping be a subdict within
    # config, something else entirely, etc.?
    'null': 'null',
    # BUGFIX: javascript boolean literals are lowercase.  The previous
    # values ('True'/'False') were python literals left over from a
    # copy-paste (note 'null' had already been adapted) and would have
    # emitted invalid javascript.
    'true': 'true',
    'false': 'false',

    'rem': '//',
    'rem_start': None,
    'rem_end': None,

    'empty_scope_segment': [''],
    'class_primary': False,
    'initializer_name': None,
  }

  # See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Reserved_Words
  KEYWORDS = [
    'break', 'case', 'catch', 'continue', 'debugger', 'default', 'delete',
    'do', 'else', 'finally', 'for', 'function', 'if', 'in', 'instanceof',
    'new', 'return', 'switch', 'this', 'throw', 'try', 'typeof', 'var',
    'void', 'while', 'with',
    # future
    'class', 'enum', 'export', 'extends', 'import', 'super',
    # future if strict
    'implements', 'interface', 'let', 'package', 'private', 'protected',
    'public', 'static', 'yield',
  ]

  def __init__(self, metalang):
    """Register the Javascript base language (extension .js)."""
    super(OoplJavascript, self).__init__(
      metalang, 'javascript', 'Javascript', ['js'], OoplJavascript.CONFIG)

  def compile_field(self, metafile, construct, config):
    """Compile a field construct.

    Args:
      metafile: MetaFile
      construct: Construct
      config: dict
    """
    context = metafile.context()
    class_construct = construct.findAncestor('class')
    class_name = class_construct.id()
    pyrem = self.config('rem')
    print 'HERE with %s' % construct.kindid()

  # ----------------------------------------------------------------------
  # Template method instantations.

  def _augmentConstruct(self, metafile, construct, config):
    """Invoked before the construct-specific compile_* method.

    This template method allows individual baselanguages to perform any
    modifications of the parse tree necessary before rendering (for example,
    some baselanguages might require certain implicit methods be added,
    etc.)

    Args:
      metafile: MetaFile
        ...
      construct: Construct
        The construct to potentially modify
      config: dict
        Configuration information.

    Returns: dict or None
      A modified version of config, or None (meaning no modification of
      config needed).
    """
    # NOTE(review): stub -- no javascript-specific augmentation is performed
    # yet ('kind' is computed but unused), and the implicit None return
    # means "no modification of config needed".
    kind = construct.kind()
    pass

  def _augmentVarset(self, construct, varset, activity, data):
    """Modify the varset for a given construct.

    This method is invoked by the default implementations of various compile_*
    methods as a means of providing subclasses a degree of customization without
    requiring a full overriding of the methods themselves. Since the varsets are
    the culmination of what is needed to instantate a template representing
    baselanguage code, by providing a hook into base-language specific
    functionality before we instantiate the varset, subclasses should have a fair
    amount of flexibility in how they customize things.

    Args:
      construct: Construct
        The construct being compiled.
      varset: VarSet
        The varset that has been formed, ready for use in instantiating a template.
      activity: str
        A conceptual indicator of what activity is being performed.  Examples:
         'class': compiling a class
         'method': compiling a user-defined method (in either source or test class)
         'test-method': compiling a auto-generated test-method
      data: dict
        A mapping of variable to value, for variables a subclass implementation
        might find useful.
    """
    print '**** HERE with activity %s for %s' % (activity, construct.fullid())
    if activity == 'class' or activity == 'class-by-namespace':
      pass

    elif activity == 'method' or activity == 'test-method':
      print 'adding fqn_class %s' % construct.fullid()
      varset.addVar('fqn_class', construct.fullid())

    else:
      super(OoplJavascript, self)._augmentVarset(
        construct, varset, activity, data)

  def _superSyntax(self, construct, class_name, method_name, args):
    """Return the syntax used to invoke the parent definition of method.

    Args:
      construct: Construct
        The construct (method or initializer) needing the super syntax.
      class_name: str
        Name of class
      method_name: str
        Name of method
      args: list of str
        The args to pass to the super call.

    Returns: str
      Valid base-language syntax for invoke a parent method.
    """
    super_method = 'JS %s.%s' % (class_name, method_name)
    return super_method
    
    super_method = 'super(%s, %s).%s' % (
      class_name, self.config('self'), method_name)
    args_str = ', '.join(args)
    # TODO(wmh): Generalize the 76 to "method indentation depth"
    if len(super_method) + len(args_str) + 2 < 76:
      super_call = super_method + '(' + args_str + ')'
    else:
      super_call = super_method + '(' + '\n  ' + args_str + ')'
    return super_call

  def docstr(self, context, construct, indent=0, add_params=False,
             default_lines=None):
    """Format the comment attribute of a construct as a python doc-string.

    NOTE(review): although this lives in the javascript backend, it still
    emits python-style triple-quoted docstrings -- presumably inherited
    from the python backend and not yet adapted; confirm intent.

    Args:
      context: Context
      construct: Construct
      indent: int
      add_params: bool
        If True, we are generating a method docstr, and are to add a 'Args:'
        section to the doc string for each  parameter, and a 'Returns:' section
        for the return value.
      default_lines: list of str or None
        The list of lines to use as the docstr if one doesn't exist.
        None means don't add one.

    Returns: two-tuple
      [0] str
        Note that this value never has a trailing newline (and callers rely on
        this fact).
      [1] Attribute (the 'comment' attribute from which docstr was obtained)
    """
    comment, _ = self.consattr(construct, 'comment')
    if comment:
      # We form a string representing documentation for each argument.
      docstr = comment.asStr(
        context, strip_comments=True, indent=indent,
        default_lines=default_lines)
      if docstr:
        # Parse parameters, if we are forming the docstr for a method (controlled
        # by 'add_params')
        arglines = []
        if add_params:
          params = construct.attr(
            'params', default=EMPTY, context=context, param=self.id())
          for param in params.value():
            name = param.id()
            type_ = param.attrval('type', defattr=LOOKUP, context=context)
            arglines.append('%s: %s' % (name, self.metaTypeToBase(type_)))
            arg_comment = param.attrval(
              'comment', defattr=EMPTY, context=context)
            if arg_comment:
              try:
                # l[3:] drops the leading comment prefix from each raw
                # comment line -- assumes a fixed 3-char prefix; TODO confirm.
                arglines.extend(['  ' + l[3:] for l in arg_comment])
              except TypeError:
                # arg_comment was not iterable line-by-line as expected;
                # report and continue rather than abort the compile.
                print '%s has param %s with %s' % (
                  construct.kindid(), name, str(arg_comment))
          if arglines:
            docstr += '\nArgs:\n' + '\n'.join(['  ' + l for l in arglines])

          return_type = construct.attr('returns', default=None, context=context)
          if return_type:
            if arglines:
              docstr += '\n'
            docstr += '\nReturns: %s\n' % self.metaTypeToBase(
              return_type.value())
            return_comment = construct.attr(
              'returns_', default=None, context=context)
            if return_comment:
              retlines = return_comment.value()
              docstr += '\n'.join(['  ' + l[3:] for l in retlines])

        com_delim = '\"\"\"'
        if '\n' not in docstr.strip():
          # One-line comments should not have a newline.
          # docstr[:indent] preserves the leading indentation of the
          # rendered comment text.
          docstr = docstr[:indent] + com_delim + docstr.strip() + com_delim
        else:
          # Multi-line comments have their end-of-string syntax on a separate
          # line below the comment text.
          dent = docstr[:indent]
          docstr = dent + com_delim + docstr.strip() + '\n' + dent + com_delim
      else:
        docstr = ''
    else:
      docstr = ''
    return docstr, comment

  def formatParams(self, construct, metafile, invocation=False, level=None):
    """Obtain the python syntax for parameters from construct.

    Args:
      construct: Construct
      metafile: MetaFile
      invocation: bool
        If True, format args for invocation, not declaration.  This
        means the receiver is not added at the beginning, and
        default values are not included, just the var names.
      level: str or None
        One of 'instance', 'static' or 'meta', or None.  If None, it
        means we should NOT add the level-dependent receiver arg to the
        args list (this is useful, for example, when formatting a call to
        super().method(args).  This is usually NOT None though, and the fact
        that this is a keyword arg should not let one think that the default
        value is usually the correct one ... only in rare circumstances is it.

    Returns: tuple
      [0] list of str (the parameter listing itself)
      [1] list of str (type checking code to insert at the beginning of the
          method to validate args, assign defaults, etc.).
      [2] list of str (formatted documentation lines for all args).
      [3] the attribute to use when reporting location of generated code.

    Raises:
      meta.compiler.errors.InvalidType: vararg param typed neither vec nor map.
      meta.compiler.errors.Error: vararg with default, or positional param
        following a keyword param.
    """
    # TODO(wmh): Rewrite this using self._extractParams.

    # TODO(wmh): Clean up the partial redundancy between 'invocation' and
    # 'level' == None.

    args = []
    typechecks = []
    doclines = []

    context = metafile.context()

    if not invocation:
      # Add initial receiver arg (but only if we aren't generating for
      # invocation).
      if level is None:
        # We are not supposed to add the receiver to the args listing.
        pass
      elif level == 'instance':
        args.append(self.config('self'))
      elif level == 'meta':
        args.append(self.config('cls'))
      elif level == 'static':
        pass
      else:
        raise meta.compiler.errors.InternalError('Invalid level %s' % level)

    # TODO(wmh): Provide type-checking support.
    params = construct.attr(
      'params', default=EMPTY, context=context, param=self.id())
    in_keywords = False
    for param in params.value():
      name = param.id()
      type_ = param.attrval('type', defattr=LOOKUP, context=context)
      type_base = type_.base()
      kind = param.attrval('kind', defattr=LOOKUP, context=context)
      comment = param.attrval('comment', defattr=EMPTY, context=context)

      doclines.append('%s: %s' % (name, type_.raw()))
      if comment:
        doclines.extend(['  ' + l for l in comment])

      # Some initial vararg parsing.
      #   - only two types are currently supported, 'vec' and 'map'
      vararg_str = None
      if kind == 'vararg':
        if type_base == 'vec':
          vararg_str = '*' + name
        elif type_base == 'map':
          vararg_str = '**' + name
        else:
          raise meta.compiler.errors.InvalidType(
            'vararg %s has invalid type %s' %
            (param.kindid(), str(type_)))

      # We want to check whether a 'default' exists or not, before asking
      # for attrval, because there is currently no way to distinguish between
      # an empty default string and a non-existent default.
      defattr = param.attr('default', default=None, context=context)
      if defattr:
        if kind == 'vararg':
          raise meta.compiler.errors.Error(
            'vararg %s cannot have a default' % param.kindid())
        default = self.baseValue(defattr.valueStr())
        # TODO(wmh): Depending on type, the default may need to be wrapped in
        # single or double quotes, etc.  We really need the Type class defined
        # to do this properly (a permanently-disabled 'if False' hack that
        # quoted 'str' defaults used to live here and was removed as dead
        # code).  The conversion code can maybe be written on Type (or at
        # least facilitated by it).

        # Indicate that we've started parsing keywords, not positionals.
        in_keywords = True

        if invocation:
          args.append(name)
        else:
          args.append(name + '=' + default)
      elif kind == 'vararg':
        # Special case: varargs never carry defaults (enforced above), so
        # they are emitted as-is ('*name' or '**name').
        args.append(vararg_str)
      else:
        if in_keywords:
          # We encountered a positional arg after a keyword arg.  This should
          # be validated during parsing, not during compilation, but for now
          # we report it here.
          raise meta.compiler.errors.Error(
            'Line %d: Found positional %s after keyword param encountered' %
            (param.primary().line(), name))
        # Plain positional parameter.  (A nested vararg re-check existed here
        # but was unreachable: vararg kinds are consumed by the elif above.)
        args.append(name)

    # If there is no params attribute, we use the primary attribute instead.
    # TODO(wmh): Should we just use None instead of the primary attribute?
    attribute = params if params.value() else construct.primary()

    return args, typechecks, doclines, attribute

  def metaTypeToBase(self, metatype):
    """Convert a metatype to its base-language equivalent.

    Args:
      metatype: Type
        The type to convert.

    Returns: str
    """
    # NOTE(review): this unconditional return stubs out the method, making
    # everything below unreachable.  It looks like a temporary hack for a JS
    # backend that lacks type conversion -- confirm intent before removing
    # either the return or the dead code below it.
    return 'JS no metaTypeToBase'

    #print 'Here with %s [%s and %s]' % (metatype.raw(), metatype.prefix(), metatype.base())
    if not metatype.isValid():
      # Invalid types pass through raw, except '{#...#}' escape forms whose
      # underscores are rewritten as spaces.
      result = metatype.raw()
      if result.startswith('{#') and result.endswith('#}'):
        result = result[2:-2].replace('_', ' ')
    else:
      # Valid type: build the core name from prefix + base ('?' when base
      # is missing).
      core = (
        (metatype.prefix() or '') +
        (metatype.base() or '?'))
      if core.startswith('{#') and core.endswith('#}'):
        result = core[2:-2].replace('_', ' ')
      else:
        # Translate via the METATYPES table, recursively converting any type
        # parameters and appending them as 'of p1/p2/...'.
        result = self.METATYPES.get(core, None)
        if result:
          params = metatype.params()
          if params:
            param_strs = [self.metaTypeToBase(p) for p in metatype.params()]
            result += ' of ' + '/'.join(param_strs)
        else:
          # Unknown core type: fall back to the raw spelling, minus any
          # leading '*' markers.
          result = metatype.raw().lstrip('*')
    return result


class MetaStream(Meta):
  """Provides support for writing output to multiple streams with aggregation.

  When compiling a Meta construct into base language constructs, the following
  functionality is useful:
    - ability to write to multiple conceptual streams incrementally, and to
      serialize various streams to files
    - ability to know what a line number in the meta source code corresponds to
      in a base language file.
    - ability to form base language output by instantiating a multi-line
      template string.
    - ability to enforce various constraints on the generated output (for
      example, no line longer than 80 characters, etc.)
    - ...

  This class provides support for all of the above.
  """

  def __init__(self):
    super(MetaStream, self).__init__()
    # field streams: dict
    #   Maps conceptual stream names to lists of (str|MetaSegment) instances.
    self._streams = {}

  def clear(self):
    """Discard all streams."""
    self._streams = {}

  def streamNames(self):
    """Return all conceptual stream names, sorted."""
    return sorted(self._streams)

  def stream(self, name, clear=False, create=False):
    """Obtain the stream with the given name.

    Args:
      name: str
        The conceptual stream name to return.
      clear: bool
        If True, a new stream is created even if it already exists.
        Note that the stream is completely replaced, not just cleared,
        so references obtained from earlier calls are not affected.
      create: bool
        If True, create the stream if it does not exist.  By default, we
        want to report an error, because we usually want to explicitly
        create streams in parent compilation methods.

    Returns: list of (str or MetaSegment)

    Raises:
      meta.compiler.errors.Error: stream missing and create is False.
    """
    streams = self._streams
    if clear and name in streams:
      streams[name] = []
    if name not in streams:
      if not create:
        raise meta.compiler.errors.Error(
          'Failed to find stream named "%s"' % name)
      streams[name] = []
    # BUGFIX: the previous implementation captured the stream list BEFORE
    # the clear above, so clear=True on an existing stream returned the
    # stale pre-clear list.  Always return the currently registered list.
    return streams[name]

  def initStreams(self, *names):
    """Create streams by name, for both source and test.

    Args:
      *names: list of str
    """
    for sname in names:
      for prefix in ('', 'test-'):
        self.stream(prefix + sname, clear=True, create=True)

  def addLine(self, name, line):
    """Append a single line (str) to the named existing stream."""
    self.stream(name).append(line)

  def addSegment(self, name, segment, create=False):
    """Append a MetaSegment to the named stream, optionally creating it."""
    self.stream(name, create=create).append(segment)

  def flatten(self, name, spaces=0, indent=0):
    """Collapse a stream into a single multi-line text string.

    Args:
      name: str
        Name of stream.
      spaces: int
        How many blank lines to insert between segments.
        NOTE(review): currently ignored by flattenWithMap -- confirm.
      indent: int
        How many spaces to insert at the beginning of each line.

    Returns: str
    """
    lines, _ = self.flattenWithMap(name, spaces=spaces, indent=indent)
    return '\n'.join([line.rstrip() for line in lines])

  def flattenWithMap(self, name, spaces=0, indent=0):
    """Collapse a stream into flattened lines plus a base/meta line map.

    Args:
      name: str
        Name of stream.
      spaces: int
        How many blank lines to insert between segments.
        NOTE(review): currently unused -- confirm intended semantics.
      indent: int
        How many spaces to insert at the beginning of each line.

    Returns: two-tuple
      [0] list of str (the flattened lines)
      [1] list of tuples (local file line numbers to meta file line numbers)
    """
    segment = MetaSegment(self.stream(name))
    return segment.flattenLines(indent=' ' * indent)

  def flattenAll(self, spaces=0, indent=0):
    """Flatten every stream into one labeled report string.

    Args:
      spaces: int
        How many blank lines to insert between segments.
      indent: int
        How many spaces to insert at the beginning of each line.

    Returns: str
    """
    result = ''
    for stream in sorted(self._streams):
      result += '\n' + stream + '\n'
      result += self.flatten(stream, spaces=spaces, indent=indent + 2)
    return result

  def dump(self, title=None, fp=sys.stdout):
    """Show all streams.

    Args:
      title: str or None
        A title to show.
      fp: file
        Where to write output.
    """
    indent = ''
    if title:
      fp.write('%s%s\n%s\n' % (indent, '#' * 80, title))
    fp.write(self.flattenAll())
    fp.write('\n')
    

class MetaSegment(Meta):
  """A collection of contiguous source code lines.

  A segment's data is a list whose elements are either plain strings (one
  output line each) or nested MetaSegment instances; a parallel 'mapping'
  records which meta-source lines produced which output lines.
  """

  def data(self):
    """Return the list of (str|MetaSegment) elements in this segment."""
    return self._data

  def mapping(self):
    """Return the list of base-to-meta line mapping tuples."""
    return self._mapping

  def indent(self):
    """Return the indentation applied when this segment is flattened."""
    return self._indent

  def __init__(self, data, mapping=None, indent=''):
    super(MetaSegment, self).__init__()
    # field data: list of (str|MetaSegment)
    #   The lines and MetaSegments making up this MetaSegment.
    self._data = data

    # field mapping: list of tuples
    #  - The first element of each tuple is an index within 'data' (the
    #    list of strings making up the segment, and representing a position
    #    in baselanguage text).
    #  - The second element of each tuple is the line number in a meta file
    #    at which the text starting at lines[mapping[i][0]] starts in the
    #    meta representation.
    #  - The optional third element describes what the text being started
    #    represents (comment in method of class of namespace, etc.)
    #  - The optional fourth element specifies the .meta file referred to by
    #    second element.  If not specified, it is the same file as the
    #    previous mapping element.  Often, only the very first element of
    #    mapping has such a value because all elements refer to the same
    #    .meta file.
    if mapping is None:
      mapping = []
    self._mapping = mapping

    # field indent: str
    #   Leading whitespace prepended to every line when this segment is
    #   flattened (in addition to any indent passed to flattenLines()).
    self._indent = indent

  def updateIndent(self, indent):
    """Append 'indent' (str) to this segment's base indentation."""
    self._indent += indent

  def flattenLines(self, indent=''):
    """Obtain a sequence of lines from this segment and the meta/base mapping.

    This code produces a list of lines in baselanguage syntax, based on the
    compilation of meta-syntax constructs.  We want to know the correspondence
    between base language line numbers and meta-level line numbers.  Nested
    MetaSegment elements are expanded recursively, and their mapping entries
    are re-based onto this segment's line numbering.

    Args:
      indent: str
        The amount of indentation before each line.  This is in addition
        to any indentation specified in self._indent.

    Returns: two-tuple
      [0] list of str
      [1] list of 2- to 4-tuples, representing index/line mappings:
          - first element is an index within result[0]
          - second element is the corresponding line in a meta source file
          - optional third element describes significance of the location
          - optional fourth element is the metafile
    """
    indent = self._indent + indent
    lines = []
    full_mapping = []
    mapping = self._mapping
    mi = 0
    if mi >= len(mapping):
      mapping = None
    for i, line in enumerate(self._data):
      # We add any line mapping tuples from my local mapping to the full mapping
      # that apply at this point.
      while mapping and mapping[mi][0] <= i:
        full_mapping.append(mapping[mi])
        mi += 1
        if mi >= len(mapping):
          mapping = None

      # Now add the line (which may be an entire MetaSegment that needs to
      # be expanded).
      if isinstance(line, MetaSegment):
        sublines, submap = line.flattenLines(indent=indent)
        local_index = len(lines)
        lines.extend(sublines)
        # Add submap entries (properly adjusted) to full_mapping
        for tup in submap:
          full_mapping.append(tuple([tup[0] + local_index] + list(tup[1:])))
      else:
        # Blank lines are kept but never indented (avoids trailing spaces).
        if line:
          lines.append(indent + line)
        else:
          lines.append('')

    return lines, full_mapping

  def flattenStr(self, indent='', linenum=0):
    """Obtain a multi-line string for this stream.

    Args:
      indent: str
        What to insert at the beginning of each line.
      linenum: int
        This has two purposes: 1) indicate whether numbering is desired, and
        2) specify what number to start at.  If this is 0, it means no
        numbering is wanted.

    Returns: str
      The flattened lines joined by newlines, optionally numbered.
    """
    lines, mapping = self.flattenLines(indent=indent)
    if linenum:
      result = '\n'.join(
        ['%4d: %s' % (i, line) for i, line in enumerate(lines, start=linenum)])
    else:
      result = '\n'.join(lines)
    return result

  def extendFromInterpolationData(self, more_lines, indent='', strip=False,
                                  debug=False):
    """Provide specialized functionality for adding data to a stream.

    The first element of more_lines is appended to the end of the current
    self.data(), with various special semantics:
     - if more_lines[0] is a MetaSegment instance, data[-1] must consist
       solely of whitespace (and be exactly equal to 'indent'), and it
       indicates how much the indentation of more_lines[0] should be
       increased as it is added.  Note that subsequent elements of
       more_lines are not currently indented, so this is really only
       meaningful for a single element more_lines.
       TODO(wmh): do we want to indent all values in more_lines by data[-1]?

    Args:
      more_lines: list of str|MetaSegment
        The data to be added; may mix strings and MetaSegment instances.
      indent: str
        How much indentation to add before each line in more_lines when
        adding to self.data().
      strip: bool
        If True, and a variable is replaced by nothing, and the resulting
        line upon which the variable starts contains only whitespace, do
        not include the line.
        NOTE(review): 'strip' is currently unused in this method's body --
        confirm before relying on it.
      debug: bool
        If True, output diagnostic information.
    """
    lines = self.data()

    if debug:
      print '#' * 80
      for i, val in enumerate(more_lines, start=1):
        print '%3d: "%s"' % (i, val)
      print '-' * 80
      for i, val in enumerate(lines, start=1):
        print '%3d: "%s"' % (i, val)

    num_lines = len(more_lines)
    if num_lines and (num_lines > 1 or (num_lines == 1 and more_lines[0])):
      # We need to add the first line of more_lines to the end of lines[-1].
      if isinstance(more_lines[0], MetaSegment):
        # Adding a MetaSegment to the end of lines[-1] is only valid when
        # lines[-1] consists solely of whitespace (and is exactly equal to
        # 'indent').  It establishes how much indentation to add to the
        # MetaSegment.

        if lines[-1] != indent:
          # TODO(wmh): Is this really necessary?  Or do we want to assume
          # that the user has done what they intended with whitespace, and
          # allow arbitrary indentation, rather than forcing a confirmation
          # by passing in the same indent value as lines[-1]?
          raise meta.compiler.errors.Error(
            'Found MetaSegment being added to end of non empty line "%s"' %
            lines[-1])
        # Adding the MetaSegment to the end of lines[-1] involves adjusting
        # the indent of the MetaSegment and replacing lines[-1] with the
        # MetaSegment.
        #
        # TODO(wmh): We should clone more_lines[0] before adding it to
        # lines, so that its state is maintained here but the original
        # obj can be modified later without affecting the snapshot.
        more_lines[0].updateIndent(lines[-1])
        lines[-1] = more_lines[0]

      else:
        # Simple string ... append to previous line.
        if more_lines[0]:
          if not lines:
            lines.append('')
          if isinstance(lines[-1], MetaSegment):
            raise meta.compiler.errors.Error(
              'Found unexpected MetaSegment when adding string "%s"'
              'to end of line' % more_lines[0])
          lines[-1] += more_lines[0]

      for line in more_lines[1:]:
        if isinstance(line, MetaSegment):
          segment = line
          segment.updateIndent(indent)
          # TODO(wmh): Verify that updating segment will properly convey
          # the indentation to all sub-segments of that segment as well!
          lines.append(segment)
          # We cannot append to the end of the last line of a MetaSegment,
          # so we start a new line.  This may cause line-number issues
          # though!  Need to ensure that any variable specified in a template
          # that is list-valued is alone on the line?!
          #lines.append('')
        else:
          # Blank lines are kept unindented to avoid trailing whitespace.
          if line:
            lines.append(indent + line)
          else:
            lines.append('')

    if debug:
      print '-' * 80
      for i, val in enumerate(lines, start=1):
        print '%3d: "%s"' % (i, val)
      print '#' * 80

  def serialize(self, metafile, path, meta_file, details=False):
    """Write to disk.

    Flattens the segment to 'path' (skipping the write when the content is
    unchanged), then writes a companion '.map' file next to it mapping
    base-language line numbers to .meta line numbers.

    Args:
      metafile: MetaFile
        Provides the context whose 'quote_dent' regexp is stripped from
        each output line.
      path: str
        Output file to write (the .map path is derived from it).
      meta_file: str
        Name of the .meta source file, recorded in the first .map entry.
      details: bool
        If True, write every mapping entry, even those whose meta line is 0.

    Returns: bool
      Returns True if the file was written, False if there was no need to
      write it (because the previous contents match identically)
    """
    meta_lines, mapping = self.flattenLines(indent='')
    context = metafile.metalang().context()
    lines = []
    for line in meta_lines:
      lines.append(context.re('quote_dent').sub('', line))
    content = '\n'.join(lines) + '\n'

    # Read any existing file so we can skip rewriting identical content.
    original_content = ''
    if IO.exists(path):
      fp = IO.reader(path)
      try:
        original_content = fp.read()
      finally:
        IO.close(fp)

    file_changed = False
    if content != original_content:
      fp = IO.writer(path)
      try:
        fp.write(content)
      finally:
        IO.close(fp)
      logging.info('Wrote %d bytes to %s', len(content), path)
      file_changed = True
    else:
      Log.info('Verified %s', path)
      # Log.uninfo('Verified %s', path)

    # Write the .map file for the class (maps .meta line numbers to baselang
    # line numbers).  The map file is a hidden sibling: 'foo.py' -> '.foo.map'.
    dirname, filename = os.path.split(path)
    basename, suffix = os.path.splitext(filename)
    map_path = os.path.join(
      dirname, ('' if basename.startswith('.') else '.')  + basename + '.map')
    if file_changed or not IO.exists(map_path):
      # TODO(wmh): Verify that it is safe to not write the .map file when
      # no changes have occurred (as long as the map file does indeed already
      # exist).
      mapfp = IO.writer(map_path)
      try:
        first = True
        for tup in mapping:
          # tup[0] is an index into the 'lines' var, which is 0-based, but when
          # printing out line numbers we want to add one to get 1-based values.
          local_line = tup[0] + 1
          meta_line = tup[1]
          spec = tup[2]
          if details or meta_line:
            # Only the first entry records the meta file name.
            if first:
              mapfp.write('%6d %6d %-40s %s\n' % (
                local_line, meta_line, spec, meta_file))
              first = False
            else:
              mapfp.write('%6d %6d %s\n' % (local_line, meta_line, spec))
      finally:
        IO.close(mapfp)
      Log.info('Wrote %s', map_path)
      if False:
        # TODO(wmh): Add some kind of verbosity flag to control this.
        Log.uninfo('Wrote %s', map_path)

    return file_changed

  @classmethod
  def NewFromFile(cls, path, mappath, extract_re=None):
    """Create a MetaSegment for the contents of path, with map 'mappath'.

    This performs a reversal of serialize(), except that:

      - the resulting MetaSegment will always contain a list of strs, whereas
        the invocation of serialize() may have contained nested MetaSegment
        instances.

      - if serialize() was invoked with details=False, the mapping in the
        returned MetaSegment will be a subset of that used to write the files.

    Args:
      path: str
      mappath: str
      extract_re: regexp
        If this regexp matches the content of the file, the match is replaced
        with the empty string, and the contents of group 1 are returned.

    Returns: two-tuple
     [0] MetaSegment
     [1] list of str or None
       contents of group 1 of regexp match or None if no match found.
    """
    result = None
    text = []
    # 'found' is a one-element list so the nested Hack() closure can mutate it
    # (no 'nonlocal' in Python 2).
    found = [False]
    debug = False

    mapping = cls.LoadMapping(mappath)

    def Hack(obj):
      # re.sub callback: record the extracted group and delete it from the
      # file content.
      found[0] = True
      txt = obj.group(1)
      # Remember the replaced text.
      text.extend(txt.split('\n'))
      # Replace the text with the empty string.
      return ''

    def Show(title, text):
      # Debugging aid: dump the content being processed plus the mapping.
      print title
      print '  path: ' + path
      print '  regxp: ' + (
        extract_re.pattern.replace('\n', '\\n') if extract_re else '')
      for i, line in enumerate(text.split('\n')):
        print '%2d: %s' % (i, line)
      pprint.pprint(mapping)

    fp = IO.reader(path)
    try:
      content = fp.read()
      if debug:
        Show('BEFORE', content)
      if extract_re:
        content = extract_re.sub(Hack, content)
    finally:
      IO.close(fp)

    data = [line.rstrip() for line in content.split('\n')]
    if data and data[-1] == '':
      data.pop()

    found = found[0]
    if found:
      # NOTE(review): assumes the extracted region occupied len(text) + 2
      # physical lines in the original file -- confirm against the shape of
      # extract_re.  Also note removed_count is always truthy here (>= 2).
      removed_count = len(text) + 2
      if removed_count:
        # Shift subsequent mapping entries up by the number of removed lines.
        for item in mapping:
          if item[0] > 0:
            item[0] -= removed_count

    if debug:
      Show('AFTER', content)

    result = MetaSegment(data, mapping=mapping)
    return result, text

  @classmethod
  def LoadMapping(cls, mapfile):
    """Load a MetaSegment._mapping datastructure from disk.

    Reads the map file written by serialize().

    Args:
      mapfile: str
        Path of the .map file to parse.

    Returns: list of tuples
      The result is suitable for using as the 'mapping' argument to
      MetaSegment()

    Raises:
      meta.compiler.errors.Error: a line does not match the expected
        '<base> <meta> <desc> [<metafile>]' format.
    """
    mapping = []
    line_re = re.compile('^\s*(\d+)\s+(-?\d+)\s*(\S+)\s*(.*)')
    fp = IO.reader(mapfile)
    try:
      lineno = 0
      for line in fp:
        lineno += 1
        match = line_re.match(line)
        if not match:
          raise meta.compiler.errors.Error(
            '%s:%d: Invalid: %s' % (mapfile, lineno,line))
        base_line, meta_line, desc, meta_file = match.groups()
        # Base line numbers are stored 0-based (serialize() wrote them
        # 1-based).
        entry = [int(base_line)-1, int(meta_line),desc]
        if meta_file:
          entry.append(meta_file)
        mapping.append(entry)
    finally:
      IO.close(fp)
    return mapping


class Mapping(object):
  """Base-to-meta line number mappings.

  Loads the .map file written by MetaSegment.serialize() and answers
  base-file line number queries.
  """

  def __init__(self, mapfile):
    """Load a mapping datastructure from disk.

    Reads the map file written by MetaSegment.serialize().

    Args:
      mapfile: str
        Path of the .map file to parse.

    Raises:
      meta.compiler.errors.Error: if mapfile does not exist, or a line does
        not match the expected '<base> <meta> <desc> [<metafile>]' format.
    """
    mapping = []
    # Each line: <base line> <meta line> <description> [<meta file>].
    line_re = re.compile(r'^\s*(\d+)\s+(-?\d+)\s*(\S+)\s*(.*)')
    if not IO.exists(mapfile):
      raise meta.compiler.errors.Error('No mapfile found in %s' % mapfile)
    fp = IO.reader(mapfile)
    try:
      lineno = 0
      for line in fp:
        lineno += 1
        match = line_re.match(line)
        if not match:
          raise meta.compiler.errors.Error(
            '%s:%d: Invalid: %s' % (mapfile, lineno, line))
        base_line, meta_line, desc, meta_file = match.groups()
        # Base line numbers are stored 0-based (the file is 1-based).
        entry = [int(base_line) - 1, int(meta_line), desc]
        if meta_file:
          entry.append(meta_file)
        mapping.append(entry)
    finally:
      IO.close(fp)

    # field mapping: list of [base_index, meta_line, desc, metafile?]
    self._mapping = mapping
    # field mapfile: str
    #   The path this mapping was loaded from.
    self._mapfile = mapfile

  def baseToMeta(self, basenum):
    """Convert a line number in a given base file to a metafile/line pair.

    NOTE(review): this is an unimplemented stub -- it always returns
    (None, 0) regardless of basenum and never consults self._mapping.

    Args:
      basenum: int
        A line number within self._mapfile.

    Returns: two-tuple
     [0] str or None; metafile
     [1] int; line number
    """
    meta_file = None
    meta_line = 0
    return meta_file, meta_line
    

class Compiler(Meta):
  """The meta compiler.

  Provides high-level methods used by metac (the front-end invoked by users)
  and by FileEnv.
  """

  # This variable is dynamically populated as meta-languages are needed.
  # See Compiler.metalangNamed()
  MetaLangs = {}

  def metalang(self):
    """Return the MetaLanguage instance this compiler targets."""
    return self._metalang

  def typemap(self):
    """Return the dict mapping baselang type strings to meta type strings."""
    return self._typemap

  def tokens(self):
    """Return the dict of token-name to token-string constants."""
    return self._tokens

  def __init__(self, metal='oopl', levels=None):
    """Initializer.

    Args:
      metal: str
        The name of the meta language within which we are operating.
      levels: dict or None
        Maps the various compilation dimensions to values
        'off', 'low', 'avg', 'high', 'max'.  If not provided, defaults are
        assigned.

    Raises:
      meta.compiler.errors.Error: if metal names an unknown MetaLanguage.
    """
    if levels is None:
      levels = {
        'warn': 'max',
        'debug': 'max',
        'optimize': 'off',
        'profile': 'off',
        'inline': 'off',
      }

    super(Compiler, self).__init__()

    # field levels: dict
    #   Compilation dimension settings (warn/debug/optimize/profile/inline).
    #   NOTE(review): the original initializer computed 'levels' but never
    #   stored or used it; it is now retained on the instance so that the
    #   argument has an effect.
    self._levels = levels

    # field filemap: dict
    #   Maps file paths to MetaFile instances.
    self._filemap = {}

    # field metalang: MetaLang
    #   The metalanguage we are compiling into.
    metalang = self.metalangNamed(metal)
    if not metalang:
      raise meta.compiler.errors.Error(
        'Unknown MetaLanguage %s' % metal)
    self._metalang = metalang

    # field resources: dict
    #   Various useful files.
    self._resources = {
      'emacs-template': self.metaPath('src/templates/meta-mode-template.el'),
      'makefile-template': self.metaPath('src/templates/Makefile.tmpl'),
    }

    # field typemap: dict
    #   Maps baselang type strings to meta type strings.
    #   TODO(wmh): Should this just be in self._resources?
    #   TODO(wmh): Should we call self.loadTypeMap() in this initializer?
    self._typemap = {}

    # field typecnt: dict
    #   Maps baselang type strings to number of times that type was seen
    #   during parsing.
    self._typecnt = {}

    # field tokens: dict
    #   Useful constants.
    #   TODO(wmh): How do we unify this with tokens in Context.
    self._tokens = {
      'rem': '/#',
      'term': ';',
      'quote_dent': ' >|',
    }

  def token(self, token, default=None):
    """Look up a token string by name.

    Args:
      token: str
        Name of the token to look up.
      default: value returned when the token is unknown.

    Returns: str (or the value of 'default' when not found)
    """
    try:
      return self._tokens[token]
    except KeyError:
      return default

  def metalangNamed(self, name):
    """Return the MetaLanguage with the given (case-insensitive) name.

    Instances are created on first use and cached in the class-level
    MetaLangs dict, so repeated lookups return the same object.

    Args:
      name: str
        Name of the metalanguage ('meta' or 'oopl').

    Returns: MetaLanguage

    Raises:
      meta.compiler.errors.Error: on an unrecognized name.
    """
    key = name.lower()
    cache = self.MetaLangs
    if key not in cache:
      # Not yet instantiated; build and memoize it.
      # TODO: the 'doc' metalanguage is not yet supported here.
      if key == 'meta':
        lang = MetaMeta(self)
      elif key == 'oopl':
        lang = MetaOopl(self)
      else:
        raise meta.compiler.errors.Error('Unknown MetaLanguage %s' % key)
      cache[key] = lang
    return cache[key]

  def context(self):
    """Return the Context of the metalanguage being compiled."""
    return self.metalang().context()

  def resource(self, name):
    """Return the path of the named resource file, or None if unknown."""
    try:
      return self._resources[name]
    except KeyError:
      return None

  def loadTypeMap(self, typemap_file='.typemap'):
    """Load a typemap file into self._typemap.

    Each line of the file has the form 'basetype -> metatype'.  Lines that
    do not contain exactly one '->' separator are reported and skipped.
    A missing file is silently ignored.

    Args:
      typemap_file: str
        Name of file to load.
    """
    typemap = self._typemap
    if IO.exists(typemap_file):
      lineno = 0
      fp = IO.reader(typemap_file)
      try:
        # BUGFIX: the previous implementation called fp.readline() once and
        # therefore loaded only the FIRST line of the file; the lineno
        # counter and docstring make clear every line was intended.
        for line in fp:
          lineno += 1
          base_meta_pair = line.split('->')
          if len(base_meta_pair) == 2:
            typemap[base_meta_pair[0].strip()] = base_meta_pair[1].strip()
          else:
            print('Invalid line %d of %s: %s' % (lineno, typemap_file, line))
      finally:
        IO.close(fp)

  def generateBootstraps(self):
    """Generate python code to define a subclass of GenericConstruct.

    Parses each metalanguage's schema.meta and writes auto/bootstrap.py,
    which defines a Bootstrap$L() method for every meta-lang $L.
    """
    # Matches a 'Construct <name> ...' schema line, capturing indent, name
    # and the trailing text (expected to be 'config:').
    config_re = re.compile(r'^(\s+)Construct\s+(\S+)\s*(.*)')
    # Matches the body of a config block: 4-space-indented or blank lines.
    data_re = re.compile(r'^(    \S|\s*$)')

    ofp = IO.writer('auto/bootstrap.py')
    ofp.write('"""Creates Schema instances from first principles."""\n')
    for metastr in ('meta', 'oopl'):
      metalang = self.metalangNamed(metastr)
      ofp.write('\n\ndef Bootstrap%s(cls):\n' % metalang.name())
      schema_file = os.path.join(metalang.srcdir(), 'schema.meta')
      constructs = []

      ifp = IO.reader(schema_file)
      linenum = 0
      for line in ifp:
        linenum += 1
        config_match = config_re.match(line.rstrip())
        if config_match:
          indent, ctype, rest = config_match.groups()
          if indent != '  ' or rest != 'config:':
            # BUGFIX: this error message previously referenced 'lineno',
            # an undefined name (the counter is 'linenum'), which would
            # have raised NameError instead of the intended Error.
            raise meta.compiler.errors.Error(
              '%s:%d:\n  Expecting 2-space indent and config: attribute' %
              (schema_file, linenum))
          constructs.append(ctype + '_construct')
          ofp.write(
            '\n  %s_construct = cls.BootstrapConstruct(\'%s\', """\n'
            % (ctype, ctype))

          # Read lines after config until next attribute. This relies on
          # indentation (everything with 4 spaces of indent or empty lines,
          # stopping at first line with 2 spaces of indent).
          for line in ifp:
            linenum += 1
            data_match = data_re.match(line)
            if data_match:
              ofp.write(line)
            else:
              break
          ofp.write('    """)\n')
      IO.close(ifp)
      ofp.write(
        '\n  return [%s\n  ]\n' %
        ', '.join(['\n    ' + cons for cons in constructs]))
    IO.close(ofp)

  def generateMajorMode(self):
    """Instantiate a major mode by parsing its schema and writing a .el file.

    Reads the 'emacs-template' resource, substituting placeholder tokens
    (<CONSTRUCTS-HERE>, <ATTRIBUTE-KEYS-HERE>, <FEATURE-VALUES-HERE>,
    <KEYWORDS-HERE>, <BASEWORDS-HERE>, ...) with keyword data gathered
    from the metalang's schema and context, and writes the result to
    src/schema/<metaid>/meta<metaid>-mode.el.
    """
    metalang = self.metalang()
    if metalang:
      # First, we parse the schema for the metalang in question.
      schema = metalang.schema()
      context = metalang.context()
      constructs = schema.attrval('config')
      print sorted([cons.id() for cons in constructs])

      metaid = metalang.id()
      MetaLang = metalang.name()
      in_file = self.resource('emacs-template')
      out_file = self.metaPath(
        'src', 'schema', metaid, 'meta%s-mode.el' % metaid)

      print 'Creating major mode %s for %s (%s)' % (
        out_file, metaid, MetaLang)

      # Aggregate feature keys, feature values, and secondary attribute keys
      # across every construct known to the context.
      # NOTE(review): reaches into Context internals (_consinfomap); an
      # accessor on Context would be cleaner -- confirm before changing.
      consinfo_map = context._consinfomap
      featkeys = {}
      featvals = {}
      secondaries = {}
      for cid, consinfo in consinfo_map.iteritems():
        featkeys.update(consinfo['featkeys'])
        featvals.update(consinfo['featvals'])
        secondaries.update(consinfo['secondaries'])
      ckeys = context.primaries()
      akeys = list(set(featkeys.keys()).union(set(secondaries.keys())))
      fvals = sorted(featvals.keys())
      kkeys = [context.token('end'),
               # The following are keywords from Meta(Meta).  Whether there
               # are any meta-lang specific keywords has yet to be determined.
               'id', 'xid', 'id-list',
               'word', 'word-list',
               'int', 'real',
               'str', 'type',
               'simple', 'complex']
      # Base-language keywords (e.g. C++ reserved words), used for tertiary
      # font locking in the generated mode.
      bkeys = set()
      for baselang in metalang.bases().values():
        bkeys.update(baselang.KEYWORDS)
      print '  Construct  [%3d]: %s' % (len(ckeys), ' '.join(sorted(ckeys)))
      print '  Attributes [%3d]: %s' % (len(akeys), ' '.join(sorted(akeys)))
      print '  Featvals   [%3d]: %s' % (len(fvals), ' '.join(sorted(fvals)))
      print '  Keywords   [%3d]: %s' % (len(kkeys), ' '.join(sorted(kkeys)))
      print '  Basewords  [%3d]: %s' % (len(bkeys), ' '.join(sorted(bkeys)))

      # Placeholder -> replacement text for the template.  Each entry is
      # applied at most once (applied entries are deleted in the loop below).
      replacements = {
        '<CONSTRUCTS-HERE>': ' '.join(['"%s"' % e for e in sorted(ckeys)]),
        '<ATTRIBUTE-KEYS-HERE>': ' '.join(['"%s"' % e for e in sorted(akeys)]),
        '<FEATURE-VALUES-HERE>': ' '.join(['"%s"' % e for e in sorted(fvals)]),
        '<KEYWORDS-HERE>': ' '.join(['"%s"' % e for e in sorted(kkeys)]),
        '<BASEWORDS-HERE>': ' '.join(['"%s"' % e for e in sorted(bkeys)]),
        '<SPACE-BINDINGS-HERE>': '',
        '<SECONDARY-FONT-LOCK-HERE>':
        '(cons (concat "\\\\<" metaoopl-keywords-re "\\\\>") font-lock-metaoopl-keyword-face)',
        '<TERTIARY-FONT-LOCK-HERE>':
        '(cons (concat "\\\\<" metaoopl-basewords-re "\\\\>") font-lock-metaoopl-baseword-face)',
      }

      ofp = IO.writer(out_file)
      ifp = IO.reader(in_file)
      # Lines before the sentinel line (the template line containing
      # "'MetaLang' and 'metalang'") are copied through verbatim;
      # substitution only starts after it.
      replace = False
      for line in ifp:
        if not replace:
          if "'MetaLang' and 'metalang'" in line:
            replace = True
        else:
          line = line.replace('metalang', 'meta%s' % metaid)
          line = line.replace('MetaLang', 'Meta(%s)' % MetaLang)

          reps = []
          for rep in replacements:
            if rep in line:
              line = line.replace(rep, replacements[rep])
              reps.append(rep)
          for rep in reps:
            # These are one-time replacements.
            del replacements[rep]

        ofp.write(line)
      IO.close(ifp)
      IO.close(ofp)

    else:
      print 'ERROR: Invalid metalang %s' % metalang

  def parseMeta(self, metafile_path, destdir=None, debug_level=0):
    """Parse a single meta file.

    Args:
      metafile_path: str
        The file to parse.
      destdir: str or None
        Directory relative to which generated meta code is written.
        If None, the directory containing metafile_path is used.
      debug_level: int
        Amount of debugging to perform.

    Returns: MetaFile
      The parsed (but not yet compiled) meta file.
    """
    if destdir is None:
      destdir = os.path.dirname(os.path.abspath(metafile_path))
    metalang = self._metalang
    context = metalang.context()
    metafile = MetaFile(
      metafile_path, context, compiler=self, destdir=destdir,
      debug_level=debug_level)
    metafile.parseFile()
    return metafile

  def parseMetaProgram(self, metafile_paths, debug_level=0):
    """Parse a collection of meta files.

    Args:
      metafile_paths: list of str
        Paths of the meta files to parse.  Each file's destination
        directory is the directory containing that file.
      debug_level: int
        Amount of debugging to perform.

    Returns: (list of MetaFile, int)
      The parsed files and the total number of parse errors encountered.
    """
    parsed = []
    error_count = 0
    for path in metafile_paths:
      metafile = self.parseMeta(
        path, destdir=os.path.dirname(os.path.abspath(path)),
        debug_level=debug_level)
      parsed.append(metafile)
      error_count += len(metafile.errors())
    return parsed, error_count

  def compileMeta(self, metafile, baselang, debug_level=0, destdir=None,
                  verify=False, config=None):
    """Compile a single meta file into baselanguage source file(s).

    Args:
      metafile: MetaFile
        The parsed metafile to compile.
      baselang: <BaseLanguage
        The BaseLanguage to compile into.
      debug_level: int
        Amount of debugging to perform.
      destdir: str or None
        If None, the same directory as metafile_path.
        Determines where to write generated source code
        (in $destdir/.meta/<baselang>/<namespace_path>/<file>)
        NOTE(review): not referenced in the body below; the destination
        appears to come from the metafile itself -- confirm before removing.
      verify: bool
        If True, after parsing the metafile, write out the meta contents to
        a tmp file and compare the original output with the new output. This
        helps detect situations where the parser is not idempotent.
      config: dict or None
        Controls how compilation proceeds.

    Returns: MetaFile
    """
    # Parse the .meta file, ensuring that the result is a single instantation
    # of construct 'Schema' (maybe it should be File' instead?).
    metafile_path = metafile.filename()
    Log.info('Compiler.compileMeta(%s)', metafile_path)
    # Indent log output for the duration of this compile; matched by the
    # Log.undent() just before return.
    Log.indent()
    file_cons = metafile.construct()

    if not file_cons:
      # Failed to parse the file.
      metafile.fatal('Failed to parse %s' % metafile_path)
    else:
      if verify:
        # Idempotency check: re-serialize the parsed construct and diff it
        # against the original file.  The tmp file is left on disk so it
        # can be inspected after the diff.
        print '**************** TODO: Fix this for in-memory use!'
        fd, tmpfile = tempfile.mkstemp(suffix='.meta')
        lines = file_cons.scope().asLines(file_cons.context())
        os.write(fd, '\n'.join(lines))
        os.close(fd)
        print 'WROTE %s' % tmpfile
        subprocess.call(['diff', metafile_path, tmpfile])

      # We ask the file construct to compile itself, relative to a baselang.
      # This will double-dispatch into the BaseLanguage hierarchy, where
      # per-construct-per-baselang compilation takes place.
      if config is None:
        config = {}
      metafile_path = metafile.filename()
      #if destdir is None:
      #  destdir = os.path.dirname(os.path.abspath(metafile_path))
      #env = FileEnv(metafile, self, destdir)
      file_cons.compileMeta(metafile, baselang, config)

      verbose = False  # TODO(wmh): Make this a verbosity flag
      if verbose or metafile.errors():
        metafile.printLog()

    Log.undent()
    return metafile

  def compileMetaProgram(self, metafiles, baselang, debug_level=0, destdir=None,
                         verify=False, config=None):
    """Compile a collection of parsed meta files into baselanguage sources.

    Args:
      metafiles: list of MetaFile
        The parsed files to compile.  Assumed non-empty: metafiles[0] is
        used below as the representative file for base-code compilation.
      baselang: <BaseLanguage
        The BaseLanguage to compile into.
      debug_level: int
        Amount of debugging to perform.
      destdir: str or None
        If None, the same directory as metafile.
        Determines where to write generated source code
        (in $destdir/.meta/<baselang>/<namespace_path>/<file>)
        NOTE(review): not referenced in the body below -- confirm intent.
      verify: bool
        If True, after parsing the metafile_path, write out the meta contents to
        a tmp file and compare the original output with the new output. This
        helps detect situations where the parser is not idempotent.
      config: dict or None
        Controls how compilation proceeds.

    Returns: int
      Number of meta files that produced errors.
    """
    # Compile the meta files, collecting the set of distinct meta paths seen.
    errors = 0
    paths = set()
    for metafile in metafiles:
      self.compileMeta(
        metafile, baselang=baselang, debug_level=debug_level, verify=verify,
        config=config)
      paths.add(metafile.metapath(baselang))
      if metafile.errors():
        errors += 1

    # This performs the highest level of base code compilation possible.
    # For example, in C++, when any C++ source is changed, the entire
    # hierarchy of shared library files needs to be updated.
    #
    # TODO(wmh): The current implementation of this is a bit problematic,
    # because it relies on baselang.env().metapath() to determine the root of
    # the generated meta code, but baselang.env() changes for each metafile
    # processed.  If metafiles with different metapath()s are processed, this
    # implementation will be incorrect, so we check for that situation and
    # raise an exception if it occurs.
    metafile = metafiles[0]
    # metafile = baselang.env()
    if errors <= 0:
      if len(paths) > 1:
        raise meta.compiler.errors.Error(
          'Not currently handling multiple meta paths: %s' % paths)
      elif list(paths)[0] != metafile.metapath(baselang):
        raise meta.compiler.errors.Error(
          'Discrepancy between expected path %s and actual %s' %
          (list(paths)[0], metafile.metapath(baselang)))
      else:
        baselang.compileBaseCode(metafile, construct=None)

    # Miscellaneous base-language output is generated even when errors
    # occurred above.
    baselang.compileMisc(metafile)

    return errors

  def analyzeMetaMethod(self, method, progress):
    """Analyze a method for meta-level implementation indications.

    Scans the source of 'method' for progress annotations of the form
      #! <attrname> [<attrval>] <percent>%
    and records each percentage on the matching child (or grandchild,
    for feature-attribute values) of 'progress'.

    Args:
      method: method
        The method to analyze
      progress: Progress
        The progress object in which to add children.  Since the method
        is presumed to be one that implements a construct within a baselang,
        the progress object should represent a construct, and the method
        adds attribute children (and attribute value grandchildren, in the
        case of feature attributes).

    Raises:
      Error: if an annotation names an attribute (or feature value) that
        is not registered in 'progress', or a line contains '#!' but does
        not match the expected annotation format.
    """
    # Raw string so the regex escapes (\s, \S, \d) are not treated as
    # (invalid) string escape sequences.
    meta_re = re.compile(r'#!\s*(\S+)(?:\s+(\S+))?\s+(\d+)%')
    lines, lineno = inspect.getsourcelines(method)
    for line in lines:
      if '#!' in line:
        meta_match = meta_re.search(line)
        if meta_match:
          attrname, attrval, percent = meta_match.groups()
          percent = int(percent)
          attr_progress = progress.child(attrname)
          if not attr_progress:
            raise meta.compiler.errors.Error(
              'Found %s %s %s%% but no %s in %s' % (
                attrname, attrval, percent, attrname, progress.name()))
          if attrval:
            # Feature attribute: the percentage applies to a single value.
            featval_progress = attr_progress.child(attrval)
            if not featval_progress:
              raise meta.compiler.errors.Error(
                'Found %s %s %s%% but no %s in %s' % (
                attrname, attrval, percent, attrval, attr_progress.name()))
            featval_progress.percentIs(percent)
          else:
            attr_progress.percentIs(percent)
        else:
          raise meta.compiler.errors.Error(
              'Line %d: Found #! but not properly formatted: %s' %
              (lineno, line))
      lineno += 1

  def analyzeMeta(self):
    """Establish progress made on implementing Meta.

    For a given MetaLanguage (and its associated Schema)
      For every BaseLanguage B
        For every construct in the schema (and the associated compile_* method in B)
          For every attribute in the construct
            Determine if the compile_* method has a comment in it that indicates
            how much progress has been made.  If a comment does not exist,
            progress is assumed to be 0.
          Determine how much of the construct has been implemented by assessing
          how many of the attributes have been done.  If there are 10
          attributes, one is 100%, one is 50%, and the others are 0% done,
          then the construct itself is 15% done.
        Determine how much of the baselang has been implemented by aggregating
        over all constructs.

    Returns: Progress
      The root of the progress tree (metalang -> baselang -> construct ->
      attribute [-> feature value]).
    """
    metalang = self._metalang
    schema = metalang.schema()
    construct_constructs = schema.attrval('config')

    meta_progress = Progress(
      metalang.name(), desc='Metalang %s' % metalang.name())

    # Per-attribute weights derived from the attribute's 'priority'
    # attribute (default 'minor'); used to weight construct progress.
    priority_weights = {'critical': 7, 'major': 5, 'medium': 3, 'minor': 1}

    for baselang in sorted(metalang._list, key=lambda bl: bl.name()):
      #print '%-10s: %s' % (baselang.name(), baselang.__class__.__name__)
      base_progress = meta_progress.registerChild(
        baselang.name(), desc='Baselang %s' % baselang.fullname())

      for construct_construct in construct_constructs:
        #print '  ' + construct_construct.kindid()
        construct_name = construct_construct.id()
        cons_progress = base_progress.registerChild(
          construct_name,
          desc='Construct %s in %s' % (construct_name, baselang.fullname()))

        attribute_constructs = construct_construct.attrval('config')
        cons_weights = {}
        for attribute_construct in attribute_constructs:
          #print '    ' + attribute_construct.kindid()
          attribute_name = attribute_construct.id()
          # Every attribute starts at 0%; analyzeMetaMethod() raises the
          # percentage when it finds a '#!' annotation for it.
          attr_progress = cons_progress.registerChild(
            attribute_name,
            percent=0,
            desc='Attribute %s.%s within %s' % (
              attribute_name, construct_name, baselang.fullname()))

          kind = attribute_construct.attrval('kind')
          if kind == 'feature':
            # We need to see an indication for every single value.
            featvals = LiteralList(attribute_construct.attrval('type')).data()
            for featval in featvals:
              attr_progress.registerChild(
                featval, percent=0,
                desc='Feature value %s within attribute %s' % (
                  featval, attribute_name))
          else:
            # We just need an indication that this attribute has been addressed.
            pass

          # Establish the priority of this attribute, and add a weight for it.
          priority_attr = attribute_construct.attr('priority', default=None)
          priority = priority_attr.value() if priority_attr else 'minor'
          cons_weights[attribute_name] = priority_weights[priority]
        cons_progress.weightsIs(cons_weights)

        # We establish the method (within baselang) and parse it for percentages.
        # Only methods defined directly on the baselang's own class count
        # (im_class.__dict__ excludes inherited definitions).  NOTE: im_class
        # is a Python 2 bound-method attribute.
        method_name = 'compile_%s' % construct_name
        construct_method = getattr(baselang, method_name, None)
        if (construct_method and
            method_name in construct_method.im_class.__dict__):
          self.analyzeMetaMethod(construct_method, cons_progress)
          if True:
            # We can mark the primary construct as 100% if a method exists
            # for the associated construct.
            primary_progress = cons_progress.child(construct_name)
            primary_progress.percentIs(100)
          else:
            # Alternatively, we can simply delete this entry so it doesn't
            # skew our results.
            del cons_progress._children[construct_name]

    return meta_progress

  def htmlProgress(self, fp=sys.stdout, compute=True, closure_path='.'):
    """Generate HTML showing the progress on implementing Meta.

    Produces a grid with baselangs as columns and constructs (expandable
    into attributes / feature values via goog.ui.Zippy) as rows.

    Args:
      fp: *File
        Where to write the generated HTML.
      compute: bool
        If True, recompute percentages; otherwise use cached values.
      closure_path: str
        Path prefix under which the Closure library can be found.
    """
    metalang = self.metalang()
    schema = metalang.schema()
    construct_constructs = schema.attrval('config')

    fp.write("""
<html>
 <head>
  <style>
   span.cell { float: left; width: 70px; text-weight: bold; text-align: right}
   div.clear { clear: both; }
   div.header { border-style: solid; border-width: 1px; }
   div.body { margin: 0px; padding: 0px; }
   .feature { color: purple; }
   .featval { color: green; }
   .primary { color: red; }
   .secondary { color: blue; }
  </style>
  <script src="%s/closure/goog/base.js"></script>
  <script>
    goog.require('goog.ui.Zippy')
  </script>
 </head>
 <body>
""" % closure_path)

    progress = self.analyzeMeta()
    bl_names = progress.names()
    fp.write('  <h1>%s</h1>\n' % metalang.name())

    # Write out the base-language column headers
    fp.write('  <!-- Baselang header -->\n')
    fp.write('  <div>\n   ')
    fp.write('<span class="cell"><b>BaseLang<br>/Construct</b></span>')
    for bl_name in bl_names:
      fp.write('<span class="cell">%s</span>' % bl_name)
    fp.write('\n  </div>\n')
    fp.write('  <div class="clear"></div>\n')

    # For each construct, generate overall progress and per-attribute progress.
    # 'zippies' collects the element-id prefixes that get expand/collapse
    # behavior in the script emitted at the bottom.
    zippies = []
    cons_names = progress.child(bl_names[0]).names()
    #for cons_name in cons_names:
    for construct_construct in construct_constructs:
      cons_name = construct_construct.id()

      fp.write('  <!-- Construct %s -->\n' % cons_name)
      fp.write('  <div>\n')
      fp.write('   <div id="construct-%s-header" class="header">\n' % cons_name)
      fp.write('    <span class="cell">%s</span>' % cons_name)
      zippies.append('construct-%s' % cons_name)
      attr_names = set()
      for bl_name in bl_names:
        bl_progress = progress.child(bl_name)
        cons_prog = bl_progress.child(cons_name)
        attr_names.update(cons_prog.names())
        percent = cons_prog.computePercent() if compute else cons_prog.percent()
        fp.write('<span class="cell">%s</span>' % percent)
      fp.write('\n   </div>\n')
      fp.write('   <div class="clear"></div>\n')
      fp.write('   <div id="construct-%s-body" class="body">\n' % cons_name)

      attribute_constructs = construct_construct.attrval('config')
      for attribute_construct in attribute_constructs:
        attr_name = attribute_construct.id()
        attr_kind = attribute_construct.attrval('kind')

        # Need to establish if this attribute is decomposed into children. We
        # decompose all feature attributes, and may decompose certain
        # secondary attributes, so we do this based on progress, not the
        # attribute_construct itself (i.e. we rely on analyzeMeta() to dictate
        # the child decomposition).
        # NOTE(review): 'bl_name' below is left over from the preceding loop
        # (i.e. the last baselang); this is only correct if analyzeMeta()
        # builds the same child decomposition for every baselang -- confirm.
        aprog = progress.child(bl_name).child(cons_name).child(attr_name)
        if not aprog:
          # We (may) delete the primary construct in analyzeMeta(), so we
          # support situations where a child doesn't exist.
          continue
        attr_values = aprog.names()

        attr_prefix = 'attribute-%s-%s' % (cons_name, attr_name)
        if attr_values:
          zippies.append(attr_prefix)

        fp.write('    <!-- Attribute %s-->\n' % attr_name)
        fp.write('    <div>\n')
        fp.write(
          '     <div id="%s-header" class="aheader">'
          '<span class="cell %s">%s</span>' % (attr_prefix, attr_kind, attr_name))
        for bl_name in bl_names:
          bl_progress = progress.child(bl_name)
          cons_prog = bl_progress.child(cons_name)
          attr_prog = cons_prog.child(attr_name)
          percent = (
            attr_prog.computePercent() if compute else attr_prog.percent())
          fp.write('<span class="cell">%s</span>' % percent)
        fp.write('<div class="clear"></div></div>\n')
        if attr_values:
          # Feature attribute: emit one row per feature value in the body.
          fp.write(
            '     <div id="%s-body" class="abody">\n' % attr_prefix)
          for attr_val in attr_values:
            fp.write('      <span class="cell featval">%s</span>' % attr_val)
            for bl_name in bl_names:
              bl_progress = progress.child(bl_name)
              cons_prog = bl_progress.child(cons_name)
              attr_prog = cons_prog.child(attr_name)
              attrval_prog = attr_prog.child(attr_val)
              percent = (
                attrval_prog.computePercent() if compute else attrval_prog.percent())
              fp.write('<span class="cell">%s</span>' % percent)
            fp.write('<div class="clear"></div>\n')
          fp.write('     </div>\n')
        fp.write('    </div>\n')
      fp.write('    </div>\n')
      fp.write('   <div class="clear"></div>\n')
      fp.write('  </div>\n')

    # Now generate the zippy code
    fp.write('\n')
    fp.write('  <script>\n')
    i = 0
    for zippy in zippies:
      i += 1
      fp.write(
        '   var z%d = new goog.ui.Zippy("%s-header", "%s-body");\n' %
        (i, zippy, zippy))
    fp.write('  </script>\n')

    fp.write(' </body>\n</html>\n')


class Progress(object):
  """Maintain progress, with support for sub-division of progress."""

  def name(self):
    """Return the name used to represent this progress node."""
    return self._name

  def percent(self):
    # WARNING: We only compute percent if it is currently None.  If it is
    # mistakenly set to an integer that doesn't represent the full total,
    # this will return that value ... best to call computePercent() to
    # ensure validity.
    result = self._percent
    if result is None:
      # We have not yet computed nested percentage.
      result = self.computePercent()
    return result

  def percentIs(self, percent):
    """Set the cached percentage for this node."""
    self._percent = percent

  def weights(self):
    """Return the per-child weight dict (None means evenly weighted)."""
    return self._weights

  def weightsIs(self, weights):
    """Set the per-child weight dict used by computePercent().

    Args:
      weights: dict or None
        Maps child name to numeric weight; None means evenly weighted.
    """
    self._weights = weights

  def __init__(self, name, percent=None, desc=None, weights=None):
    super(Progress, self).__init__()
    # field name: str
    #   The name used to represent this progress.
    self._name = name

    # field description: str or None
    #   A description of what this measures progress about.
    self._description = desc

    # field percent: int or None
    #   How much of this thing has been accomplished.
    self._percent = percent

    # field children: dict
    #   Maps conceptual names to Progress instances, representing a
    #   subdivision of progress.
    self._children = {}

    # field weights: dict or None
    #   Weights for each child (or None if children are evenly weighted)
    self._weights = weights

  def names(self):
    return sorted(self._children.keys())

  def child(self, name):
    """Obtain a child progress by name.

    Args:
      name: str
    """
    return self._children.get(name, None)

  def registerChild(self, name, percent=None, desc=None):
    """Create a child Progress instance and add to self.

    Args:
      name: str
        Key to use when adding child to self.
      percent: int or None
        Total progress made on the child.
      desc: str
        A description of what the child measures progress about.

    Returns: Progress
      The newly created child instance.
    """
    child = Progress(name, percent=percent, desc=desc)
    if name in self._children:
      raise meta.compiler.errors.Error(
        'Attempt to reregister %s within %s' % (name, self._name))
    self._children[name] = child
    return child

  def computePercent(self):
    """Compute percent from first principles.

    Raises:
      Error:
        if a Progress instance has no children and a None percent. Leaf
        Progress instances must specify a percentage.

    Returns: int
    """
    children = self._children
    count = len(children)

    if count == 0:
      result = self._percent
      if result is None:
        raise meta.compiler.errors.Error(
          'Leaf progress %s has no progress' % self._name)
    else:
      percent = 0.0
      weights = self._weights
      if weights:
        total_weight = float(sum(weights.values()))
        for cname, weight in weights.iteritems():
          child = children.get(cname, None)
          if child:
            perc = child.computePercent()
            percent += perc * (weight/total_weight)
      else:
        # Simple case: all children evenly weighted
        for cname, child in children.iteritems():
          perc = child.computePercent()
          percent += perc / float(count)
      result = int(percent + 0.5)
    return result

  def show(self, fp=sys.stdout, indent='', compute=False, verbose=False):
    """Print myself.

    Args:
      fp: file-like object
        Where to print.
      indent: str
        What to insert at the beginning of each line.
      compute: bool
        If True, compute percents, othewise rely on cached percent.
      verbose: bool
        If True, show descriptions.
    """
    desc = self._description or '' if verbose else ''
    percent = self.computePercent() if compute else self.percent()
    fp.write('%s%-20s: [%d] %s\n' % (indent, self._name, percent, desc))
    children = self._children
    subindent = indent + '  '
    for cname in sorted(children):
      child = children[cname]
      child.show(fp=fp, indent=subindent)

  def html(self, fp=sys.stdout, indent='  ', compute=True, wrap=False,
           title=None, colrow=None):
    """Dump myself to html.

    Args:
      fp: file-like object
        Where to write the HTML.
      indent: str
      compute: bool
      wrap: bool
        Add in wrapper html.
      title: str
        A description of the table being produced.
      colrow: str
        The text to insert in the top left corner of the table.
    """
    # TODO(wmh): This approach is a bit too coarse-grained.  The
    # functionality here is left in case it is useful elsewhere, but
    # for displaying Meta progress, Compiler.htmlProgress() is better.

    if wrap:
      fp.write('<html>\n')
      fp.write(' <head>\n')
      fp.write('   <style>\n')
      fp.write('     td { text-align: right; }\n')
      fp.write('   </style>\n')
      fp.write(' </head>\n')
      fp.write(' <body>\n')

    columns = sorted(self._children.keys())

    fp.write('\n')
    fp.write('%s<table border="1">\n' % indent)
    fp.write('%s <caption>%s</caption>\n' % (indent, self.name()))

    # Header row (we show the children of self as the columns, and the
    # grandchildren as rows).
    fp.write('%s <tr><th>%s</th>' % (indent, title))
    rowset = set()
    for col in columns:
      fp.write('<th>%s</th>' % col)
      col_prog = self.child(col)
      rowset.update(col_prog._children.keys())
    fp.write('%s </tr>\n' % indent)

    for row in sorted(rowset):
      fp.write('%s <tr>\n' % indent)
      fp.write('%s  <th>%s</th>\n' % (indent, row))
      for col in columns:
        col_prog = self.child(col)
        row_prog = col_prog.child(row)
        if row_prog:
          percent = self.computePercent() if compute else self.percent()
        else:
          percent = ''
        fp.write('%s  <td>%s</td>\n' % (indent, percent))
      fp.write('%s </tr>\n' % indent)
    fp.write('%s</table>\n' % indent)
