#!/usr/bin/python
"""
tpe_compile_test.py: Tests for tpe_compile.py
"""

from pprint import pprint
import unittest

import tpe_bootstrap
import tpe_lex
import tpe_nodes
import tpe_compile  # module under test


# Module-level lexer shared by every test case below.
tpe_lexer = tpe_lex.GetLexer()


class TpeParseTest(unittest.TestCase):

  def setUp(self):
    self.tpe_parser = tpe_bootstrap.GetParser()

  def parse_tpe(self, *args, **kwargs):
    return self.tpe_parser.parse(*args, **kwargs)

  def testBootstrap(self):
    """Test that bootstrap sexps equals the parsed tpe.tpe file."""

    # First lex it
    e = open('annex/tpe.tpe').read()

    tokens = list(tpe_lexer.run(e))

    pprint(tokens)

    # Then feed tokens to the bootstrap grammar.
    v, pos = self.parse_tpe(tokens, start='Start')
    print 'pos', pos

    # Test that it matches the bootstrap.
    pprint(v)
    self.assertNotEqual(None, v)
    for name, value in v.iteritems():
      print 'testing', name
      self.assertEqual(tpe_bootstrap.DEFS[name], value)
    print pos


class MetaGrammarTest(unittest.TestCase):
  """Break it down into different definitions."""

  def setUp(self):
    self.tpe_parser = tpe_bootstrap.GetParser()

  def parse_tpe(self, *args, **kwargs):
    return self.tpe_parser.parse(*args, **kwargs)

  def testPrimary(self):
    # A reference to another thing
    tokens = list(tpe_lexer.run("Number"))
    print 'GRAMMAR tokens'
    pprint(tokens)

    v, pos = self.parse_tpe(tokens, 'Primary')
    # We get an identifier token back after parsing Primary.
    #self.assertEqual(('IDENTIFIER', 'Number', 0), v)
    pprint(v)

    tokens = list(tpe_lexer.run("SIGN"))
    v, pos = self.parse_tpe(tokens, 'Primary')
    #self.assertEqual(('TOKEN', 'SIGN', 0), v)
    pprint(v)

  def testSuffix(self):
    # A reference to another thing
    tokens = list(tpe_lexer.run("Number+"))
    print 'GRAMMAR tokens'
    pprint(tokens)

    v, pos = self.parse_tpe(tokens, 'Suffix')

    # We get an identifier token back after parsing Primary.
    #self.assertEqual(('IDENTIFIER', 'Number', 0), v)
    pprint(v)

  def testPrefix(self):
    # A reference to another thing
    tokens = list(tpe_lexer.run("&Number"))
    print 'GRAMMAR tokens'
    pprint(tokens)

    v, pos = self.parse_tpe(tokens, 'Prefix')

    # We get an identifier token back after parsing Primary.
    #self.assertEqual(('IDENTIFIER', 'Number', 0), v)
    pprint(v)

  def testSequence(self):
    tokens = list(tpe_lexer.run("Number Letter"))
    pprint(tokens)

    v, pos = self.parse_tpe(tokens, 'Sequence')
    self.assertEqual(
        v,
        ['Seq', ['Ref', 'Number'], ['Ref', 'Letter']])

    # TODO: test empty tokens.
    #tokens = list(tpe_lexer.run(" "))
    #v, pos = mat(grammar, tokens)

    # We get an identifier token back after parsing Primary.
    #self.assertEqual(('IDENTIFIER', 'Number', 0), v)
    pprint(v)

  def testChoice(self):
    tokens = list(tpe_lexer.run("Number / Letter"))
    v, pos = self.parse_tpe(tokens, 'Choice')

    # We get an identifier token back after parsing Primary.
    #self.assertEqual(('IDENTIFIER', 'Number', 0), v)
    pprint(v)

    tokens = list(tpe_lexer.run("Number / Letter / Punc"))
    v, pos = self.parse_tpe(tokens, 'Choice')
    pprint(v)

  def testDefinition(self):
    tokens = list(tpe_lexer.run("Sum <- Number"))
    v, pos = self.parse_tpe(tokens, 'Definition')
    pprint(v)

    tokens = list(tpe_lexer.run("Sum <- Number / Letter"))
    v, pos = self.parse_tpe(tokens, 'Definition')
    pprint(v)

    tokens = list(tpe_lexer.run("Sum <- Number (SIGN Number)*"))
    v, pos = self.parse_tpe(tokens, 'Definition')
    pprint(v)

  def testStart(self):
    tokens = list(tpe_lexer.run("Sum <- Number  Number <- Integer"))
    v, pos = self.parse_tpe(tokens, 'Definition')
    pprint(v)


class CalcTest(unittest.TestCase):
  """Test the calculator grammar with the bootstrap grammar."""

  def setUp(self):
    self.tpe_parser = tpe_bootstrap.GetParser()

  def parse_tpe(self, *args, **kwargs):
    return self.tpe_parser.parse(*args, **kwargs)

  def testIdentifierAsMany(self):
    # Test a simple identifier.
    tokens = list(tpe_lexer.run('Number'))

    print tokens
    self.assertEqual(1, len(tokens))
    # The tokenizer recognizes this as an identifier (not a token).
    self.assertEqual(
        ('IDENTIFIER', 'Number', 0), tokens[0])

    # This compiles into a reference to a number, as Primary, Suffix, etc.
    v, pos = self.parse_tpe(tokens, 'Primary')
    self.assertEqual(['Ref', 'Number'], v)

    v, pos = self.parse_tpe(tokens, 'Suffix')
    self.assertEqual(['Ref', 'Number'], v)

    v, pos = self.parse_tpe(tokens, 'Prefix')
    self.assertEqual(['Ref', 'Number'], v)

    v, pos = self.parse_tpe(tokens, 'Sequence')
    self.assertEqual(['Ref', 'Number'], v)

    v, pos = self.parse_tpe(tokens, 'Choice')
    self.assertEqual(['Ref', 'Number'], v)

  def testTokenAsMany(self):
    # Test a simple identifier.
    tokens = list(tpe_lexer.run('SIGN'))

    print tokens
    self.assertEqual(1, len(tokens))
    # The tokenizer recognizes this as an identifier (not a token).
    self.assertEqual(
        ('TOKEN', 'SIGN', 0), tokens[0])

    # This compiles into a reference to a number, as Primary, Suffix, etc.
    v, pos = self.parse_tpe(tokens, 'Primary')
    self.assertEqual(['TokenRef', 'SIGN'], v)

    # This compiles into a reference to a number, as Primary, Suffix, etc.
    v, pos = self.parse_tpe(tokens, 'Choice')
    self.assertEqual(['TokenRef', 'SIGN'], v)

  def testParensAsMany(self):
    # Test a simple identifier.
    tokens = list(tpe_lexer.run('(SIGN)'))

    print tokens
    self.assertEqual(3, len(tokens))
    # The tokenizer recognizes this as an identifier (not a token).
    self.assertEqual(
        [ ('(', None, 0), 
          ('TOKEN', 'SIGN', 1), 
          (')', None, 2), 
          ],
        tokens)

    # This compiles into a reference to a number, as Primary, Suffix, etc.
    v, pos = self.parse_tpe(tokens, 'Primary')
    self.assertEqual(['TokenRef', 'SIGN'], v)

    v, pos = self.parse_tpe(tokens, 'Choice')
    self.assertEqual(['TokenRef', 'SIGN'], v)

  def testCalc(self):
    # Metagrammar
    # Repeat of examples/calc.py
    calc = """\
Sum    <- Number (SIGN Number)*
Number <- REAL / INTEGER
"""

    # now lex the grammar
    lexer = tpe_lex.GetLexer()
    print 'GRAMMAR str',calc 
    calc_tokens = list(lexer.run(calc))
    print 'GRAMMAR tokens'
    pprint(calc_tokens)

    v, pos = self.parse_tpe(calc_tokens, 'Start')
    pprint(v)
    #print mat(calc, '1')


# Run all test cases in this module when executed as a script.
if __name__ == '__main__':
  unittest.main()
