#!/usr/bin/python -S
"""
tokenize_test.py: Tests for tokenize.py
"""

__author__ = 'Andy Chu'


import os
import re
import sys

if __name__ == '__main__':
  # When run directly, prepend the svn checkout of the 'pan' project to
  # sys.path so that 'pan.test.testy' and friends are importable.
  # NOTE(review): assumes the relative layout ../../../../svn/pan/trunk
  # exists next to this file — confirm against the checkout structure.
  sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..',
    '..', 'svn', 'pan', 'trunk'))

from pan.core import cmdapp
from pan.core import params
from pan.core import json
from pan.core import util
from pan.test import testy

import tokenize  # module under test


class Verifier(testy.StandardVerifier):
  """Bridges the tokenize module's functions to testy's assertion helpers.

  Each method runs one tokenize entry point and asserts the result equals
  the caller-supplied expectation.
  """

  def TokenizePatternLine(self, s, tokens):
    """Assert TokenizePatternLine(s) yields exactly `tokens`."""
    actual = list(tokenize.TokenizePatternLine(s))
    self.Equal(actual, tokens)

  def LineRe(self, s, expected):
    """Assert the groups that LINE_RE captures from `s`."""
    self.RegexGroups(tokenize.LINE_RE, s, expected)

  def CombineSpecial(self, old, new):
    """Assert CombineSpecial(old) yields exactly `new`."""
    combined = list(tokenize.CombineSpecial(old))
    self.Equal(combined, new)

  def Tokenize(self, s, tokens):
    """Assert Tokenize(s) yields exactly `tokens`."""
    actual = list(tokenize.Tokenize(s))
    self.Equal(actual, tokens)


class TokenizeTest(testy.Test):
  """Tests for the public tokenize entry points.

  All assertions go through the Verifier methods above; the expected token
  lists below are byte-sensitive (escapes and whitespace are significant).
  """

  VERIFIERS = [Verifier]

  def testParseRepetitions(self):
    """ParseRepetitions maps a count spec to a (min, max) pair.

    None appears to mean 'unbounded' — inferred from the '*' and '4,' cases.
    """
    f = tokenize.ParseRepetitions
    self.verify.Equal(f('*'), (0, None))
    self.verify.Equal(f('35'), (35, 35))
    self.verify.Equal(f('4,35'), (4, 35))
    self.verify.Equal(f('4,'), (4, None))
    self.verify.Equal(f(',35'), (0, 35))

  def testTokenize(self):
    """Pattern ('P|') lines: metachar-free text becomes a LITERAL token."""
    # Regex -> literal optimization
    self.verify.Tokenize(
        'P| foo',
        [(tokenize.LITERAL_TOKEN, 'foo', 0)])

    # '.' is a regex metacharacter, so this stays a REGEX token.
    self.verify.Tokenize(
        'P| foo.',
        [(tokenize.REGEX_TOKEN, 'foo.', 0)])

    # A flag forces REGEX even for literal text; flags land in the 3rd slot.
    self.verify.Tokenize(
        '/ignore-case P| foo',
        [(tokenize.REGEX_TOKEN, 'foo', re.IGNORECASE)])

  def testTokenizeLiteralMode(self):
    """Literal ('L|') lines: text is literal, {...} is an instruction."""
    # Regex -> literal optimization
    self.verify.Tokenize(
        'L| foo {foo} foo',
        [(tokenize.LITERAL_TOKEN, 'foo', 0),
         (tokenize.INSTRUCTION_TOKEN, 'foo', 0),
         (tokenize.LITERAL_TOKEN, 'foo', 0),
         ])

    # Now with significant whitespace ('LS|'): spaces around the
    # instruction are preserved in the adjacent literals.
    self.verify.Tokenize(
        'LS| foo {foo} foo',
        [(tokenize.LITERAL_TOKEN, 'foo ', 0),
         (tokenize.INSTRUCTION_TOKEN, 'foo', 0),
         (tokenize.LITERAL_TOKEN, ' foo', 0),
         ])

  def testTokenizeEscaping(self):
    """Backslash-escaped braces and backslashes pass through as text (0)."""
    self.verify.TokenizePatternLine(
        r'{/value foo}f{/end}',
        [ (1, '/value foo'),
          (0, 'f'),
          (1, '/end'),
          ])

    # An escaped '{' is plain text, not the start of an instruction.
    self.verify.TokenizePatternLine(
        r'{/value foo}\{{/end}',
        [ (1, '/value foo'),
          (0, r'\{'),
          (1, '/end'),
          ])

    # A doubled backslash is also plain text.
    self.verify.TokenizePatternLine(
        r'{/value foo}\\{/end}',
        [ (1, '/value foo'),
          (0, r'\\'),
          (1, '/end')
          ])

    self.verify.TokenizePatternLine(r'\a', [(0, r'\a')])

  def testTokenizeSpecialCases(self):
    """Regex repetition braces like {3,6} are NOT treated as instructions."""
    self.verify.TokenizePatternLine(
        r'{/value foo}a{3,6}b{/end}',
        [ (1, '/value foo'),
          (0, 'a{3,6}b'),
          (1, '/end'),
          ])

    # At the beginning
    self.verify.TokenizePatternLine(
        r'{3,6}b{end}',
        [ (0, '{3,6}b'),
          (1, 'end'),
          ])

    # At the end
    self.verify.TokenizePatternLine(
        r'{/value foo}a{3,6}',
        [ (1, '/value foo'),
          (0, 'a{3,6}'),
          ])

  def testTokenizeWithInstructionOnLine(self):
    """Multi-line input: instruction lines and pattern lines interleave."""
    self.verify.TokenizePatternLine("""\
  {value foo}
a{3,6}b
  {end}""",
        [ (1, 'value foo'),
          (0, 'a{3,6}b'),
          (1, 'end'),
          ])

  def testEscapedBraces(self):
    """Escaped braces inside a group stay part of one REGEX token."""
    self.verify.TokenizePatternLine(
        r'a(\{3,6\})b',
        [ (tokenize.REGEX_TOKEN, r'a(\{3,6\})b') ])

  def testCombineSpecial(self):
    """CombineSpecial strips braces and merges .lbrace/.rbrace escapes."""
    self.verify.CombineSpecial(
        ['a', '{yo}', 'b', '{hello}', 'c'],
        ['a', 'yo', 'b', 'hello', 'c'])

    # {.lbrace}/{.rbrace} collapse with their neighbors into one
    # escaped-brace literal.
    self.verify.CombineSpecial(
        ['a', '{.lbrace}', 'b', '{.rbrace}', 'c'],
        [r'a\{b\}c'])

    self.verify.CombineSpecial(
        ['a', '{.lbrace}', 'b', '{.rbrace}', 'c', '{:int}'],
        [r'a\{b\}c', ':int'])

  def testLineRe(self):
    """LINE_RE splits a line into (flags, mode, rest); no '|' -> no match."""
    self.verify.LineRe('', None)  # Needs a |
    self.verify.LineRe('|', (None, '', ''))
    self.verify.LineRe('P|', (None, 'P', ''))
    self.verify.LineRe('P| {hello}', (None, 'P', '{hello}'))
    self.verify.LineRe('/ignore-case P|', ('ignore-case', 'P', ''))
    self.verify.LineRe(
        '/ignore-case P| {hello}', ('ignore-case', 'P', '{hello}'))
    self.verify.LineRe('| {hello}', (None, '', '{hello}'))


class HelpersTest(testy.Test):
  """Tests for tokenize's helper functions (_MakeFlags, MakeCharClass, etc.)."""

  LABELS = ['multilanguage']

  def testMakeFlags(self):
    """_MakeFlags turns a '/'-separated flag string into re flag bits."""
    self.verify.Equal(
        tokenize._MakeFlags('ignore-case'), re.IGNORECASE)
    self.verify.Equal(
        tokenize._MakeFlags('ignore-case/verbose'),
        re.IGNORECASE | re.VERBOSE)

  def assertStringLiteral(self, string, expected):
    """Assert whether `string` contains no regex metacharacters.

    A string is a plain literal iff REGEX_RE finds nothing in it.
    """
    # Simplified from `not bool(...search(...))`: a Match object is truthy
    # and a miss is None, so `is None` expresses the same predicate directly.
    self.verify.Equal(
        tokenize.REGEX_RE.search(string) is None, expected)

  def testStringLiteral(self):
    """Literal detection: plain text is True, any regex syntax is False."""
    self.assertStringLiteral('a', True)
    self.assertStringLiteral('here-or-there!', True)
    self.assertStringLiteral('^', False)
    self.assertStringLiteral('\\', False)
    self.assertStringLiteral(r'\d', False)
    self.assertStringLiteral(r'(a+)', False)
    self.assertStringLiteral(r'a+', False)

  def testCharClass(self):
    """MakeCharClass builds a (possibly negated) regex character class.

    Fix: the original statements each ended with a stray trailing comma,
    which made every line a throwaway one-element tuple expression.
    """
    self.verify.Equal(
        tokenize.MakeCharClass('.', 'a'), '[a]')
    self.verify.Equal(
        tokenize.MakeCharClass('.', 'a-z'), '[a-z]')
    # '!' negates the class.
    self.verify.Equal(
        tokenize.MakeCharClass('!', 'a-z'), '[^a-z]')
    self.verify.Equal(
        tokenize.MakeCharClass('!', '- ^'), r'[^\-\^]')
    # Named classes like .space / .word expand inside the class.
    self.verify.Equal(
        tokenize.MakeCharClass('!', '- .space'), r'[^\- ]')
    self.verify.Equal(
        tokenize.MakeCharClass('.', '[ ] .word'), r'[\[\]\w]')


if __name__ == '__main__':
  # testy discovers and runs the Test subclasses defined in this module.
  testy.RunThisModule()
