from .compat import *
import re

from .grammars import Symbol

class Tokenizer :
    """Regexp-based lexer that splits a text into a list of final `Symbol`s.

    Token definitions are named regular expressions registered with `add()`.
    A built-in token "IGNORE" (whitespace, `\\s+`) is matched like any other
    token but silently dropped from the output of `tokenize()`.
    """
    def __init__ (self) :
        # token name -> regexp source; "IGNORE" entries are skipped in output
        self._tok = {"IGNORE" : r"\s+"}   # raw string: avoid invalid-escape SyntaxWarning
        # token name -> callable applied to the matched text to build the value
        self._type = {}
        self._compile()
    def _compile (self) :
        """Rebuild the master regexp: one named group per declared token.

        No "^" anchor is needed: `tokenize()` uses `Pattern.match(text, pos)`,
        which is already anchored at `pos`.
        """
        self._r = re.compile("|".join("(?P<%s>%s)" % (name, regexp)
                                      for name, regexp in self._tok.items()),
                             re.MULTILINE)
    def add (self, name, regexp, type=str) :
        """Declare token `name`, matched by `regexp`.

        `type` is called on the matched text to produce the token value
        (e.g. `int` for numeric tokens).  `name` must be a valid Python
        identifier because it is used as a regexp group name.
        """
        self._tok[name] = regexp
        self._type[name] = type
        self._compile()
    def __getitem__ (self, name) :
        """Return a final grammar `Symbol` for a declared token name.

        Raises `KeyError` if `name` was never declared.
        """
        if name in self._tok :
            return Symbol(name, final=True)
        raise KeyError("unknown token")
    def tokenize (self, text) :
        """Split `text` into a list of final `Symbol`s carrying typed values.

        Raises `ValueError` when some position of `text` matches no declared
        token, or when a token definition matches the empty string (which
        would otherwise loop forever).
        """
        result = []
        pos = 0
        end = len(text)
        while pos < end :
            # match() with an explicit pos is anchored there and avoids the
            # O(n**2) cost of repeatedly slicing text[pos:]
            match = self._r.match(text, pos)
            if match is None :
                raise ValueError("invalid input string")
            if match.end() == pos :
                # a zero-width match would make no progress: fail loudly
                # instead of spinning forever
                raise ValueError("invalid input string")
            # exactly one top-level alternative matched; groupdict preserves
            # declaration order, so the first non-None group is the token name
            for group, value in match.groupdict().items() :
                if value is not None :
                    pos = match.end(group)
                    if group != "IGNORE" :
                        result.append(Symbol(group, final=True,
                                             value=self._type[group](value)))
                    break
        return result
