from .compat import *
from .tokens import Tokenizer
from .automata import Automaton
from .grammars import Symbol, Grammar

class _Regexp :
    """Wrap an Automaton and provide regexp combinators.

    Union (``|``), concatenation (``&``) and the star/plus closures are
    implemented by splicing the operands' automata together with
    epsilon transitions (labelled ``""``), Thompson-style.
    """
    def __init__ (self, automaton) :
        # automaton: the underlying Automaton recognising this regexp
        self.automaton = automaton
    def __str__ (self) :
        return self.automaton.name
    def _add (self, target) :
        """Copy every state and transition of self.automaton into
        `target`, shifting state numbers past target's current size so
        they do not collide with states already present."""
        shift = len(target)
        for name, state in self.automaton :
            target.add_state(shift + name, initial=False,
                             final=state.final)
        for name, state in self.automaton :
            for label, succ in state :
                target.add_transition(shift+name, shift+succ, label)
    def __or__ (self, other) :
        """Union: fresh start state with epsilon moves to both
        operands' start states."""
        # Defer to other.__ior__ when it exists (Empty absorbs unions).
        # Only catch the missing-method case; the original bare
        # `except:` silently swallowed every exception.
        try :
            return other.__ior__(self)
        except AttributeError :
            pass
        result = Automaton("(%s|%s)" % (self.automaton.name, other.automaton.name))
        result.add_state(0, initial=True, final=False)
        self._add(result)    # self's states land at offset 1
        other._add(result)   # other's states follow self's
        result.add_transition(0, self.automaton.start+1, "")
        result.add_transition(0, other.automaton.start+len(self.automaton)+1, "")
        return _Regexp(result)
    def star (self) :
        """Kleene star: the plus closure with the start state also made
        final, so the empty word is accepted."""
        result = self.pstar()
        result.automaton.name = "(%s*)" % self.automaton.name
        result.automaton[result.automaton.start].final = True
        return result
    def pstar (self) :
        """Plus closure: epsilon transition from every final state back
        to the start state."""
        result = Automaton("(%s+)" % self.automaton.name)
        self._add(result)
        result.start = self.automaton.start
        for name, state in result :
            if state.final :
                result.add_transition(name, result.start, "")
        return _Regexp(result)
    def __and__ (self, other) :
        """Concatenation: self's final states gain epsilon moves into
        other's start and stop being final."""
        # Same pattern as __or__: let Empty absorb the concatenation.
        try :
            return other.__iand__(self)
        except AttributeError :
            pass
        result = Automaton(self.automaton.name + other.automaton.name)
        self._add(result)
        result.start = self.automaton.start
        other._add(result)
        # other's states were shifted by len(self.automaton).
        # NOTE(review): assumes other's start state is numbered 0 —
        # confirm against Automaton's numbering convention.
        target = len(self.automaton)
        for name, _state in self.automaton :
            state = result[name]
            if state.final :
                state.final = False
                result.add_transition(name, target, "")
        return _Regexp(result)

class Word (_Regexp) :
    """Regexp matching exactly one literal word (possibly empty)."""
    def __init__ (self, word) :
        # Build a linear chain of states with one transition per
        # letter; the empty word is displayed as "<epsilon>".
        self.automaton = Automaton(word)
        if not word :
            self.automaton.name = "<epsilon>"
        self.automaton.add_state(0, initial=True, final=False)
        for position, letter in enumerate(word, start=1) :
            self.automaton.add_state(position, initial=False, final=False)
            self.automaton.add_transition(position-1, position, letter)
        # only the end of the chain accepts
        self.automaton[len(word)].final = True

class Empty (_Regexp) :
    """Regexp for the empty language: absorbing for concatenation,
    neutral for union, fixed point of the closures."""
    def __init__ (self) :
        auto = Automaton("<empty>")
        auto.add_state(0, initial=True, final=False)
        auto.add_state(1, initial=False, final=True)
        self.automaton = auto
    def __and__ (self, other) :
        # empty & anything == empty
        return self
    __iand__ = __and__
    def __or__ (self, other) :
        # empty | other == other
        return other
    __ior__ = __or__
    def star (self) :
        # closures of the empty language stay empty here
        return self
    pstar = star

class Any (_Regexp) :
    """Regexp matching any single letter out of `letters`."""
    def __init__ (self, letters) :
        auto = Automaton("[%s]" % letters)
        auto.add_state(0, initial=True, final=False)
        auto.add_state(1, initial=False, final=True)
        # one transition per allowed letter, all into the final state
        for char in letters :
            auto.add_transition(0, 1, char)
        self.automaton = auto

class AnyStar (_Regexp) :
    """Regexp matching any sequence (possibly empty) of `letters`."""
    def __init__ (self, letters) :
        auto = Automaton("([%s]*)" % letters)
        # single state, initial and final, with a self-loop per letter
        auto.add_state(0, initial=True, final=True)
        for char in letters :
            auto.add_transition(0, 0, char)
        self.automaton = auto

lexer = Tokenizer()

# Token definitions for the regexp surface syntax. Raw strings fix the
# invalid escape sequences ("\w", "\(", ...) that raise SyntaxWarning
# on Python 3.12+; the string values are unchanged.
lexer.add("chars", r"\w+")
lexer.add("b_group", r"\(")
lexer.add("e_group", r"\)")
lexer.add("b_any", r"\[")
lexer.add("e_any", r"\]")
lexer.add("dash", "-")
lexer.add("or", r"\|")
lexer.add("option", "[*+?]")

# Non-terminal symbols of the regexp grammar.
RE, OPTION, ATOM, ANY = (Symbol(name, final=False)
                         for name in ("RE", "OPTION", "ATOM", "ANY"))

def _or_ (symbol, right) :
    symbol.regexp = right[0].regexp | right[2].regexp

def _concat_ (symbol, right) :
    symbol.regexp = right[0].regexp & right[1].regexp

def _option_ (symbol, right) :
    if right[0].value is None :
        if right[1].value == "+" :
            symbol.regexp = right[0].regexp.pstar()
        elif right[1].value == "*" :
            symbol.regexp = right[0].regexp.star()
        elif right[1].value == "?" :
            symbol.regexp = right[0].regexp | Word("")
    else :
        if right[1].value == "+" :
            symbol.regexp = Word(right[0].value[:-1]) & Word(right[0].value[-1]).pstar()
        elif right[1].value == "*" :
            symbol.regexp = Word(right[0].value[:-1]) & AnyStar(right[0].value[-1])
        elif right[1].value == "?" :
            symbol.regexp = Word(right[0].value[:-1]) & (Word(right[0].value[-1])()
                                                         | Word(""))

def _copy_ (symbol, right) :
    symbol.regexp = right[0].regexp

def _chars_ (symbol, right) :
    """Literal characters: build a Word regexp and keep the raw text
    (the text is reused by _option_ and _range_)."""
    text = right[0].value
    symbol.regexp = Word(text)
    symbol.value = text

def _group_ (symbol, right) :
    symbol.regexp = right[1].regexp

def _any_ (symbol, right) :
    """Character class '[ ... ]': match any one of the letters."""
    letters = right[1].value
    symbol.regexp = Any(letters)

def _range_ (symbol, right) :
    chars = [right[0].value[:-1] + right[2].value[1:]]
    chars.extend([chr(i) for i in range(ord(right[0].value[-1]),
                                        ord(right[2].value[0])+1)])
    symbol.value = "".join(chars)

gram = Grammar()
# Production rules as (left-hand side, right-hand side, reduce action).
for lhs, rhs, action in (
        (RE,     [OPTION],                                        _copy_),
        (RE,     [RE, lexer["or"], OPTION],                       _or_),
        (RE,     [RE, OPTION],                                    _concat_),
        (OPTION, [ATOM, lexer["option"]],                         _option_),
        (OPTION, [ATOM],                                          _copy_),
        (ATOM,   [lexer["chars"]],                                _chars_),
        (ATOM,   [lexer["b_group"], RE, lexer["e_group"]],        _group_),
        (ATOM,   [lexer["b_any"], ANY, lexer["e_any"]],           _any_),
        (ANY,    [lexer["chars"], lexer["dash"], lexer["chars"]], _range_),
        (ANY,    [lexer["chars"]],                                _chars_)) :
    gram.add(lhs, rhs, action)

parser = gram.SLR()

def compile (expr, determinize=False, minimize=False) :
    """Compile the regexp `expr` into an Automaton.

    With minimize=True, return the minimized, renumbered DFA (implies
    determinization); with determinize=True, return the renumbered DFA;
    otherwise return the raw automaton built by the parser.  Raises
    SyntaxError when `expr` is not a valid regexp.

    NOTE: this shadows the builtin `compile` inside the module; the
    name is kept because it is the exported public API (see __all__).
    """
    # The original wrapped this in `try/except SyntaxError: raise`,
    # which is a no-op (catch-and-re-raise of the same type) -- removed.
    tree = parser.match(lexer.tokenize(expr))
    if tree is None :
        # parser.match signals failure by returning None
        raise SyntaxError("invalid regular expression: %r" % expr)
    tree.regexp.automaton.name = expr
    if minimize :
        return tree.regexp.automaton.determinize().minimize().renumber()
    elif determinize :
        return tree.regexp.automaton.determinize().renumber()
    else :
        return tree.regexp.automaton

# Public API: the compiler entry point plus the regexp building blocks.
__all__ = ["compile", "Word", "Any", "AnyStar"]
