#!/usr/bin/env python

from grammar import *
from parser.parser  import *
from parser.lexer   import *

class Dot:
    """An LR item: a grammar rule, a lookahead symbol, and a position
    (the "dot") within the rule's right-hand side."""

    def __init__(self, rule, look = SymbolType.tEnd, dot = 0):
        self.rule = rule    # the grammar rule this item belongs to
        self.look = look    # lookahead symbol for this item
        self.dot  = dot     # index of the next symbol in the right-hand side

    def symbol(self):
        """Return the symbol immediately after the dot."""
        return self.rule[self.dot]

    def atEnd(self):
        """True once the dot has passed every right-hand-side symbol."""
        return self.symbol() == SymbolType.tEnd

    def suffix(self):
        """Rest of the right-hand side after the dot, then the lookahead."""
        return self.rule.right().suffix(self.dot + 1, self.look)

    def __repr__(self):
        return "%r: %r, %r" % (self.dot, self.rule, self.look)

class Dotter:
    """Interning cache of Dot objects, one per (rule, lookahead, position).

    Sharing a single Dot per triple lets callers collect dots in sets and
    compare them by identity without creating duplicates.
    """

    def __init__(self, rules):
        # dots[rule][position] maps lookahead symbol -> shared Dot instance.
        # One slot per dot position, including the position just past the
        # last right-hand-side symbol (hence length + 1).
        # NOTE: `range` replaces the Python-2-only `xrange` (removed in
        # Python 3; identical behavior in a bounded loop/comprehension).
        self.dots = {}
        for rule in rules:
            self.dots[rule] = [{} for _ in range(rule.right().length + 1)]

    def get(self, rule, look, dot = 0):
        """Return the unique Dot for (rule, look, dot), creating it on demand."""
        slot = self.dots[rule][dot]
        if look not in slot:
            slot[look] = Dot(rule, look, dot)
        return slot[look]

class ConflictInState(Exception):
    """A grammar Conflict annotated with the state it was detected in."""
    def __init__(self, s, c):
        Exception.__init__(self, "%r\n%r" % (s, c))

class StateGenerator:
    """Base class that closes a set of dots into one automaton state.

    Subclasses implement end() (what to record when a dot has consumed its
    whole rule) and shift() (how to record a transition on a symbol).
    """

    def __init__(self, generator, state, dots):
        self.generator = generator
        self.state  = state
        self.dots   = dots
        # follow maps each symbol appearing after a dot to the list of dots
        # expecting it; the None bucket is always present so move() has a
        # stable entry even when a dot yields no symbol.
        self.follow = {None: []}
        # Closure computation: for every queued dot, add fresh dots for the
        # rules producing the symbol under the dot, with lookaheads drawn
        # from FIRST of the dot's suffix.  dotset guards against re-queuing.
        queue  = list(self.dots)
        dotset = set(self.dots)
        while queue:
            d = queue.pop()
            s = d.symbol()
            self.follow.setdefault(s, []).append(d)
            for r in self.generator.processed.rulesFor(s):
                for l in generator.processed.FIRST(d.suffix()):
                    ad = self.generator.dot(r, l)
                    if ad not in dotset:
                        queue.append(ad)
                    dotset.add(ad)

    def move(self):
        """Emit this state's outcomes: end() for finished dots, shift() to
        successor states for the rest.

        A Conflict raised while filling the state is re-raised as
        ConflictInState so the offending state is part of the diagnostic.
        """
        try:
            dotsets = {}
            for sc, dots in self.follow.items():
                nd = set()
                for dot in dots:
                    if dot.atEnd():
                        self.end(dot.look, dot.rule)
                    else:
                        nd.add(self.generator.move(dot))
                # A symbol may stand for several concrete symbols; the None
                # bucket represents only itself.
                rep = [None] if sc is None else sc.represents()
                if nd:
                    for s in rep:
                        dotsets.setdefault(s, set()).update(nd)
            for s, ndts in dotsets.items():
                self.shift(s, self.generator.state(ndts))
        # `except X as e` replaces the Python-2-only comma form (a
        # SyntaxError in Python 3, deprecated since 2.6); valid on 2.6+.
        except Conflict as conflict:
            raise ConflictInState(self, conflict)

    def __repr__(self):
        # NOTE(review): __rrepr__ (double r) looks like a project-specific
        # method on states — confirm it is not a typo for __repr__.
        f = "\n".join([repr(l) + "\t" + "\n\t".join(map(repr, i)) for l, i in self.follow.items()])
        return "\n" + self.state.__rrepr__() + "\n\t" + f + "\n"

    # States compare equal to a dot collection when their dot sets match.
    def __eq__(self, dots       ): return self.dots == dots
    def end   (self, look, rule ): raise AbstractMethodError(self, self.end)
    def shift (self, look, state): raise AbstractMethodError(self, self.shift)

class Generator:
    """Driver that builds the complete state machine for a grammar.

    Subclasses supply createState() (the state factory) and get() (which
    wraps the start state in a parser or lexer).
    """
    # Lookahead symbol used to seed the initial dots; LexerGenerator
    # overrides this per instance before calling __init__.
    look = SymbolType.tEnd

    def __init__(self, grammar):
        self.states    = []
        self.queue     = []
        self.grammar   = grammar.augment()
        self.processed = GrammarProcessor(self.grammar)
        self.dotter    = Dotter(self.grammar.rules())
        self.start     = self.state(self.initialDots(self.processed.rulesFor()))
        # Every freshly created state is queued; moving it may in turn
        # create and queue further states, until a fixed point is reached.
        while self.queue:
            self.queue.pop().move()

    def initialDots(self, rules):
        """Seed dots: each start rule paired with every lookahead drawn
        from first(self.look)."""
        return set(self.dot(rule, look)
                   for look in self.processed.first(self.look)
                   for rule in rules)

    def state(self, dots):
        """Return the (possibly already existing) state for a dot set."""
        return self.stateGenerator(dots).state

    def stateGenerator(self, dots):
        """Find the generator whose dot set matches, or create, register
        and queue a new one."""
        for existing in self.states:
            if existing == dots:
                return existing
        created = self.createState(len(self.states), dots)
        self.states.append(created)
        self.queue.append(created)
        return created

    def dot (self, rule, look      ): return self.dotter.get(rule, look)
    def move(self, dot             ): return self.dotter.get(dot.rule, dot.look, dot.dot + 1)
    def get (self, tokenizer       ): raise AbstractMethodError(self, self.get)
    def getStates(self             ): return self.start
    def createState(self, id, dots ): raise AbstractMethodError(self, self.createState)

class ParserStateGenerator(StateGenerator):
    """Fills a ParserState: reductions (or Accept) at rule ends, Shift
    actions for transitions."""
    # The augmented start rule yields Accept instead of a Reduce.
    action = {Grammar.nAugmented: Accept}

    def end(self, look, rule):
        make = self.action.get(rule.left(), Reduce)
        self.state.addAction(look, make(rule))

    def shift(self, look, state):
        self.state.addAction(look, Shift(state))

class ParserGenerator(Generator):
    """Generator specialization that produces LR parser states."""

    def createState(self, id, dots):
        return ParserStateGenerator(self, ParserState(id, self.processed.ignore()), dots)

    def get(self, tokenizer):
        """Wrap the generated start state in a Parser over *tokenizer*."""
        return Parser(self.start, tokenizer)

class LexerStateGenerator(StateGenerator):
    """Fills a LexerState: finished rules mark accepting positions and
    terminal symbols become transitions."""

    def end(self, look, rule):
        self.state.setEnd(look, rule)

    def shift(self, look, state):
        # Only terminal symbols turn into lexer transitions; non-terminals
        # are silently skipped.
        if self.generator.processed.isTerminal(look):
            self.state.setNext(look, state)

class LexerGenerator(Generator):
    """Generator specialization that produces lexer (character DFA) states."""

    def __init__(self, grammar):
        # Seed the lookahead with the grammar's start symbol (overriding
        # the class-level tEnd) before the base class builds the machine.
        self.look = grammar.s
        Generator.__init__(self, grammar)

    def createState(self, id, dots):
        return LexerStateGenerator(self, LexerState(id), dots)

    def get(self, tokenizer):
        """Wrap the generated start state in a Lexer over *tokenizer*."""
        return Lexer(self.start, tokenizer)
