from cheqed.pyparsing import *
from cheqed.core import qtype, qterm, sequent, unification

# Short module-level aliases for pyparsing's operator-associativity
# constants, for use when registering operators.
LEFT = opAssoc.LEFT
RIGHT = opAssoc.RIGHT

class QVarBuilder:
    """Parse action helper that interns type variables by name.

    All occurrences of the same '?name' seen by one builder instance
    resolve to a single shared qtype.qvar object.
    """

    def __init__(self):
        # Maps variable name -> the qtype.qvar created for it.
        self.vars = {}

    def build(self, string, location, tokens):
        """pyparsing parse action: return the interned qvar for tokens[0]."""
        assert len(tokens) == 1
        name = tokens[0]
        if name not in self.vars:
            self.vars[name] = qtype.qvar()
        return self.vars[name]

def make_qtype_parser():
    """Build a pyparsing grammar for type expressions.

    Atoms are the base types 'obj', 'class' and 'bool', plus type
    variables written '?name'.  Repeated '?name's within one parse share
    a single qtype.qvar via a QVarBuilder local to this parser.  '->'
    combines atoms into function types; parse_fun folds the operand list
    right-to-left, so 'a -> b -> c' becomes qfun(a, qfun(b, c)).
    """
    ident = Word(alphanums + '_')
    qmark = Literal('?').suppress()
    var_builder = QVarBuilder()
    var = (qmark + ident).setParseAction(var_builder.build)

    obj = Literal('obj').setParseAction(lambda s, l, t: qtype.qobj())
    cls = Literal('class').setParseAction(lambda s, l, t: qtype.qclass())
    # Renamed from 'bool' so the builtin is not shadowed.
    bool_ = Literal('bool').setParseAction(lambda s, l, t: qtype.qbool())
    atom = obj | bool_ | cls | var

    fun = Literal('->').suppress()
    def parse_fun(string, location, tokens):
        # operatorPrecedence delivers same-precedence operands as one
        # flat group; fold it right-to-left into nested qfun types.
        tokens = tokens[0]
        result = tokens.pop()
        while tokens:
            result = qtype.qfun(tokens.pop(), result)
        return result

    ops = [
        (fun, 2, opAssoc.LEFT, parse_fun),
        ]

    return operatorPrecedence(atom, ops)

def parse_qtype(string):
    """Parse a type expression and return the resulting qtype.

    A fresh parser (and hence a fresh '?var' namespace) is built on
    every call, so type variables are scoped to a single string.
    """
    results = make_qtype_parser().parseString(string)
    return results.asList()[0]

# Convenience aliases for parse_qtype.
pq = parse_qtype
pty = parse_qtype
parse = parse_qtype

def parse_combo(op, arity, string, location, tokens):
    """Parse action: left-fold tokens into a Combination rooted at op.

    Produces Combination(...Combination(op, t0)..., tN) for the token
    sequence t0..tN.  'arity' is accepted for interface compatibility.
    """
    combined = op
    for operand in list(tokens):
        combined = qterm.Combination(combined, operand)
    return combined

# Binding op/arity through this factory avoids Python's late-binding
# closure behavior: lambdas created directly inside a loop would all
# see the loop variables' final values.
def make_combo(op, arity):
    """Bind op and arity eagerly, returning a pyparsing parse action."""
    def action(string, location, tokens):
        return parse_combo(op, arity, string, location, tokens)
    return action

def make_simple_combo(string, location, tokens):
    """Parse action: left-fold the tokens into nested Combinations.

    The first token is the operator; each remaining token is applied to
    the accumulated combination in order.
    """
    items = list(tokens)
    combined = items[0]
    for operand in items[1:]:
        combined = qterm.Combination(combined, operand)
    return combined

# what are binders going to look like?
# forall x . phi(x)
# forall x . x in X and phi(x)
# exists x . phi(x)
# exists_unique x . phi(x)
# set x . phi(x)
# set x . x in X and phi(x)
# and maybe, someday:
# forall x in X . phi(x)
def parse_binder(binder, string, location, tokens):
    """Parse action: render 'binder x . body' as binder applied to \\x.body."""
    assert len(tokens) == 2
    bound, body = tokens[0], tokens[1]
    abstraction = qterm.Abstraction(bound, body)
    return qterm.Combination(binder, abstraction)

def make_binder(binder):
    """Bind the binder constant eagerly, returning a pyparsing parse action."""
    def action(string, location, tokens):
        return parse_binder(binder, string, location, tokens)
    return action

def make_abstraction(string, location, tokens):
    """Parse action: build an Abstraction from a '\\x . body' match."""
    bound, body = tokens[0], tokens[1]
    return qterm.Abstraction(bound, body)

class ParseError(Exception):
    """Raised when a parsed term does not have the expected form."""
    pass

class TermPrinter(object):
    """Renders terms and sequents back to concrete syntax.

    Recognizes the parser's registered operators and binders by
    reparsing small templates (e.g. 'a op b') and pattern-matching the
    term against them.
    """

    def __init__(self, parser):
        self.parser = parser
        self.ops = {}
        # NOTE(review): parser.ops holds (op, arity, associativity)
        # triples, so 'name' is actually the op term object, not a
        # string; self.ops is also never read anywhere in this class.
        for name, arity, associativity in parser.ops:
            self.ops[name] = arity
            
        self.binders = parser.binders

    def termlist(self, termlist):
        """Render a list of terms as a comma-separated string."""
        return ', '.join([self.term(term) for term in termlist])
        
    def sequent(self, sequent):
        """Render a sequent as 'left terms |- right terms'."""
        return '%s |- %s' % (self.termlist(sequent.left),
                             self.termlist(sequent.right))

    def term(self, term):
        """Render a single term.

        Constants and variables print as their name; abstractions as
        (\\bound.body).  A combination is matched against each operator
        and binder template in turn; if none matches it is uncurried and
        printed as f(a, b, ...).  Implicitly returns None for a term
        with none of these roles.
        """
        if term.is_constant or term.is_variable:
            return term.name
        if term.is_abstraction:
            return '(\\%s.%s)' % (self.term(term.bound),
                                  self.term(term.body))
        if term.is_combination:
            for op, arity, associativity in self.parser.ops:
                if arity == 1:
                    # Reparse 'op a' as a pattern; 'a' matches any term.
                    pattern = self.parser.parse('%s a' % op.name)
                    try:
                        match = qterm.match(pattern, term)
                    except unification.UnificationError:
                        continue
                    return '(%s %s)' % (op.name, self.term(match['a']))
                elif arity == 2:
                    # Reparse 'a op b' as the binary pattern.
                    s = 'a %s b' % op.name
                    pattern = self.parser.parse(s)
                    try:
                        match = qterm.match(pattern, term)
                    except unification.UnificationError:
                        continue
                    return '(%s %s %s)' % (self.term(match['a']),
                                         op.name,
                                         self.term(match['b']))
            for binder in self.binders:
                # 'binder a . b' matches a bound variable and a body.
                pattern = self.parser.parse('%s a . b' % binder.name)
                try:
                    match = qterm.match(pattern, term)
                except unification.UnificationError:
                    continue
                return '(%s %s . %s)' % (binder.name,
                                       self.term(match['a']),
                                       self.term(match['b']))

            def uncurry(term):
                # Unwind nested Combinations into (operator, [operands]).
                if not term.is_combination:
                    return term, []
                operator, operands = uncurry(term.operator)
                return operator, operands + [term.operand]

            # Fallback: plain application syntax f(a, b, ...).
            operator, operands = uncurry(term)
            return '%s(%s)' % (self.term(operator),
                               ', '.join([self.term(rand) for rand in operands]))

def make_prefix_op(op):
    """Parse action factory: always yield op, ignoring the matched text."""
    def action(string, location, tokens):
        return op
    return action
    
class TermParser(object):
    """Configurable pyparsing grammar for terms and sequents.

    Operators and binders are registered first (add_op / add_binder);
    the grammar itself is built lazily on first parse().  Registration
    order of operators determines their precedence layering in
    _make_term_parser: earlier-registered operators bind more tightly.
    """

    def __init__(self):
        self.ops = []        # (op, arity, associativity) triples
        self.binders = []    # binder constant terms
        self._parser = None  # lazily-built line parser (see parse())
        self._cache = {}     # string -> parsed term memoization

    def add_op(self, op, arity, associativity):
        """Register an operator; op must be a constant term."""
        assert op.role == 'constant'
        self.ops.append((op, arity, associativity))

    def add_binder(self, binder):
        """Register a binder; it must be a function taking a function."""
        assert binder.qtype.name == 'fun'
        assert binder.qtype.args[0].name == 'fun'
        self.binders.append(binder)

    def _make_ident_parser(self):
        """Grammar for identifiers, optionally annotated 'name : type'."""
        def parse_untyped_ident(string, location, tokens):
            # A bare identifier becomes a Variable with a fresh type var.
            assert len(tokens) == 1
            return qterm.Variable(tokens[0], qtype.qvar())
        
        def parse_typed_ident(string, location, tokens):
            # tokens[0] is the Variable built by parse_untyped_ident;
            # substitute its fresh type variable with the annotated type.
            var = tokens[0]
            assert var.qtype.is_variable
            return var.substitute_type(tokens[1], var.qtype)

        col = Literal(':').suppress()

        qtype_expr = make_qtype_parser()

        untyped_ident = Word(alphanums + '_')
        typed_ident = untyped_ident + col + qtype_expr
        
        untyped_ident.setParseAction(parse_untyped_ident)
        typed_ident.setParseAction(parse_typed_ident)

        # '^' (Or) tries both alternatives and keeps the longest match,
        # so a trailing ': type' annotation wins when present.
        return untyped_ident ^ typed_ident

    def make_sequent(self, string, location, tokens):
        """Parse action: build a Sequent from two grouped term lists."""
        return sequent.Sequent(tokens[0], tokens[1])
    
    def _make_sequent_parser(self):
        """Grammar for 'term, ... |- term, ...' covering a whole line."""
        term = self._make_term_parser()
        comma = Literal(',').suppress()
        # Either side of the turnstile may be empty.
        term_list = Group(Optional(term) + ZeroOrMore(comma + term))

        turnstile = Literal('|-').suppress()
        seq = term_list + turnstile + term_list + Suppress(LineEnd())
        seq.setParseAction(self.make_sequent)

        return seq
    
    def _make_term_parser(self):
        """Build the term grammar one precedence layer at a time.

        Each layer wraps the previous one ('last'); the loops below add
        prefix sections, application, infix operators, binders and
        abstractions in that order, from tightest to loosest binding.
        """
        expr = Forward()

        lpar = Literal('(').suppress()
        rpar = Literal(')').suppress()
        comma = Literal(',').suppress()
        ident = self._make_ident_parser()
        
        # Innermost layer: an identifier or a parenthesized expression.
        last = ident | (lpar + expr + rpar)

        this = Forward()
        arglist = Forward()

        # prefix notation
        # Section syntax '(op)' yields the bare operator constant.
        prefix_ops = []  # NOTE(review): never used after this point
        for op, arity, associativity in self.ops:
            this = Forward()  # NOTE(review): reassigned but unused here

            # '=' can appear inside longer names, so match it as a plain
            # Literal rather than a whole-word Keyword.
            if op.name == '=':
                keyword = Literal(op.name).suppress()
            else:
                keyword = Keyword(op.name).suppress()

            match = lpar + keyword + rpar
            match.setParseAction(make_prefix_op(op))
            last = match | last

        # prefix notation for binders
        for binder in self.binders:
            this = Forward()  # NOTE(review): reassigned but unused here

            keyword = Keyword(binder.name).suppress()

            match = lpar + keyword + rpar
            match.setParseAction(make_prefix_op(binder))
            last = match | last            
        
        # Function application: f(a, b, ...).
        match = last + arglist
        match.setParseAction(make_simple_combo)
        this << (match | last)
        last = this

        # One layer per registered operator, loosest-binding last.
        for op, arity, associativity in self.ops:
            this = Forward()

            if op.name == '=':
                keyword = Literal(op.name).suppress()
            else:
                keyword = Keyword(op.name).suppress()
                
            if arity == 1:
                match = keyword + last
            elif arity == 2:
                # NOTE(review): 'last op this' is right-recursive, so
                # binary operators parse right-associatively; the
                # registered associativity value is never consulted.
                match = last + keyword + this

            match.setParseAction(make_combo(op, arity))
            this << (match | last)
            last = this

        # binders
        # 'binder x . body' syntax.
        dot = Literal('.').suppress()
        for binder in self.binders:
            this = Forward()
            
            keyword = Keyword(binder.name).suppress()
            match = keyword + this + dot + this
            match.setParseAction(make_binder(binder))
            
            this << (match | last)
            last = this      

        # abstractions
        # '\\x . body' syntax, the loosest-binding layer.
        lambda_ = Literal('\\').suppress()
        this = Forward()
        match = lambda_ + this + dot + this
        match.setParseAction(make_abstraction)
        this << (match | last)
        last = this
        
        # argument lists for combinations
        arglist << (lpar + (ZeroOrMore(last + comma) + last) + rpar)

        expr << last
        expr.enablePackrat()  # memoize to tame the layered grammar's cost
        expression = expr

        return expression

    def make_line_parser(self):
        """Term grammar anchored to the end of the line."""
        term = self._make_term_parser()
        parser = term + Suppress(LineEnd())
        return parser
    
    def parse(self, string):
        """Parse a term, building the grammar lazily and caching results."""
        if string in self._cache:
            return self._cache[string]

        if self._parser is None:
            self._parser = self.make_line_parser()

        parsed = self._parser.parseString(string).asList()[0]
        self._cache[string] = parsed
        return parsed

    def parse_combo(self, string):
        """Parse a term and require that it is a combination.

        Raises ParseError if the result has any other role.
        """
        result = self.parse(string)
        if result.role != 'combination':
            raise ParseError('%s is not a combination.' % result)
        return result

    def parse_sequent(self, string):
        """Parse a sequent line.

        NOTE(review): the sequent grammar is rebuilt on every call,
        unlike the cached term parser used by parse().
        """
        result = self._make_sequent_parser().parseString(string)
        return result.asList()[0]

# Shared module-level parser used by the convenience functions below.
the_term_parser = TermParser()
def parse_term(string):
    """Parse a term using the shared module-level TermParser."""
    return the_term_parser.parse(string)

def add_op(op, arity, associativity):
    """Register an operator on the shared module-level parser.

    Bug fix: the previous version forwarded only 'op', so every call
    raised TypeError because TermParser.add_op requires
    (op, arity, associativity).  All three arguments are now accepted
    and forwarded.
    """
    the_term_parser.add_op(op, arity, associativity)
