# Compiler project
# Gary Duncan and Sameer Sherwani
# Work influenced by, and snippets/ideas taken from, the Dragon Book, www.python.org, wiki.python.org/moin/LanguageParsing, stackoverflow.com, and github.com

import copy, struct, functools, operator, datetime, re


# Program file
source_program = 'source.program'

# Setup files
source_grammar = 'source.grammer'


from Lex  import *
from Semantics import reductions


class stackTuple:
    """A parse-stack entry pairing a syntactic tag (syn) with an optional
    semantic value (sem)."""

    def __init__(self, syn, sem=None):
        self.syn, self.sem = syn, sem
        # p is what __str__ displays: the semantic value when present,
        # otherwise the syntactic tag.
        self.p = syn if sem is None else sem

    def __eq__(self, o):
        # Equality is by syntactic tag only; semantic values are ignored.
        return self.syn == o.syn

    def __str__(self):
        # FIX: __str__ must return a str, but sem (a token's value) need not
        # be one -- the original `return self.p` raised TypeError for
        # non-string semantic values.
        return str(self.p)

    @staticmethod
    def showSyn(t):
        """Render the syntactic tags of a stack/handle as one spaced line."""
        return " ".join([str(x.syn) for x in t])

    @staticmethod
    def showSem(t):
        """Render the semantic values of a stack/handle, '-' where absent."""
        return " ".join(['-' if x.sem is None else str(x.sem) for x in t])
	

class Parser:
    """Operator-precedence shift/reduce parser.

    Driven by a Structure precedence table (self.grammar); reads tokens from
    the module-global Scanner instance (bound in the __main__ block) and fires
    semantic actions from self.semFunc, keyed by reduction number, as handles
    are reduced.
    """

    # Bottom-of-stack / end-of-input sentinel.
    ending = stackTuple("<$>")
    # Symbol tables shared with every semantic action.
    symbolTables = []

    def parse(self):
        """Parse the Scanner's token stream, printing diagnostics controlled
        by the Scanner.flag* switches. Returns None."""
        moretokens = 1
        use_current_token = 0
        token = ''
        stack = [Parser.ending]
        while moretokens:
            if use_current_token:
                # Re-process the token that was saved before the last reduction.
                use_current_token = 0
                token = save_token
            else:
                token = Scanner.getNextToken()
                save_token = token
                if token.tag == '$':
                    # End of input: run the final semantic action (red #1).
                    # NOTE(review): handle/instStacktoken are unbound if '$'
                    # is the very first token -- pre-existing behavior.
                    itr = self.semFunc[1](self, handle, instStacktoken, Parser.symbolTables, Scanner.flag14, Scanner.flag15, Scanner.flag16)
                    if itr is not None:
                        for iTuple in itr:
                            if Scanner.flag13: print("          Tuple is: %s" % iTuple)
                    break
            if token is not None:
                if token.token_code == 0:
                    moretokens = 0
            else:
                print(Scanner.flag20)
            token = stackTuple(token.tag, token.value)
            # BUGFIX: the original tested `token is Parser.ending`, which can
            # never be true for a freshly constructed stackTuple; compare by
            # tag (stackTuple.__eq__) instead.
            if token == Parser.ending and stack[-1].syn == 'start': break
            relation = self.compareTokens(stack[-1], token)
            if Scanner.flag9:
                print("Top of Stack: %s, Input: %s, Relation: %s" % (stack[-1], token, Structure.pchar[relation]))

            if relation == 0:
                # No precedence relation between the pair: report, pop back to
                # a recovery point, then abandon the parse.
                print('Character Pair Error between tokens "%s" and "%s"' % (stack[-1], token))
                ignoredTokens, poppedStack = [], []
                while stack[-1].syn not in ('decllist', 'statlist'):
                    if stack[-1] is Parser.ending: break
                    poppedStack.append(stack.pop())
                    ignoredTokens.append(token)
                    if token.syn == ':': break
                break

            if relation in { Structure.EQ, Structure.LT }:
                stack.append(token)   # shift
            else:
                # Relation is GT: a handle ends at the top of stack -- reduce.
                if Scanner.flag8:
                    print("Syntax Stack Before red: %s" % stackTuple.showSyn(stack))
                if Scanner.flag12:
                    print("Semantic Stack Before red: %s" % stackTuple.showSem(stack))
                stack_save = list(stack)
                cnt = 0
                cnt_save = 100
                loopcnt = 0

                # Pop candidate handles of decreasing maximum length until one
                # matches a production's right side (at most 10 attempts).
                while loopcnt < 10:
                    handle = [stack.pop()]; cnt = 1
                    while cnt < cnt_save and stack[-1] is not Parser.ending and self.compareTokens(stack[-1], handle[0]) == Structure.EQ:
                        handle.insert(0, stack.pop()); cnt = cnt + 1
                    reduced = self.grammar.findProductionFromright_side([x.syn for x in handle])
                    if not reduced:
                            stack = list(stack_save); cnt_save = cnt - 1; cnt = 0; loopcnt = loopcnt + 1
                    else:
                            loopcnt = 10

                if not reduced:
                    # Diagnostic sweep: show the relations that built the
                    # unreducible handle.
                    handle = [stack.pop()]; cnt = 1
                    while stack[-1] is not Parser.ending and self.compareTokens(stack[-1], handle[0]) == Structure.EQ:
                        fred = self.compareTokens(stack[-1], handle[0])
                        print("%s %s %s" % (fred, stack[-1], handle[0]))
                        handle.insert(0, stack.pop())
                    fred = self.compareTokens(stack[-1], handle[0])
                    print("%s %s %s" % (fred, stack[-1], handle[0]))

                if Scanner.flag10:
                    print("Handle to find Production: %s" % stackTuple.showSyn(handle))

                reduced = self.grammar.findProductionFromright_side([x.syn for x in handle]) # match handle with right_side, find the red
                if not reduced:
                    # NOTE(review): execution falls through and crashes on
                    # reduced.findSymbol() below -- pre-existing behavior.
                    print('Reducibility Error with right_side "%s"' % stackTuple.showSyn(handle))
                instStacktoken = stackTuple(reduced.findSymbol())

                if self.compareTokens(stack[-1], instStacktoken) in { 0, Structure.GT }:
                    print('Stackability Error between top of stack "%s" and left_side "%s"' % (stack[-1], instStacktoken))

                if Scanner.flag7:
                    print("red #%d: %s --> %s" % (reduced.red, reduced.left_side, reduced.right_sideSymbs()))

                stack.append(instStacktoken)
                use_current_token = 1
                if reduced.red in self.semFunc:
                    itr = self.semFunc[reduced.red](self, handle, instStacktoken, Parser.symbolTables, Scanner.flag14, Scanner.flag15, Scanner.flag16)
                    if itr is not None:
                        for iTuple in itr:
                            if Scanner.flag13: print("          Tuple is: %s" % iTuple)
                else:
                    # No semantic action for this reduction: propagate the
                    # first constituent's semantic value.
                    instStacktoken.sem = handle[0].sem

                if Scanner.flag8:
                    print("Syntax Stack After red #%d: %s" % (reduced.red, stackTuple.showSyn(stack)))
                if Scanner.flag12:
                    print("Semantic Stack After red #%d: %s" % (reduced.red, stackTuple.showSem(stack)))

    def __init__(self, grammar, semFuncs=None):
        """grammar: a Structure instance; semFuncs: mapping of reduction
        number -> semantic action (FIX: was a mutable default argument)."""
        self.grammar = grammar
        self.semFunc = semFuncs if semFuncs is not None else {}
        self.errors = 0
        self.tCounter = 0

    def compareTokens(self, leftToken, rightToken):
        """Precedence relation between two stack tuples; the ending sentinel
        is below everything on its left and above everything on its right."""
        if leftToken is Parser.ending: return Structure.LT
        if rightToken is Parser.ending: return Structure.GT
        return self.grammar.compareTokens(leftToken.syn, rightToken.syn)

    def findErrorCount(self):
        """Number of errors recorded so far."""
        return self.errors


class stackTuple:
    """A parse-stack entry: a syntactic tag (syn) plus an optional semantic
    value (sem).

    NOTE(review): this re-binds a name already defined earlier in the file;
    being the later definition, it is the one in effect for tokens created
    at runtime.
    """

    def __init__(self, syn, sem=None):
        self.syn, self.sem = syn, sem
        # Displayed form: prefer the semantic value, fall back to the tag.
        self.p = syn if sem is None else sem

    def __eq__(self, o):
        # Two entries are equal when their syntactic tags match.
        return self.syn == o.syn

    def __str__(self):
        # FIX: convert explicitly -- sem may be a non-string token value, and
        # __str__ returning a non-str raises TypeError.
        return str(self.p)

    @staticmethod
    def showSyn(t):
        """One-line rendering of the syntactic tags in a stack/handle."""
        return " ".join([str(x.syn) for x in t])

    @staticmethod
    def showSem(t):
        """One-line rendering of the semantic values; '-' marks absence."""
        return " ".join(['-' if x.sem is None else str(x.sem) for x in t])
	
class Structure:
	"""Operator-precedence grammar.

	Reads productions from a grammar file and builds ptbl, the precedence
	table relating every ordered pair of grammar symbols (EQ / LT / GT
	bit flags, 0 meaning "no relation").
	"""

	# Relation bit flags; a table cell may hold a combination, which indexes
	# into pchar for display ('.'=none, '='=EQ, 'L'=LT, 'G'=GT, digits=mixes).
	EQ, LT, GT = [2**i for i in range(3)]
	pchar = ".=L3G567"

	def setup(self, grammar_in):
		# NOTE(review): self.__setup is never assigned anywhere visible, so
		# calling this raises AttributeError -- looks vestigial; confirm
		# before removing.
		return bool(self.__setup)

	def __init__(self, grammar_in):
		"""Parse the grammar file and precompute the precedence table.

		File format: one production per line; column 0 holds the left-side
		nonterminal (blank to continue the previous one), the remaining
		space-separated fields are the right side. Lines starting with '$'
		are comments.
		"""
		self.head, self.equals, self.heads, self.tails, self.p = None, set(), set(), set(), 0
		self.symbols1 = []       # nonterminals (appear as some left side)
		self.symbols2 = []       # remaining symbols (terminals, marks, etc.)
		self.productions = {}    # reduction number -> getProd

		# FIX: use a context manager so the grammar file is always closed.
		with open(grammar_in, 'r') as grammar_file:
			for this_line in grammar_file.readlines():
				# FIX: a blank line would crash on t[0] below; skip it.
				if not this_line.strip(): continue
				if this_line[0] == '$': continue  # comment line

				split = this_line.strip('\r\n').split(' '); r = split[1:] # split tokens into set
				if split[0]: self.head = split[0]; self.symbols1.append(self.head) # symbols1 = headers

				t = [x for x in r if x]; self.p += 1; self.productions[self.p] = getProd(left_side=self.head, right_side=t, red=self.p) # create the production
				self.heads.add((self.head, t[0])); self.tails.add((self.head, t[-1])) # first/last symbol of each right side
				for e1, e2 in zip(t[0:], t[1:]): self.equals.add((e1, e2)) # adjacent pairs within a right side

		# Right-side symbols that never appear as a left side.
		for t in { tok for x in self.productions.values() for tok in x.right_side }:
			if t not in self.symbols1 + self.symbols2: self.symbols2.append(t)
		self.symbols1.sort(); self.symbols2.sort()

		allSymbols = self.symbols1 + self.symbols2; z = len(allSymbols)
		dictSymbols = dict(zip(allSymbols, range(z)))

		gen_ = lambda x: zeros([x, x], int)  # z-by-z zero matrices
		E, H, T = gen_(z), gen_(z), gen_(z)

		for lt, rt in self.equals: E[dictSymbols[lt]][dictSymbols[rt]] = 1 # mark equal-precedence pairs
		for lt, rt in self.heads: H[dictSymbols[lt]][dictSymbols[rt]] = 1 # mark head pairs
		for lt, rt in self.tails: T[dictSymbols[lt]][dictSymbols[rt]] = 1 # mark tail pairs

		transitiveClosure(H)  # transitive closure of the head relation
		transitiveClosure(T)  # transitive closure of the tail relation

		L = matrixMultiply(E, H)  # less-than relation
		G = matrixMultiply(transpose(T), multimap(lambda x, y: x or y, E, L))  # greater-than relation

		# Convert the 0/1 relation matrices to EQ/LT/GT bit flags and merge.
		E = multimap(lambda x: Structure.EQ if x else 0, E)
		L = multimap(lambda x: Structure.LT if x else 0, L)

		G = multimap(lambda x: Structure.GT if x else 0, G)
		self.ptbl = multimap(lambda x, y, z: x or y or z, E, L, G) # final precedence table
		self.cache_sm = self.findSymbolicMap()

	def findSymbols(self):
		"""All grammar symbols: nonterminals first, then the rest."""
		return self.symbols1 + self.symbols2

	def findSymbolicMap(self):
		"""Mapping of symbol -> row/column index in ptbl."""
		return dict([(v, k) for k, v in enumerate(self.findSymbols())])

	def compareTokens(self, c1, c2):
		"""Precedence relation between two symbols; 0 for unknown symbols."""
		sm = self.cache_sm
		if c1 not in sm: return 0
		if c2 not in sm: return 0
		return self.ptbl[sm[c1]][sm[c2]]

	def findProductionFromright_side(self, right_side):
		"""The production whose right side equals the given symbol list, or None."""
		for i in self.productions.values():
			if i.right_side == right_side: return i
		return None

	def __str__(self):
		"""Pretty-print the precedence table with symbol labels."""
		s, symbs = str(), self.findSymbols(); z = len(symbs); mz = max(map(len, symbs)); mlist = [x.ljust(mz) for x in symbs]
		for i in range(mz): s += ((" " * mz) + (" %c" * z) % tuple([x[i] for x in mlist])) + "\n"
		for i, row in enumerate(self.ptbl): s += mlist[i] + (" %c" * z) % tuple([Structure.pchar[i] for i in row]) + "\n"
		return s

def matrixMultiply(matrix1, matrix2):
	"""Matrix product: each result cell is the dot product of a row of
	matrix1 with a column of matrix2."""
	columns = transpose(matrix2)
	return [[dotProduct(row, col) for col in columns] for row in matrix1]
	
def transitiveClosure(matrix):
	"""Compute the transitive closure of a 0/1 relation matrix in place.

	Repeatedly ORs the pivot row into every row that reaches it, until a
	full pass leaves the matrix unchanged. Returns None (mutates matrix).
	"""
	while True:
		snapshot = [row[:] for row in matrix]
		for col in range(len(matrix)):
			pivot = matrix[col]
			for row in matrix:
				if row[col]:
					row[:] = [a or b for a, b in zip(row, pivot)]
		if snapshot == matrix:
			return
		
def zeros(dims, data):
	"""Build a nested list of zero values with the given dimensions.

	dims is a list of sizes (e.g. [3, 3] for a 3x3 matrix); data is the
	element constructor (e.g. int). An empty dims returns data(0) itself.
	"""
	if not dims:
		return data(0)
	return [zeros(dims[1:], data) for _ in range(dims[0])]
	
def transpose(matrix):
	"""Return the transpose of a rectangular matrix (list of row lists).

	Raises TypeError if the rows are not all the same length.
	"""
	width = len(matrix[0])
	for row in matrix:
		if len(row) != width:
			raise TypeError("Matrix is jagged")
	return [[row[i] for row in matrix] for i in range(width)]

def dotProduct(vec1, vec2):
	"""Sum of pairwise products of the two vectors."""
	return sum(a * b for a, b in zip(vec1, vec2))
	
def multimap(func, *args):
	"""Apply func elementwise across equally-shaped nested list structures.

	Recurses while every argument is a list; applies func once every
	argument is a scalar. Raises TypeError on mixed depths; returns None
	for no arguments.
	"""
	if not args:
		return None
	is_list = [type(a) is list for a in args]
	if all(is_list):
		width = len(args[0])
		return [multimap(func, *[a[i] for a in args]) for i in range(width)]
	if not any(is_list):
		return func(*args)
	raise TypeError("Arguments not of same dimensionality")
	
class getProd:
	"""A single grammar production: left_side -> right_side, numbered red."""

	def __init__(self, left_side, right_side, red):
		self.left_side = left_side
		self.right_side = right_side
		self.red = red

	def right_sideSymbs(self):
		"""The right-hand side rendered as one space-joined string."""
		return " ".join(self.right_side)

	def findSymbol(self):
		"""The left-hand-side nonterminal of this production."""
		return self.left_side

	def __str__(self):
		return "%d %s %s" % (self.red, self.left_side, self.right_sideSymbs())
	

if __name__ == '__main__':
    now = datetime.datetime.now()
    # FIX: normalized Python-2 print statements to the function form already
    # used elsewhere in this file (output-identical, valid in Py2 and Py3).
    print('Name: Gary Duncan and Sameer Sherwani')
    print('email: ernestd@clemson.edu and ssherwa@clemson.edu')
    print('Time stamp: ' + now.strftime("%m-%d-%Y %H:%M:%S"))
    print('BEGIN PARSE')
    print('')

#   Scan - milestone 1
    # NOTE: the class names are deliberately rebound to instances here;
    # Parser.parse() resolves the module-global 'Scanner' to this instance.
    Scanner = Scanner()
    pgm = ''
    pgm = openProgram(pgm, source_program)

#   Parse - milestone 2
    grammar = Structure(source_grammar)
    sem = reductions

    Parser = Parser(grammar, sem)
    parse = Parser.parse()
    
