from __future__ import generators
import os,sys,sets,random,logging
from pprint import pprint

import boost_semantic_graph
import posnet_compare


from posnet_handlers import * # import all handlers

# Query-handler dispatch table: maps the algorithm name carried in
# query.query_type to its handler generator (imported from posnet_handlers).
# The "(Point query)" / "(Range query)" suffix is also parsed by
# posnet_local_semantic_graph.process_query to decide how the payload is
# normalized before it is handed to the handler.
algorithms = {  "first_fit (Point query)":first_fit_point_query_handler,\
		"first_fit2 (Point query)":first_fit2_point_query_handler,\
		#"all_containers (Point query)":all_containers_point_query_handler,\
		#"all_contained (Point query)":all_contained_point_query_handler,\
		"all_parents (Point query)":all_parents_point_query_handler, \
		"all_children (Point query)":all_children_point_query_handler, \
		"all_top (Point query)":all_top_point_query_handler, \
		"parents (Point query)":parents_point_query_handler, \
		"exact (Point query)":exact_point_query_handler, \
		"all_match (Range query)":all_match_range_query_handler  }  

# Relation names <-> boost-graph edge weights.  The extension stores a
# relation as an integer edge weight; these tables translate both ways, and
# weight_sym maps a weight to the weight of the reverse relation
# (c2p <-> p2c, p2p is its own reverse).
Relation_to_weight = dict(c2p=100, p2p=10, p2c=1)
weight_to_relation = dict([(w, r) for (r, w) in Relation_to_weight.items()])
weight_sym = {100: 1, 10: 10, 1: 100}


def compare(a,b):
	"""Delegate point comparison to posnet_compare.compare.

	NOTE(review): the exact comparison semantics live in posnet_compare and
	are not visible from this file.
	"""
	return posnet_compare.compare(a,b)

def SemanticPoint_to_tuple(sp):
	"""Convert a boost SemanticPoint into a plain Python tuple.

	sp must expose size() and get(i); the returned tuple holds the
	coordinates in index order, so it is hashable and usable as a dict key
	(see get_vertex_id_dict).
	"""
	# range() is used for consistency with the rest of the file; the original
	# accumulated the tuple with repeated `+=`, which is quadratic.
	return tuple([sp.get(i) for i in range(sp.size())])

def point_to_SemanticPoint(p):
	"""Copy the coordinates of sequence p into a new boost SemanticPoint."""
	sp = boost_semantic_graph.SemanticPoint(len(p))
	for idx, coord in enumerate(p):
		sp.set(idx, coord)
	return sp
		

# a semantic graph with info
class posnet_local_semantic_graph(object):
	def __init__(self,poset):
		object.__init__(self) #,poset)
		self._poset = dict()
		self._poset_keys = [] #self._poset.keys()
		poset_keys = poset.keys()
		poset_keys.sort()
		for k in poset_keys:
			self.add_poset_key(k, poset[k] )
		#self._poset_keys.sort()
		self._graph = boost_semantic_graph.SemanticGraph() # internal semantic graph structure
		self._media = None # the overlay node that maitain info on graph nodes => if a node is external, a query will be pushed out from this graph
	#-----------------------------------------

	def has_poset_key(self,key):
		for kk in self._poset_keys:
			if kk == key or kk[1] == key: return True
		else: return False 

	def add_poset_key(self,key,vals = None):
		if type(key) == type( (1,2) ):
			key = key[-1]
		if not self.has_poset_key(key):
			self._add_poset_key_nocheck(key,vals)	

	def _add_poset_key_nocheck(self,key,vals = None):
		l = len(self._poset_keys)
		k = (l,key)
		if vals is None: vals  = []
		self._poset[k] = vals
		self._poset_keys.append(k)
		self._poset_keys.sort()
	
	def poset(self): return self._poset
	#-----------------------------------------
	def __len__(self): return self._graph.num_vertices()
	def get_vertex_id_dict(self):
		_space_point_to_vertex_id = dict()
		for vertex_id in range(self._graph.num_vertices()):
			_space_point_to_vertex_id[ SemanticPoint_to_tuple(self._graph.point(vertex_id) ) ] = vertex_id	
		return _space_point_to_vertex_id
	
	def space_point(self,vertex_id):
		""" return the node id from its description (if exist in this semantic graph)"""
		ret = None
		if self._graph.num_vertices() > vertex_id:
			ret = SemanticPoint_to_tuple(self._graph.point(vertex_id))
		if ret is None: raise Exception("node not found with vertex_id: "+str(vertex_id))
		return ret
	
	#-----------------------------------------

	def n_nodes(self): return self._graph.num_vertices()

	def random_node(self):
		return random.choice(range(0,self.n_nodes()))

	def random_space_point(self):
		return self.space_point(self.random_node()) 

	def set_media(self,onode): self._media = onode
	
	def __str__(self):
		import StringIO
		out  = StringIO.StringIO()
		#print >>out,"graph.nodes:",self._graph.nodes()
		#print >>out,"graph.space_points:",self._vertex_id_to_space_point
		#print >>out,"graph.vertex_id:",self._space_point_to_vertex_id
		#print >>out,"graph.edges:",self._graph.edges()
		### print >>out,"graph.relations:",self._links
		return out.getvalue()
	
	
	def vertex_id(self,space_point):
		""" return the node id from its description (if exist in this semantic graph)"""
		sp = point_to_SemanticPoint(space_point)	
		v = self._graph.get_vertex(sp)
		if v == -1:
			raise Exception("node not found with description: "+str(space_point))
		else: return v
	
	def space_point_table(self,space_point):
		ty = type( space_point)  
		if ty == type( (1,2) ): return [x for x in  space_point]
		elif ty == type( [] ): return space_point
		else:
			try:#print type(space_point),space_point
				return [ space_point[x[-1]]  for x in self._poset_keys ]
			except Exception,e:
				print "poset keys",self._poset_keys
				print >>sys.stderr,"Error while trying to convert point to table: "+str(space_point)
				from traceback import print_exc
				print_exc()
				sys.stderr.flush()
				raise

	def hashable_space_point(self,space_point):
		ty = type( space_point)  
		if ty == type( (1,2) ): return space_point
		elif ty == type( [] ):
			space_point_copy = ()
			for x in space_point: space_point_copy += x,
			return space_point_copy 
		else:
			space_point_copy = ()
			try:
				for x in self._poset_keys: space_point_copy += space_point[x[-1]],
				return space_point_copy 
			except:
				print >>sys.stderr,"semantic_graph: error while treating space point",space_point
				raise

	def add_node(self,space_point):
		""" add a vertex in semantic graph if not exist for this data descrption""" 
		sp = point_to_SemanticPoint(space_point)
		vertex_id = self._graph.add_vertex(sp)
		return vertex_id

	#def _iter_relation(self,vertex_id,rel):
	#	neigh = self._graph[vertex_id]
	#	for v,relation in neigh.items():
	#		if relation == rel: yield v
	def iter_edges(self):
		l = boost_semantic_graph.EdgeList()
		self._graph.iter_edges(l)	
		for i in xrange(l.size()):
			u  = l.getsrc(i)
			v  = l.getdst(i)
			w  = l.getw(i)
			yield (u,v,weight_to_relation[w])
	
	def is_top(self,vertex_id):
		global Relation_to_weight
		c2p = Relation_to_weight["c2p"]
		if self._graph.has_neighbor_weight(vertex_id,c2p): return False
		else: return True
		
	def iter_parents(self,vertex_id):
		global Relation_to_weight
		list = boost_semantic_graph.NodeList()
		self._graph.neighbors_weight(vertex_id,list,Relation_to_weight["c2p"])
		for xi in range(list.size()):
			yield list.get(xi)	
	def random_parent(self,vertex_id):
		global Relation_to_weight
		list = boost_semantic_graph.NodeList()
		self._graph.neighbors_weight(vertex_id,list,Relation_to_weight["c2p"])
		try:
			i = random.choice(range(list.size()))
			return list.get(i)	
		except: return None
		
	def iter_children(self,vertex_id):
		global Relation_to_weight
		list = boost_semantic_graph.NodeList()
		self._graph.neighbors_weight(vertex_id,list,Relation_to_weight["p2c"])
		for xi in range(list.size()):
			yield list.get(xi)	

	def iter_peers(self,vertex_id):
		global Relation_to_weight
		list = boost_semantic_graph.NodeList()
		self._graph.neighbors_weight(vertex_id,list,Relation_to_weight["p2p"])
		for xi in range(list.size()):
			yield list.get(xi)	
	
	def remove_node(self,vertex_id):
		self._graph.remove_vertex( vertex_id)

	def add_edge(self,vertex_id1,vertex_id2,relation,relation_sym,bidir = None):
		global Relation_to_weight
		if bidir is None: bidir = True
		if not self._graph.has_edge(vertex_id1,vertex_id2):
			self._graph.add_edge(vertex_id1,vertex_id2,Relation_to_weight[relation])
			if bidir:
				self._graph.add_edge(vertex_id2,vertex_id1,Relation_to_weight[relation_sym])

	def delete_edge(self,vertex_id1,vertex_id2):
		self.remove_edge_unidir(vertex_id1,vertex_id2)
		self.remove_edge_unidir(vertex_id2,vertex_id1)
	def remove_edge_unidir(self,vertex_id1,vertex_id2):
		if self._graph.has_edge(vertex_id1,vertex_id2):
			self._graph.remove_edge(vertex_id1,vertex_id2)

	#def degree(self,vertex_id):
	#	return len(self._data[vertex_id]["neighbors"])

	def process_query(self,receiver,algorithm,request_to_treat,sender = None,link_type = None):
		if link_type is None: link_type = ""
		if link_type is None and sender is not None: link_type = self.get_link_type(sender,receiver)
		query_handler = algorithms[algorithm] # find from algorithm
		if algorithm.find("(Point query)") > -1 and request_to_treat is not None:
			request_to_treat = self.space_point_table(request_to_treat)
		elif algorithm.find("(Range query)") > -1 and request_to_treat is not None:
			first_point =  self.space_point_table(request_to_treat[0]) 
			second_point =  self.space_point_table(request_to_treat[1]) 
			request_to_treat = (first_point,second_point)
		for returned_info in query_handler(request_to_treat,receiver,self,sender,link_type):
			#print "In semantic(",algorithm,"): return",returned_info
			yield returned_info

	#def successors(self,vertex_id):
	#	return self._graph.successors(vertex_id)
	#def predecessors(self,vertex_id):
	#	return self._graph.predecessors(vertex_id)
	def parents(self,vertex_id):
		return [x for x in self.iter_parents(vertex_id)]
		#return self._data[vertex_id][Relations["c2p"]]
	def children(self,vertex_id):
		return [x for x in self.iter_children(vertex_id)]
		#return self._data[vertex_id][Relations["p2c"]]
	def peers(self,vertex_id):
		return [x for x in self.iter_peers(vertex_id)]
		#return self._data[vertex_id][Relations["p2p"]]
			
	def get_link_type(self,src,dst):
		global weight_to_relation
		weight = self._graph.weight(src,dst)
		relation  = weight_to_relation[weight]
		return relation 
	def get_link_type_sym(self,src,dst):
		global weight_to_relation
		return weight_to_relation[weight_sym[self._graph.weight(src,dst)]]
	def relation(self,s_id,t_id):
		global weight_to_relation
		return weight_to_relation[self._graph.weight(s_id,t_id)]
	def relation_sym(self,s_id,t_id):
		global weight_to_relation
		return weight_to_relation[weight_sym[self._graph.weight(s_id,t_id)]]
		#return self._links[(s_id,t_id)]
	#def nodes(self):
	#	return self._graph.nodes()
	#def top(self):	
	#	ret  = []
	#	for x in self._graph.nodes():
	#		if len(self._data[x]["peers"]) > 0: ret.append(x)
	#	return ret	
	
	def degree_distrib(self):
		ret = [] 
		for x in self._graph.nodes_iter():
			deg = self._graph.out_degree(x)
			if deg == 0: continue
			ret.append(deg)
		ret.sort()
		ret.reverse()	
		return ret 	

	def split(self):
		neighbor = sets.Set() 
		is_top = sets.Set()
		local_top = sets.Set() 
		global weight_to_relation
		global Relation_to_weight
		for x in xrange(self._graph.size()):
			if self._graph.out_degree(x) == 0:
				neighbor.add(x)
				continue
			#parent_list = []
			#child_list = []a
			has_parents  = self._graph.has_neighbor_weight( Relation_to_weight["c2p"])
			if not has_parents: local_top.add(x)
		#-------------------------
		#print "graph.neighbors",neighbor
		subgraph1 = sets.Set()
		subgraph2 = sets.Set()
		node_to_choice = dict()
		firstchain= []

		while len(local_top) == 1:
			#print "graph.single_top",local_top
			top_alone = local_top.pop()
			subgraph1.add(top_alone)
			node_to_choice[top_alone] = 1
			neighs = boost_semantic_graph.NodeList()
			self._graph.neighbors(top_alone,neighs)
			for neighbo in neighs:
				if self._graph.out_degree(neighbo) == 0: continue
				if  weight_to_relation[self._graph.weight(top_alone,neighbo)] != "p2c"or neighbo in neighbor: continue
				local_top.add(neighbo)
			if len(local_top) > 1: break
			firstchain.append(top_alone) 
		#-----------------------------------
		# if finish, this is a chain ->beak into two parts
		if len(local_top) == 0:
			depth = len(subgraph1)
			if depth > 1:
				tomove = firstchain[x:]
				for x in tomove:
					subgraph1.remove(x)
					subgraph2.add(x)
			return subgraph1,subgraph2
		del firstchain
		#--------------------------------
		one = False
		starting_one = sets.Set()
		starting_two = sets.Set()
		for no in local_top:
			if one:
				subgraph1.add(no)
				starting_one.add(no)
				node_to_choice[no] = 1
				one = False
			else:
				subgraph2.add(no)
				starting_two.add(no)
				node_to_choice[no] = 2
				one = True
		#print "1:",subgraph1,"2:",subgraph2
		node_to_local_top = dict()
		for ltnode in local_top:
			to_visit = sets.Set()	
			to_visit.add( ltnode )
			while len(to_visit) > 0:
				element = to_visit.pop()
				if not node_to_local_top.has_key(element): node_to_local_top[element] = sets.Set()
				node_to_local_top[element].add(node_to_choice[ltnode])
				children = boost_semantic_graph.NodeList()
				self._graph.neighbors_weight(top_alone,children,Relation_to_weight["p2c"])
				for child in children:
					if self._graph.out_degree(child) == 0: continue
					to_visit.add( child )
		todel = []
		#print "classify",node_to_local_top	
		for no,li in node_to_local_top.items():
			if len(li) == 1:	
				choice = li.pop()
				if choice == 1:
					subgraph1.add(no)
				else:
					subgraph2.add(no)
				todel.append(no)
		for no in todel: del node_to_local_top[no]
		#print "tie_break",node_to_local_top	
		for no in node_to_local_top.keys():
			subgraph1.add(no)
		#print "1:",subgraph1,"2:",subgraph2
		return subgraph1,subgraph2
	#def degree_distribution(self):
	#	raise Exception("not implemented")
	#	ret = dict()
	#	selector = { "c2p":"parents","p2c":"children","p2p":"peers"}
	#	for x in self._graph.nodes_iter():
	#		if self._graph.out_degree(x) == 0: continue
	#		count = { "peers":0,"parents":0,"children":0}
	#		for n in self.sucessors(x):
	#			link = (x,n)
	#			relation = self._links[link]
	#			count[selector[relation]] += 1 
	#		ret[x] = count
	#	return ret  




#--------------------------------------
def process_job_iter(sg,source_vertex_id,target_vertex_id,query,already_propagated = None): 
	""" process a query assumed to be running at semantic vertex target_vertex_id.

	Generator: each yielded item is either
	- (0, destination_vertex_id): the query must be propagated to destination, or
	- (1, space_point): a data point matching the query at this graph.
	already_propagated, a mutable set of (vertex_id, link_type) pairs, is
	updated in place and used to suppress duplicate propagations; pass None
	to disable duplicate suppression for this step.
	"""
	#--------------	
	allowed_to_execute = True
	list_type = type( (1,2) )  # NOTE(review): this is the *tuple* type, despite the name
	link_type = None
	if True:
		try:
			link_type = sg.get_link_type( source_vertex_id, target_vertex_id)
			alrs = len(already_propagated)
			already_propagated.add( ( target_vertex_id, link_type) )
			# if the set did not grow, this (vertex, link) pair was already
			# handled -> skip execution entirely
			if alrs == len(already_propagated):
				allowed_to_execute = False
		except: pass  # best effort: source may be None, the edge missing, or already_propagated None
	#--------------
	if allowed_to_execute:
		#tstart = time.clock()
		source_link_type = None
		if source_vertex_id is not None:
			# prefer the direct link type; fall back to the symmetric one
			try: source_link_type = sg.get_link_type(source_vertex_id,target_vertex_id)
			except: source_link_type = sg.get_link_type_sym(target_vertex_id,source_vertex_id)
		for returned_info in sg.process_query(target_vertex_id,query.query_type,query.query,source_vertex_id,source_link_type):
			######self.logger.info("query #"+str(query.id)+": from semantic graph="+str(returned_info))
			if type(returned_info) == list_type:
				# a (request, destination) tuple from the handler
				request_data_to_propagate,destination = returned_info
				# at this point, this is a propagation
				thelinktype = None
				try:
					thelinktype = sg.get_link_type(target_vertex_id,destination)
				except:
					thelinktype = sg.get_link_type_sym(destination,target_vertex_id)
				if (destination,thelinktype) not in already_propagated:
					# propagation
					yield 0, destination
			else:
				# this is a match, return result
				vertex_id = returned_info
				space_point = sg.space_point(vertex_id)
				yield 1,space_point
		#self.logger.info("process_query %s"%str(time.clock()-tstart))
		
def process_job(sg,external_nodes,source_vertex_id,target_vertex_id,query,already_propagated = None):
	""" process a query starting from one (source, target) vertex pair.

	Repeatedly pops pending internal propagations and runs process_job_iter
	on each.  Returns (results, external_propagation, n_iterations) where:
	- results: set of (vertex_id, hashable space point) matches,
	- external_propagation: (vertex_id, destination) pairs whose destination
	  is listed in external_nodes (i.e. hosted on another overlay node),
	- n_iterations: number of propagation steps performed.
	"""
	external_propagation = sets.Set()
	internal_propagation = sets.Set()
	if already_propagated is None: already_propagated = sets.Set()
	results = sets.Set()
	internal_propagation.add(  (source_vertex_id,target_vertex_id)  )
	#------------
	n_iterations = 0
	while len(internal_propagation) > 0:
		source_vertex_id,target_vertex_id = internal_propagation.pop()
		n_iterations += 1
		for code,dat in process_job_iter(sg,source_vertex_id,target_vertex_id,query,already_propagated):
			if code == 0: # propagation
				_destination = dat
				if external_nodes.has_key(_destination):
					# destination hosted elsewhere: external propagation
					external_propagation.add( (target_vertex_id,_destination ) )
				else:
					# destination is local: keep propagating internally
					internal_propagation.add( (target_vertex_id,_destination ) )
			else: # result
				space_point = dat
				# BUGFIX: hash the whole space point (the original used
				# dat[0], a single coordinate) -- consistent with
				# posnet_process_jobs
				results.add( (target_vertex_id,sg.hashable_space_point(space_point)) )
	return (results,external_propagation,n_iterations)

def posnet_process_jobs(sg,external_nodes,source_and_dest,query,already_propagated = None):
	""" process a query seeded with several (source, target) vertex pairs.

	Same contract as process_job, but the internal propagation set starts
	with every pair in source_and_dest.  Returns the tuple
	(results, external_propagation, n_iterations) where results holds
	(vertex_id, hashable space point) matches and external_propagation the
	(vertex_id, destination) pairs whose destination is in external_nodes.
	"""
	#tstart = time.clock()
	external_propagation = sets.Set()
	internal_propagation = sets.Set()
	if already_propagated is None: already_propagated = sets.Set()
	results = sets.Set() # []
	for source_vertex_id,target_vertex_id in source_and_dest:	
		internal_propagation.add( ( source_vertex_id,target_vertex_id) )
	#------------
	n_iterations = 0
	while len(internal_propagation) > 0:
		source_vertex_id,target_vertex_id = internal_propagation.pop()
		n_iterations += 1
		for code,dat in process_job_iter(sg,source_vertex_id,target_vertex_id,query,already_propagated):
			######self.logger.info("process_job: iter on code="+str(code)+", dat="+str(dat))
			if code == 0: # propagation
				_destination = dat
				if external_nodes.has_key(_destination): 
					# destination hosted elsewhere: external propagation
					external_propagation.add( (target_vertex_id,_destination ) )
				else:
					# destination is local: keep propagating internally
					internal_propagation.add( (target_vertex_id,_destination ) )
			else: # result
				space_point = dat
				results.add( (target_vertex_id,sg.hashable_space_point(space_point)) )
	#####self.logger.info("query #"+str(query.id)+": end "+dbg+" "+str(query.query_type)+"( "+str(query.query)+" )")
	#tend = time.clock()
	#self.logger.info("propag: %s"%str(tend-tstart))
	return (results,external_propagation,n_iterations)




