import math,sys,pickle,util
from pympler.classtracker import ClassTracker
import buffer
from pympler import summary
from operator import itemgetter

class HashSummaryBucket :
	"""Top-k weight summary fed through a bounded staging buffer.

	Incoming (key, weight) entries are accumulated in a buffer.Buffer of
	size `bufsize`; when the buffer fills up it is merged into
	`self.summary`, a plain list of (key, weight, error) tuples holding
	(roughly) the k heaviest keys seen so far.  Keys are presumably
	(i, j) index pairs of a product matrix -- see bulk_update.
	"""

	def __init__(self, k, bufsize):
		self.k = k				# number of heavy entries to retain in the summary
		self.summary = list()	# list of (key, weight, error) tuples
		self.bufsize = bufsize	# merge threshold for the staging buffer
		self.buffer = buffer.Buffer(self.bufsize)

	def add(self, entry):
		"""Stage `entry` = (key, weight) in the buffer; merge when full."""
		if self.buffer.has_element(entry[0]) :						  # update entry in buffer
			self.buffer.update_element(entry[0], entry[1])
		else :
			self.buffer.add(entry[0], (entry[0], entry[1], 0.0))	 # insert item
		if len(self.buffer) == self.bufsize :				   # merge if buffer is full
			self.merge(self.buffer)
			self.buffer.clear()

	def bulk_update(self, D2):
		"""Fold every element of buffer `D2` into the summary one at a time.

		NOTE(review): dead code -- the only call site (in add) is commented
		out.  The original body called has_element / increase_element /
		add_element / replace_min on self.summary, which is a plain list
		and has none of those methods, so it raised AttributeError on first
		use.  Rewritten against the list representation with
		SpaceSaving-style eviction (replace the minimum-weight entry,
		carrying its old weight as the error term) -- TODO confirm this
		matches the intended summary semantics before re-enabling.
		"""
		pos_of = dict((item[0], pos) for pos, item in enumerate(self.summary))
		for element in D2.get_buffer().values() :
			key = element[0]
			if key in pos_of :
				# key already summarized: accumulate the incoming weight
				pos = pos_of[key]
				cur = self.summary[pos]
				self.summary[pos] = (cur[0], cur[1] + element[1], cur[2])
			elif len(self.summary) < self.k :
				# room left; item is (key, weight, error) where key = (i, j)
				# is the index in the product matrix and weight its entry
				pos_of[key] = len(self.summary)
				self.summary.append(element)
			else :
				# summary full: evict the minimum-weight entry
				min_pos = 0
				for p in range(1, len(self.summary)) :
					if self.summary[p][1] < self.summary[min_pos][1] :
						min_pos = p
				victim = self.summary[min_pos]
				del pos_of[victim[0]]
				pos_of[key] = min_pos
				self.summary[min_pos] = (key, victim[1] + element[1], victim[1])

	def merge(self, D2):
		"""Merge buffer `D2` into the summary, keeping the heaviest entries.

		Set-union style: every summary entry is pushed into D2 (weights of
		shared keys are accumulated there), then the union is sorted by
		weight and truncated at the (k+1)-th weight.
		"""
		if len(self.summary) == 0 :
			self.summary.extend(D2.get_buffer().values())						# copy directly if summary is empty
			return

		for item in self.summary :
			if D2.has_element(item[0]) :
				# shared key: accumulate weight (and error) into D2's record.
				# NOTE(review): 3-arg update_element here vs 2-arg in add()
				# -- presumably the third argument is the error term; verify
				# against buffer.Buffer.
				D2.update_element(item[0], item[1], item[2])
			else :
				D2.add(item[0], item)

		temp = list(D2.get_buffer().values())
		temp.sort(key=itemgetter(1), reverse=True)

		self.summary = list()
		if len(temp) > self.k :
			# keep every item whose weight ties or beats the (k+1)-th
			# largest, so ties may leave more than k entries in the summary.
			# NOTE(review): temp[self.k] is the (k+1)-th item -- confirm the
			# off-by-one is intentional (a strict top-k would use
			# temp[self.k - 1]).
			kth = temp[self.k][1]
			for item in temp :
				if item[1] >= kth :
					self.summary.append(item)
		else :
			self.summary.extend(temp)
		return

	def get_summary(self, _type):
		"""Flush any buffered entries and return the summary list.

		Fix: the original returned self.summary.get_summary(type=_type),
		but self.summary is a plain list (see __init__ / merge) with no
		get_summary method, so the call always raised AttributeError.
		`_type` is retained for interface compatibility.
		"""
		if len(self.buffer) > 0 :
			self.merge(self.buffer)
		return self.summary

def main():
	filename = 'input_zipf_1000000_1.5'
	data_dir = "../../data/"
	_input = pickle.load( open(data_dir +  filename) )
	#outstr = ""
	print "k\tavg\tgetmin\t|minbucket|\t#del min"
	#outstr += "k\tavg\tgetmin\t|minbucket|\t#del min\n"
#	_input = [((1,2),2),((1,2),1),((3,2),2),((3,2),2),((3,5),2),((5,2),2),((1,2),2),((3,4),2),((3,3),2)]
	for q in range(1,2) :
		N = math.pow(10, q)
		for i in range(1) :
			#k = int(math.pow(2, i) * N)
			k = 200

#			k = int(i * N)
			avg = 0.0
			it = 1
			for j in range(it) :
				hs = HashSummaryBucket(k,k)
				tracker = ClassTracker()
				tracker.track_class(HashSummaryBucket,resolution_level=2)
				tracker.track_class(buffer.Buffer,resolution_level=2)
				tracker.track_object(hs,resolution_level=2)
				tracker.start_periodic_snapshots(interval=1)
				with util.Timer() as t :
					for _item in _input :
						hs.add(_item)
				avg += t.interval
				#print "K: %s\t time: %.3f" % (k,t.interval)

			#print "%s\t%.3f\t%s\t%s\t%s" % (k,avg/it,hs.get_min_count,len(hs.buckets[hs.min_bucket][3]),hs.delete_bucket_count)
			#outstr += "%s\t%.3f\t%s\t%s\t%s\n" % (k,avg/it,hs.get_min_count,len(hs.buckets[hs.min_bucket][3]),hs.delete_bucket_count)
#			print "%s\t%.3f" % (k, avg/3)
#			print "#get min: ", hs.get_min_count
#			print "size minbucket", len(hs.buckets[hs.min_bucket][3])
#			print "#del min: ", hs.delete_bucket_count
	#f = open('../out/hashsummary_4M-2.5.dat', 'w')
	#f.write(outstr)
	#f.close()
	print "time: %.3f\n" % t.interval
	#print hs.buckets

#	for _item in _input :
#		try :
#			hs.insert(_item)
#			#print hs.buckets
#		except Exception as e :
#			#print "ERROR"
#			#print e
#			#print hs.buckets
#
#			sys.exit(-1)
	tracker.stats.sort_stats('size').reverse_order().print_stats()
	#print hs.buckets

#print "self.elements", hs.elements

# Guard the entry point so importing this module does not kick off the
# benchmark as a side effect.
if __name__ == "__main__":
	main()