from timeseries_performance_pb2 import *

## Import the timeseries performance from a protobuf file into an internal representation
def import_timeseries_protobuf(filename, aurora_network):
	"""Parse a PerformanceTimeSeries protobuf file into per-timestamp tuples.

	Returns (performance_timeseries, timestamps) where each performance entry is
	(mlu, lu90, lu50, ahc, link_utilization_distribution) with the distribution
	sorted ascending by utilization, or (None, None) if the file cannot be read.
	"""
	timestamps = []
	performance_timeseries = []
	performance_timeseries_protobuf = PerformanceTimeSeries()
	try:
		# Protobuf wire format is binary: the file must be opened in "rb" mode
		# (text mode would decode/corrupt the bytes). `with` also guarantees the
		# handle is closed even if ParseFromString raises.
		with open(filename, "rb") as f:
			performance_timeseries_protobuf.ParseFromString(f.read())
	except IOError:
		print("Could not open file: {}".format( filename))
		return None, None
	for performance_entry in performance_timeseries_protobuf.entries:
		ts = performance_entry.timestamp
		ahc = performance_entry.average_hop_count
		link_utilization_distribution = []
		total_links = 0
		for lu_protobuf in performance_entry.link_utilizations:
			lu = lu_protobuf.link_utilization
			link_count = lu_protobuf.num_links
			total_links += link_count
			i = lu_protobuf.src
			j = lu_protobuf.dst
			link_utilization_distribution.append( (lu, link_count, i, j) )
		link_utilization_distribution.sort(key=lambda x: x[0])
		# Links absent from the protobuf entry are treated as zero-utilization,
		# so the cumulative count starts at the number of unreported links.
		cumulative_link_counts = (aurora_network.get_total_links() - total_links)
		p50_index = 0.5 * (aurora_network.get_total_links())
		p90_index = 0.9 * (aurora_network.get_total_links())
		lu50 = -1.
		lu90 = -1.
		# Max link utilization: last element of the ascending-sorted distribution
		# (guard against an entry with no link_utilizations at all).
		mlu = link_utilization_distribution[-1][0] if link_utilization_distribution else 0.
		for (lu, num_links, _, _) in link_utilization_distribution:
			# Two independent ifs (not if/elif): one entry can cross both the
			# p50 and p90 thresholds, and an elif would leave lu90 unset when
			# that happens on the final entry.
			if lu50 < 0 and cumulative_link_counts >= p50_index:
				lu50 = lu
			if lu90 < 0 and cumulative_link_counts >= p90_index:
				lu90 = lu
			cumulative_link_counts += num_links
		# finally append the performance to perf timeseries and ts to timestamp
		performance_timeseries.append((mlu, lu90, lu50, ahc, link_utilization_distribution))
		timestamps.append(ts)
	return performance_timeseries, timestamps


## Import the timeseries performance from a protobuf file into an internal representation
def import_timeseries_protobuf_lightweight(filename, aurora_network):
	"""Parse a PerformanceTimeSeries protobuf file, keeping only (mlu, ahc) per entry.

	Lightweight variant of import_timeseries_protobuf: skips the full link
	utilization distribution and percentile computation. Returns
	(performance_timeseries, timestamps), or (None, None) if the file cannot
	be read.
	"""
	timestamps = []
	performance_timeseries = []
	performance_timeseries_protobuf = PerformanceTimeSeries()
	try:
		# Protobuf wire format is binary: the file must be opened in "rb" mode
		# (text mode would decode/corrupt the bytes). `with` also guarantees the
		# handle is closed even if ParseFromString raises.
		with open(filename, "rb") as f:
			performance_timeseries_protobuf.ParseFromString(f.read())
	except IOError:
		print("Could not open file: {}".format( filename))
		return None, None
	for performance_entry in performance_timeseries_protobuf.entries:
		ts = performance_entry.timestamp
		ahc = performance_entry.average_hop_count
		# Maximum link utilization across all links in this snapshot.
		mlu = 0
		for lu_protobuf in performance_entry.link_utilizations:
			mlu = max(mlu, lu_protobuf.link_utilization)
		# finally append the performance to perf timeseries and ts to timestamp
		performance_timeseries.append((mlu, ahc,))
		timestamps.append(ts)
	return performance_timeseries, timestamps


## Exports just the timeseries to protobuf, and not the network configurations
def export_timeseries_to_protobuf(filename, aurora_network, timestamps, performance_timeseries):
	"""Serialize (timestamps, performance_timeseries) to a PerformanceTimeSeries protobuf file.

	Each performance entry is expected in the shape produced by
	import_timeseries_protobuf: (mlu, lu90, lu50, ahc, link_utilization_distribution),
	where the distribution holds (lu, num_links, src, dst) tuples.
	Raises ValueError if the two input lists differ in length.
	"""
	# Explicit check instead of assert: asserts are stripped under `python -O`.
	if len(timestamps) != len(performance_timeseries):
		raise ValueError("timestamps and performance_timeseries must have equal length")
	performance_timeseries_protobuf = PerformanceTimeSeries()
	for ts, performances in zip(timestamps, performance_timeseries):
		entry = performance_timeseries_protobuf.entries.add()
		lu_distribution = performances[4]
		entry.timestamp = ts
		entry.average_hop_count = performances[3]
		for lu, num_links, i, j in lu_distribution:
			lu_proto = entry.link_utilizations.add()
			lu_proto.src = i
			lu_proto.dst = j
			lu_proto.link_utilization = lu
			lu_proto.num_links = num_links
	# BUG FIX: the original wrote to an undefined name `export_filename`
	# (NameError at runtime); use the `filename` parameter.
	with open(filename, "wb") as f:
		f.write(performance_timeseries_protobuf.SerializeToString())
	return