import sys, copy, os
sys.path.append("..")
import CritMat.critmat as cmat
import numpy as np
import scipy
from scipy import fft
import utility as util
from scipy import signal

import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.image as img
from matplotlib import cm


def flatten_traffic_matrix(tm, num_nodes):
	"""Flatten a traffic matrix into a 1-D numpy array of its off-diagonal
	entries, in row-major order (diagonal self-traffic entries are dropped)."""
	return np.array([tm[src][dst]
	                 for src in range(num_nodes)
	                 for dst in range(num_nodes)
	                 if src != dst])

def unflatten_traffic_vector(vector, nblocks):
	"""Inverse of flatten_traffic_matrix: rebuild an (nblocks x nblocks) matrix
	from a row-major vector of off-diagonal entries; the diagonal stays zero."""
	tm = np.zeros((nblocks, nblocks))
	entries = iter(vector)
	for src in range(nblocks):
		for dst in range(nblocks):
			if src == dst:
				continue
			tm[src][dst] = next(entries)
	return tm

def read_in_facebook_traffic(trace_file_name, network_ids_filename, aggregation_window):
	"""Load traffic matrices from a protobuf trace and rescale each entry
	in place from bytes-per-window to Mbps (x * 8 / 1E6 / window_seconds).

	Returns (traffic_matrices, valid_network_ids, timestamps).
	"""
	valid_network_ids = util.read_in_network_ids(network_ids_filename)
	traffic_matrices, timestamps = util.read_traffic_matrix_protobuf(trace_file_name, considered_network_id=valid_network_ids, return_timestamps=True)
	print("Number of traffic matrices : {}".format(len(traffic_matrices)))
	for tm in traffic_matrices:
		dim = len(tm)
		for row in range(dim):
			for col in range(dim):
				tm[row][col] = float(tm[row][col]) * 8 / 1E6 / float(aggregation_window)
	return traffic_matrices, valid_network_ids, timestamps

def boundable(nblocks, traffic_matrices, tm):
	"""Return True iff every off-diagonal entry of tm is dominated by at least
	one matrix in traffic_matrices (entrywise, per pod pair independently).

	Args:
		nblocks: number of pods (matrix dimension).
		traffic_matrices: candidate bounding (representative) matrices.
		tm: the traffic matrix to test.
	"""
	# FIX: the original used a local variable named `boundable`, shadowing this
	# function's own name; the inner search loop is now an any() expression.
	for i in range(nblocks):
		for j in range(nblocks):
			if i == j:
				continue
			# the (i, j) demand is covered when some representative dominates it
			if not any(rep[i][j] >= tm[i][j] for rep in traffic_matrices):
				return False
	return True

## determines whether a set of representative traffic matrices can bound all other traffic matrices
def boundability_analysis(nblocks, traffic_matrices, num_representative_tms, training_indices=(0,1), evaluation_indices=(0,1)):
	"""Train CritMat on a training window to obtain representative matrices,
	then report what fraction of the evaluation window they can bound.

	Args:
		nblocks: number of pods (matrix dimension).
		traffic_matrices: full chronological list of traffic matrices.
		num_representative_tms: requested cluster count (capped at training size).
		training_indices: (start, end) slice for training.
		evaluation_indices: (start, end) range of snapshots to evaluate.
	"""
	training_traffic_matrices = traffic_matrices[training_indices[0] : training_indices[1]]
	num_training_tms = len(training_traffic_matrices)
	num_representative_tms = min(num_training_tms, num_representative_tms)
	training_traffic_vectors = [flatten_traffic_matrix(x, nblocks) for x in training_traffic_matrices]
	traffic_predictor = cmat.CritMat(training_traffic_vectors, None)
	representative_traffic_vectors = traffic_predictor.train(number_of_clusters=num_representative_tms)
	representative_traffic_matrices = [unflatten_traffic_vector(x, nblocks) for x in representative_traffic_vectors]
	# 1 if the snapshot is boundable by the representatives, else 0.
	boundable_tms = [1 if boundable(nblocks, representative_traffic_matrices, traffic_matrices[tm_index]) else 0
	                 for tm_index in range(evaluation_indices[0], evaluation_indices[1], 1)]
	# ROBUSTNESS: avoid ZeroDivisionError when the evaluation range is empty.
	if not boundable_tms:
		print("Boundable entries: no evaluation snapshots")
		return
	# BUGFIX: the value printed is a fraction in [0, 1]; scale by 100 so the
	# "%" label in the message is accurate.
	print("Boundable entries: {} % ".format(float(sum(boundable_tms)) / len(boundable_tms) * 100))
	return

def pod_ingress_egress_traffic(nblocks, traffic_matrices):
	"""Plot per-pod egress and ingress traffic timeseries across all snapshots.

	NOTE(review): egress sums the whole row (including the diagonal entry
	tm[pod][pod]) while ingress excludes the diagonal -- confirm the diagonal
	is always zero in these matrices, otherwise the two plots are inconsistent.

	Args:
		nblocks: number of pods (matrix dimension).
		traffic_matrices: chronologically ordered traffic matrices.
	"""
	num_snapshots = len(traffic_matrices)
	# One timeseries (length num_snapshots) per pod.
	egress_traffic = [[0] * num_snapshots for _ in range(nblocks)]
	ingress_traffic = [[0] * num_snapshots for _ in range(nblocks)]

	# IDIOM: enumerate replaces the original zip(traffic_matrices, range(...)).
	for tm_index, tm in enumerate(traffic_matrices):
		for pod in range(nblocks):
			egress_traffic[pod][tm_index] = sum(tm[pod])
			ingress_traffic[pod][tm_index] = sum(
				tm[source_pod][pod] for source_pod in range(nblocks) if source_pod != pod)

	fig = plt.figure()
	for pod in range(nblocks):
		plt.plot(egress_traffic[pod])
	plt.legend(["pod : {}".format(x) for x in range(nblocks)])
	plt.title("Egress Traffic")
	fig = plt.figure()
	for pod in range(nblocks):
		plt.plot(ingress_traffic[pod])
	plt.legend(["pod : {}".format(x) for x in range(nblocks)])
	plt.title("Ingress Traffic")
	plt.show()

def pod_pair_timeseries_analysis(nblocks, aggregation_period, traffic_matrices):
	"""Compute the power spectral density of every ordered pod-pair traffic
	timeseries and plot the raw spectra, normalized spectra, and spectral CDF.

	Args:
		nblocks: number of pods (matrix dimension).
		aggregation_period: aggregation window in seconds (title label only).
		traffic_matrices: chronologically ordered traffic matrices.
	"""
	num_snapshots = len(traffic_matrices)
	block_pairs_timeseries = {}
	print("Generating timeseries...")
	for i in range(nblocks):
		for j in range(nblocks):
			if i != j:
				block_pairs_timeseries[(i,j)] = [0] * num_snapshots
	# IDIOM: enumerate replaces the original zip(traffic_matrices, range(...)).
	for tm_index, tm in enumerate(traffic_matrices):
		for i in range(nblocks):
			for j in range(nblocks):
				if i != j:
					block_pairs_timeseries[(i,j)][tm_index] = tm[i][j]
	# BUGFIX: typo "Competed" -> "Completed" in the status message.
	print("Completed timeseries generation")
	block_pairs_frequency = {}
	print("Performing FFT")
	for i,j in block_pairs_timeseries.keys():
		freq, spectra = signal.periodogram(block_pairs_timeseries[(i,j)], fs=1)
		block_pairs_frequency[(i,j)] = (freq, spectra)
	# Raw power spectrum, one curve per pod pair.
	fig = plt.figure()
	for i,j in block_pairs_frequency.keys():
		spectrum = np.abs(block_pairs_frequency[(i,j)][1])
		plt.plot(block_pairs_frequency[(i,j)][0], spectrum, linewidth=0.5)
	# Normalized spectrum (each curve sums to 1).
	fig = plt.figure()
	for i,j in block_pairs_frequency.keys():
		spectrum = np.abs(block_pairs_frequency[(i,j)][1])
		pdf = spectrum / sum(spectrum)
		plt.plot(block_pairs_frequency[(i,j)][0], pdf, linewidth=0.5)
	# Cumulative spectral density (CDF).
	fig = plt.figure()
	for i,j in block_pairs_frequency.keys():
		spectrum = np.abs(block_pairs_frequency[(i,j)][1])
		pdf = np.cumsum(spectrum) / sum(spectrum)
		plt.plot(block_pairs_frequency[(i,j)][0], pdf, linewidth=0.5)
	# COMPAT: the ymin/ymax keyword arguments were removed in matplotlib 3.0;
	# the positional form works on both old and new versions.
	plt.ylim(0, 1)
	plt.title("CDF Power Spectral Density with Aggregation: {}s".format(aggregation_period))
	plt.xlabel("Frequency")
	plt.show()
	return

def read_in_file(filename):
	"""Read a boundability checkpoint file written by write_to_file.

	The file has a one-line header followed by rows of
	"timestamp boundable_lookback found_boundable".

	Returns:
		(timestamps, boundable_lookback, found_boundable) lists, or None when
		the file does not exist yet.
	"""
	timestamps = []
	boundable_lookback = []
	found_boundable = []
	try:
		with open(filename, 'r') as f:
			first_line = True
			for line in f:
				if first_line:
					# Skip the header row.
					first_line = False
					continue
				if not line.strip():
					# ROBUSTNESS: tolerate blank lines instead of raising
					# IndexError on split_line[0].
					continue
				split_line = line.split()
				timestamps.append(int(split_line[0]))
				boundable_lookback.append(int(split_line[1]))
				# BUGFIX: bool("False") is True -- any non-empty string is
				# truthy, so every resumed run skipped all recomputation.
				# Compare against the literal written by write_to_file.
				found_boundable.append(split_line[2] == "True")
		return timestamps, boundable_lookback, found_boundable
	except IOError:
		print("File does not exist yet")
		return None

def write_to_file(filename, timestamps, boundable_lookback, found_boundable):
	"""Persist boundability results as a whitespace-separated table: a header
	row followed by one "timestamp lookback found" row per snapshot."""
	rows = ["timestamps boundable_lookback found_boundable\n"]
	rows.extend("{} {} {}\n".format(ts, lookback, found)
	            for ts, lookback, found in zip(timestamps, boundable_lookback, found_boundable))
	with open(filename, 'w+') as f:
		f.write("".join(rows))
	return

def boundable_quick(nblocks, leftover_pod_pairs, new_training_tm, main_tm):
	"""Remove from leftover_pod_pairs (in place) every pod pair whose demand in
	main_tm is dominated by new_training_tm, and return the mutated set."""
	covered = {(src, dst) for src, dst in leftover_pod_pairs
	           if new_training_tm[src][dst] >= main_tm[src][dst]}
	leftover_pod_pairs -= covered
	return leftover_pod_pairs


## For each snapshot, determines how far back in history we must look before the snapshot can be covered (bounded) by earlier snapshots.
def historical_viewer(nblocks, traffic_matrix_snapshots, timestamps, fabric_name):
	"""For each traffic snapshot, find the smallest number of prior snapshots
	needed so that every off-diagonal pod-pair demand is dominated by at least
	one of them.

	Results are checkpointed to "fabric<fabric_name>_boundability.txt" so an
	interrupted run can resume from the first unprocessed snapshot.

	Args:
		nblocks: number of pods (matrix dimension).
		traffic_matrix_snapshots: chronologically ordered traffic matrices.
		timestamps: per-snapshot timestamps; must align 1:1 with the snapshots.
		fabric_name: suffix used to build the checkpoint filename.

	Returns:
		List of per-snapshot lookback distances; -1 where no set of prior
		snapshots could bound the snapshot.
	"""
	assert(len(timestamps) == len(traffic_matrix_snapshots))
	total_snapshots = len(traffic_matrix_snapshots)
	boundable_lookback = [-1] * total_snapshots
	found_boundable = [False] * total_snapshots

	# Resume from a previous run if the checkpoint file already exists.
	file_contents = read_in_file("fabric{}_boundability.txt".format(fabric_name))
	if file_contents is not None:
		boundable_lookback = file_contents[1]
		found_boundable = file_contents[2]
		assert(len(boundable_lookback) == total_snapshots)
		assert(len(found_boundable) == total_snapshots)

	# find first entry of false in found boundable
	# NOTE(review): if every entry is already True, first_false stays 0 and the
	# whole computation is redone -- confirm this fallthrough is intended.
	first_false = 0
	for i in range(len(boundable_lookback)):
		if not found_boundable[i]:
			first_false = i
			break

	# All ordered pod pairs (i, j), i != j, that must be covered per snapshot.
	all_pod_pairs = set()
	for i in range(nblocks):
		for j in range(nblocks):
			if i != j:
				all_pod_pairs.add((i,j))


	step_progress = 0.05
	current_progress_bar = 0
	for tm_snapshot, tm_index in zip(traffic_matrix_snapshots[first_false:], range(first_false, total_snapshots, 1)):
		leftover_pod_pairs = copy.deepcopy(all_pod_pairs)
		# Walk backwards one snapshot at a time until every pod pair has been
		# dominated by some earlier snapshot, or history is exhausted.
		for prior_snapshots in range(1, tm_index + 1, 1):
			leftover_pod_pairs = boundable_quick(nblocks, leftover_pod_pairs, traffic_matrix_snapshots[tm_index - prior_snapshots], tm_snapshot)
			if len(leftover_pod_pairs) == 0:
				boundable_lookback[tm_index] = prior_snapshots
				break
		# NOTE(review): this marks the snapshot as processed even when no
		# bounding lookback was found (lookback stays -1) -- "found_boundable"
		# effectively means "analyzed", not "was boundable".
		found_boundable[tm_index] = True
		# Checkpoint to disk roughly every 5% of progress.
		if float(tm_index) / total_snapshots >= current_progress_bar + step_progress:
			print("Progress : {}%".format(float(tm_index) / total_snapshots * 100))
			current_progress_bar = max(current_progress_bar + step_progress , float(tm_index + 1) / total_snapshots)
			write_to_file("fabric{}_boundability.txt".format(fabric_name), timestamps, boundable_lookback, found_boundable)
	# Final checkpoint so the last partial-progress window is not lost.
	write_to_file("fabric{}_boundability.txt".format(fabric_name), timestamps, boundable_lookback, found_boundable)
	return boundable_lookback

if __name__ == "__main__":
	# Load in the traces
	aggregation_window = 1
	## simply change the clusters here
	cluster_name = "C"
	cluster_alias = "database"
	# NOTE(review): duplicate assignment -- aggregation_window was already set
	# to 1 above; one of the two should be removed.
	aggregation_window = 1
	## simply change the clusters here
	# Map the cluster letter to its workload alias (used in the trace filename);
	# the alias assigned above is overwritten here for every recognized cluster.
	if cluster_name == "A":
		cluster_alias = "database"
	elif cluster_name == "B":
		cluster_alias = "web"
	elif cluster_name == "C":
		cluster_alias = "hadoop"
	elif cluster_name == "combined":
		cluster_alias = "combined"
	else:
		print("Unrecognized ")
		sys.exit()
	
	# NOTE(review): hard-coded absolute user paths -- consider taking these from
	# command-line arguments or environment variables.
	tm_snapshots_protobuf_filename = "/Users/minyee/src/facebook_dcn_traffic/traffic_matrices/cluster{}/{}_aggregationwindow_{}.pb".format(cluster_name, cluster_alias, aggregation_window)
	valid_network_ids_filename = "/Users/minyee/src/facebook_dcn_traffic/traffic_matrices/cluster{}/cluster{}_pods.txt".format(cluster_name, cluster_name)
	#tm_snapshots_protobuf_filename = "/Users/minyee/src/facebook_dcn_traffic/traffic_matrices/clusterB/web_aggregationwindow_{}.pb".format(aggregation_window)
	#valid_network_ids_filename = "/Users/minyee/src/facebook_dcn_traffic/traffic_matrices/clusterB/clusterB_pods.txt"
	#tm_snapshots_protobuf_filename = "/Users/minyee/src/facebook_dcn_traffic/traffic_matrices/clusterC/hadoop_aggregationwindow_{}.pb".format(aggregation_window)
	#valid_network_ids_filename = "/Users/minyee/src/facebook_dcn_traffic/traffic_matrices/clusterC/clusterC_pods.txt"
	#tm_snapshots_protobuf_filename = "/Users/minyee/src/facebook_dcn_traffic/traffic_matrices/clustercombined/combined_aggregationwindow_{}.pb".format(aggregation_window)
	#valid_network_ids_filename = "/Users/minyee/src/facebook_dcn_traffic/traffic_matrices/clustercombined/clustercombined_pods.txt"
	traffic_matrices, valid_network_ids, timestamps = read_in_facebook_traffic(tm_snapshots_protobuf_filename, valid_network_ids_filename, aggregation_window)
	nblocks = len(valid_network_ids)
	# Drop the first and last snapshots -- presumably partial aggregation
	# windows at the trace boundaries; TODO confirm.
	traffic_matrices = traffic_matrices[1:-1]
	timestamps = timestamps[1:-1]
	boundable_lookbacks = historical_viewer(nblocks, traffic_matrices, timestamps, cluster_name)
	fig = plt.figure()
	plt.plot(boundable_lookbacks)
	# NOTE(review): exit() here makes everything below unreachable, and the plot
	# above is never displayed because plt.show() is never called before exiting.
	exit()


	# NOTE(review): dead code -- num_representative_tms is undefined in this
	# scope and would raise NameError if this path were ever reached.
	pod_ingress_egress_traffic(nblocks, traffic_matrices)
	#traffic_matrices = traffic_matrices[:min(3000, len(traffic_matrices))]
	starting_point = int((float(12 - 7) / 24) * len(traffic_matrices))
	training_length = 2 * 60 * 60
	boundability_analysis(nblocks, traffic_matrices, num_representative_tms, training_indices=(starting_point,starting_point + training_length), evaluation_indices=(starting_point + training_length + 1,len(traffic_matrices) - 1))
	#pod_pair_timeseries_analysis(nblocks, aggregation_window, traffic_matrices)
