import crcmod
import random
# from scapy.all import *
# from scapy.config import conf
#from p4runtime import *
import math
import shutil
import os
import re

# Ten CRC-32-style hash functions built from distinct generator polynomials.
# They serve as independent hash functions for the sketch and hash-table
# simulations below (all share initCrc=0 and final XOR 0xFFFFFFFF).
hash1 = crcmod.mkCrcFun(0x104c11db7, initCrc=0, xorOut=0xFFFFFFFF)
hash2 = crcmod.mkCrcFun(0x11edc6f41, initCrc=0, xorOut=0xFFFFFFFF)
hash3 = crcmod.mkCrcFun(0x1a833982b, initCrc=0, xorOut=0xFFFFFFFF)
hash4 = crcmod.mkCrcFun(0x1814141ab, initCrc=0, xorOut=0xFFFFFFFF)
hash5 = crcmod.mkCrcFun(0x15a0849e7, initCrc=0, xorOut=0xFFFFFFFF)
hash6 = crcmod.mkCrcFun(0x128ba08bb, initCrc=0, xorOut=0xFFFFFFFF)
hash7 = crcmod.mkCrcFun(0x13b328ffb, initCrc=0, xorOut=0xFFFFFFFF)
hash8 = crcmod.mkCrcFun(0x10e92c2cd, initCrc=0, xorOut=0xFFFFFFFF)
hash9 = crcmod.mkCrcFun(0x11c46e3df, initCrc=0, xorOut=0xFFFFFFFF)
hash10 = crcmod.mkCrcFun(0x118e8dda9, initCrc=0, xorOut=0xFFFFFFFF)


# Maximum 32-bit unsigned value, used as an "infinity" sentinel.
INF = 2**32 - 1


def xor_str(a, b):
	"""Bytewise XOR of two equal-length byte sequences.

	If either operand is None, a diagnostic is printed and the other
	operand is returned unchanged.  Returns a bytearray of len(a).
	"""
	if a is None:
		print("None == a")
		return b
	if b is None:
		print("None == b")
		return a
	out = bytearray(len(a))
	for i, byte in enumerate(a):
		out[i] = byte ^ b[i]
	return out

def get_rand_str(length = 15):
	"""Return a random alphanumeric string of the given length.

	Characters are drawn uniformly from [a-zA-Z0-9].  Note: the original
	alphabet ended in "...wxyx" (a second 'x' instead of 'z'), so 'z'
	could never appear; that typo is fixed here.
	"""
	alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
	return "".join(random.choice(alphabet) for _ in range(length))


def is_ip_add(s):
	"""Return True when s is a valid dotted-quad IPv4 address string.

	Each of the four dot-separated fields must be a decimal number in
	[0, 255].
	"""
	items = s.split(".")
	if 4 != len(items):
		return False
	for item in items:
		if not item.isdigit():
			return False
		# isdigit() accepts some digit characters (e.g. superscripts)
		# that int() cannot parse, so guard the conversion.
		try:
			value = int(item)
		except ValueError:
			return False
		if value < 0 or value > 255:
			return False
	return True

def is_port_number(s):
	"""Return True when s is a valid TCP/UDP port number string,
	i.e. a decimal number in [0, 65535].

	(The original docstring said "IP address" -- a copy-paste slip.)
	"""
	if not s.isdigit():
		return False
	# isdigit() accepts some digit characters that int() cannot parse.
	try:
		value = int(s)
	except ValueError:
		return False
	return 0 <= value <= 65535

def getFlows(pkts, n_pkts = -1):
	"""Count how many times each packet id occurs among the first
	n_pkts entries of pkts.

	pkts: sequence of hashable packet identifiers.
	n_pkts: number of leading packets to consider; a negative value
		means the whole sequence.
	Returns a dict mapping packet id to its occurrence count.
	"""
	if n_pkts < 0:
		n_pkts = len(pkts)
	flows = {}
	for i in range(n_pkts):
		key = pkts[i]
		flows[key] = flows.get(key, 0) + 1
	return flows

def hash_table(n_flows = 100000, n_cells = 100000, n_hashes = 3):
	"""Simulate a single hash table probed with n_hashes salted hashes
	and return the fraction of cells that end up occupied.

	n_flows: number of random flow keys inserted.
	n_cells: number of cells in the table.
	n_hashes: number of probe attempts per key (each probe appends a
		different salt to the key before hashing).
	"""
	seeds = [get_rand_str(length = 20) for _ in range(n_hashes)]
	table = [0] * n_cells
	for _ in range(n_flows):
		key = get_rand_str(length = 20)
		# Probe up to n_hashes cells; the salts are appended cumulatively
		# so every probe hashes a progressively longer key.
		for seed in seeds:
			key += seed
			slot = hash1(key) % n_cells
			if table[slot] == 0:
				table[slot] = 1
				break
	return sum(table) / float(n_cells)

def pipelined_tables_utilization_sim(n_hashes, alpha, n_flows, n_cells):
	"""Simulate a pipelined (hierarchical) hash table and return the
	fraction of cells that end up occupied.

	n_hashes: number of pipeline stages (sub-tables).
	alpha: geometric shrink factor between consecutive stage sizes.
	n_flows: number of random flow keys inserted.
	n_cells: requested total cell count (re-derived after rounding the
		per-stage sizes to integers).
	"""
	seeds = [get_rand_str(length = 20) for _ in range(n_hashes)]
	# Split n_cells into stages whose sizes form a geometric series
	# with ratio alpha.
	norm = 0.0
	for stage in range(n_hashes):
		norm += alpha ** stage
	delta = n_cells / norm
	offset = [0] * (n_hashes + 1)
	sub_size = [0] * n_hashes
	for stage in range(1, n_hashes + 1):
		size = int(delta * (alpha ** (stage - 1)))
		offset[stage] = offset[stage - 1] + size
		sub_size[stage - 1] = size
	# Integer truncation may shrink the total; use the actual size.
	n_cells = offset[n_hashes]
	print("n_cells:", n_cells)
	table = [0] * n_cells
	for _ in range(n_flows):
		key = get_rand_str(length = 20)
		# Walk the stages in order; salts are appended cumulatively.
		for stage in range(n_hashes):
			key += seeds[stage]
			slot = offset[stage] + hash1(key) % sub_size[stage]
			if table[slot] == 0:
				table[slot] = 1
				break
	return sum(table) / float(n_cells)

def pipelined_tables_utilization_theo(depth, alpha, ratio):
	"""Given the depth of the hierarchical hash table, the hierarchy
	coefficient, and the ratio of number of items over the number of
	buckets, calculate the utilization of the hash table.

	depth: the depth as well as the number of hash functions of the
		hierarchical hash table.
	alpha: the hierarchy coefficient of the hash table.
	ratio: the ratio of the number of items over the number of cells.
	Returns the predicted utilization in [0, 1].

	Fix: the original used a hard-coded, low-precision constant
	e = 2.71828 even though math is imported; math.exp/math.e are used
	instead for full floating-point precision.
	"""
	delta = 0.0
	for i in range(depth):
		delta = delta + (alpha**i)
	# p[i] is the probability that a cell of stage i stays empty;
	# p[0] is unused so the recurrence indexes match stage numbers.
	p = [0.0]*(depth + 1)
	p[1] = math.exp(-ratio*delta)
	for i in range(2, depth + 1):
		p[i] = math.exp((1 - p[i - 1])/alpha)*(p[i - 1]**(1.0/alpha))
	# Average the per-stage empty probabilities, weighted by stage size.
	empty_rate = 0.0
	for i in range(1, depth + 1):
		empty_rate = empty_rate + p[i]*(alpha**(i - 1))
	empty_rate = empty_rate/delta
	utilization = 1.0 - empty_rate
	return utilization


def hi_hash_table(alpha = 0.6, n_cells = 100000, n_flows = 100000):
	"""Simulate a three-level hierarchical hash table.

	alpha: geometric shrink factor between consecutive sub-table sizes.
	n_cells: total number of cells across the three sub-tables.
	n_flows: number of random flow keys inserted.
	Returns the fraction of flows that found a free cell (cache hit rate).
	"""
	table = [0]*n_cells
	# The three sub-tables share n_cells in the ratio 1 : alpha : alpha^2.
	norm = 1 + alpha + alpha*alpha
	len1 = int(n_cells/norm)
	len2 = int(alpha*n_cells/norm)
	len3 = int(alpha*alpha*n_cells/norm)
	for _ in range(n_flows):
		key = get_rand_str(length = 20)
		# Candidate slots, one per level, tried in order.
		slots = (hash1(key)%len1,
			len1 + hash2(key)%len2,
			len1 + len2 + hash3(key)%len3)
		for slot in slots:
			if table[slot] == 0:
				table[slot] = 1
				break
	return sum(table)/float(n_flows)

def mu_hash_table(n_cells = 100000, n_flows = 100000):
	"""Simulate a flat hash table probed with three hash functions.

	n_cells: the number of cells of the hash table.
	n_flows: the number of flows that will be fed into the hash table.
	Returns the fraction of flows that found a free cell (cache hit rate).
	"""
	table = [0]*n_cells
	for _ in range(n_flows):
		key = get_rand_str(20)
		# Try the three hash positions in order; stop at the first free one.
		for h in (hash1, hash2, hash3):
			slot = h(key)%n_cells
			if table[slot] == 0:
				table[slot] = 1
				break
	return sum(table)/float(n_flows)

def _sketch_indexes(pkt_id, n_columns):
	"""Map a flow id to its four cells, one in each row of the sketch."""
	return (hash1(pkt_id)%n_columns,
		hash2(pkt_id)%n_columns + n_columns,
		hash3(pkt_id)%n_columns + 2*n_columns,
		hash4(pkt_id)%n_columns + 3*n_columns)

def count_min(filename, n_flows = 1000, n_columns = 1000):
	"""Create a count-min sketch, feed a number of flows into the sketch,
	then query the sketch back and report the average relative error
	(ARE) of the estimates.

	filename: the name of the trace file from which the flows are
		extracted; '#'-prefixed lines are comments, field 17 holds the
		packet count and fields 1, 2, 4, 5 and 6 identify the flow.
	n_flows: the number of flows that will be fed into the sketch.
	n_columns: the number of cells in each row of the sketch.
	Returns the ARE (also printed).

	Fixes: the `break` in the first pass had broken mixed space/tab
	indentation; the result is now returned instead of only printed;
	the duplicated index computation is factored into _sketch_indexes.
	"""
	n_rows = 4
	table = [0]*n_columns*n_rows
	count = 0
	# First pass: insert the first n_flows flows into the sketch.
	with open(filename, "r") as f:
		for line in f:
			if "#" == line[0]:
				continue
			line = line.replace(",", "")
			items = line.split(" ")
			pkt_cnt = int(items[17])
			if 0 == pkt_cnt:
				continue
			pkt_id = " ".join([items[1], items[2], items[4], items[5], items[6]])
			for idx in _sketch_indexes(pkt_id, n_columns):
				table[idx] = table[idx] + pkt_cnt
			count = count + 1
			if count >= n_flows:
				break
	# Second pass: query the same flows back and accumulate the ARE.
	are = 0.0
	count = 0
	with open(filename, "r") as f:
		for line in f:
			if "#" == line[0]:
				continue
			line = line.replace(",", "")
			items = line.split(" ")
			pkt_cnt = float(items[17])
			if 0 == pkt_cnt:
				continue
			pkt_id = " ".join([items[1], items[2], items[4], items[5], items[6]])
			# A count-min estimate is the minimum over the four rows.
			temp_cnt = min(table[idx] for idx in _sketch_indexes(pkt_id, n_columns))
			are = are + abs(temp_cnt - pkt_cnt)/pkt_cnt
			count = count + 1
			if count >= n_flows:
				break
	are = are/n_flows
	print(n_flows, are)
	return are


def flow_radar(alpha=1.24):
	"""Simulate the encode/decode processing of FlowRadar and return the
	fraction of the inserted flows that could be decoded back out.

	alpha: ratio of table cells (n) to inserted flows (m), n = int(m*alpha).
	NOTE(review): bytearray(s) on a str and str(s) on a bytearray assume
	Python 2 string semantics; under Python 3 an explicit encoding would
	be required -- confirm the intended interpreter.
	"""
	m = 1000
	n = int(m*alpha)
	# Per-cell flow counter plus an XOR-folded flow-id field -- an
	# Invertible-Bloom-Lookup-Table style encoding.
	flow_cnt_array = [0]*n
	flow_set_array = [None]*n
	for i in range(m):
		s  = get_rand_str(length = 20)
		# Each flow is recorded in three cells chosen by three hashes.
		idx1 = hash1(s)%n
		idx2 = hash2(s)%n
		idx3 = hash3(s)%n
		flow_cnt_array[idx1] = flow_cnt_array[idx1] + 1
		flow_cnt_array[idx2] = flow_cnt_array[idx2] + 1
		flow_cnt_array[idx3] = flow_cnt_array[idx3] + 1
		# XOR the flow id into each cell's id field; a cell that holds a
		# single flow therefore stores that flow's id verbatim.
		if None == flow_set_array[idx1]:
			flow_set_array[idx1] = bytearray(s)
		else:
			flow_set_array[idx1] = xor_str(flow_set_array[idx1], bytearray(s))
		if None == flow_set_array[idx2]:
			flow_set_array[idx2] = bytearray(s)
		else:
			flow_set_array[idx2] = xor_str(flow_set_array[idx2], bytearray(s))
		if None == flow_set_array[idx3]:
			flow_set_array[idx3] = bytearray(s)
		else:
			flow_set_array[idx3] = xor_str(flow_set_array[idx3], bytearray(s))

	flow_cnt = 0
	flag = True
	print("max:", max(flow_cnt_array))
	# Decode: repeatedly find a "pure" cell (count == 1), read the flow id
	# it stores, and remove that flow from all three of its cells.  Each
	# removal may expose new pure cells; stop when a full sweep finds none.
	while flag:
		flag = False
		for i in range(n):
			if 1 == flow_cnt_array[i]:
				flag = True
				s = flow_set_array[i]
				idx1 = hash1(str(s))%n
				idx2 = hash2(str(s))%n
				idx3 = hash3(str(s))%n
				flow_cnt_array[idx1] = flow_cnt_array[idx1] - 1
				flow_cnt_array[idx2] = flow_cnt_array[idx2] - 1
				flow_cnt_array[idx3] = flow_cnt_array[idx3] - 1
				flow_set_array[idx1] = xor_str(flow_set_array[idx1], s)
				flow_set_array[idx2] = xor_str(flow_set_array[idx2], s)
				flow_set_array[idx3] = xor_str(flow_set_array[idx3], s)
				flow_cnt = flow_cnt + 1
	return flow_cnt/float(m)

def takeSecond(elem):
	"""Sort-key helper: return the second element of a pair."""
	return elem[1]

def rank_set(s):
	"""Given a dict, rank the keys by their values in descending order
	and return a dict mapping each key to its dense rank (starting at 1;
	keys with equal values share the same rank).

	Fixes: the original compared `item[1] > value` on a descending list,
	so the rank could only ever advance on the first item and every key
	ended up with rank 1; it also raised IndexError on an empty dict.
	"""
	if not s:
		return {}
	pairs = sorted(s.items(), key = lambda kv: kv[1], reverse = True)
	rankset = {}
	rank = 0
	prev_value = object()  # sentinel that compares unequal to any value
	for key, value in pairs:
		# Advance the rank each time a new (smaller) value appears.
		if value != prev_value:
			rank = rank + 1
			prev_value = value
		rankset[key] = rank
	return rankset
	 	
def num_to_ip(num):
	"""Convert a 32-bit unsigned integer to a dotted-quad IP string.

	Fix: the original used true division (num = num/256), which yields
	floats on Python 3 and produced octets like "168.0"; floor division
	is correct on both Python 2 and 3.
	"""
	octets = []
	for i in range(4):
		octets = [str(num % 256)] + octets
		num //= 256
	return '.'.join(octets)

def ip_to_num(s):
	"""Convert a dotted-quad IP address string to a 32-bit number.

	Prints an error and returns None when the string does not contain
	exactly four dot-separated fields.
	"""
	parts = s.split(".")
	if len(parts) != 4:
		print("Format Error!")
		return
	value = 0
	for part in parts:
		value = value*256 + int(part)
	return value

def replay(filename, rand = True, n_flows = 100000, s_turn = True):
	"""Replay the network traffic according to a trace file.

	filename: the trace file of the network traffic.
	rand: generate the packets in a random order if true.
	n_flows: the number of concurrent flows per working set.
	s_turn: stop after one working set of n_flows flows is completed if
		true, otherwise continue until the whole file is replayed.
	NOTE(review): conf, Ether, IP, TCP and sendp come from scapy (its
	import is commented out at the top of this file) and `time` is not
	imported here -- as written this function raises NameError; confirm
	the intended imports before use.
	"""
	# Seeding only when rand is False makes the "non-random" mode a
	# deterministic shuffle rather than a strictly ordered replay.
	if not rand:
		random.seed(0)
	# read the trace file into 'records'
	flowList = []
	with open(filename, "r") as f:
		for line in f:
			if '#' == line[0]:
				continue
			line = line.replace(",", "")
			items = line.split(" ")
			# Skip flows with a zero packet count.
			if 0 == int(items[17]):
				continue
			# each flow has the format of [srcAddr, dstAddr, protocol, srcPort, dstPort, pktCnt]
			flow =[items[1], items[2], int(items[4]), 
				int(items[5]), int(items[6]), int(items[17])]
			flowList.append(flow)
	# ready to replay the trace
	sock = conf.L2socket(iface = 'veth1')
	while len(flowList) > 0:
		# Take up to n_flows flows as the current set of concurrent flows.
		if len(flowList) >= n_flows:
			work_set = flowList[0:n_flows]
		else:
			work_set = flowList[0:len(flowList)]
		del flowList[0:len(work_set)]
		t1 = time.time()
		while len(work_set) > 0:
			# Pick a random flow, emit one of its packets, and retire the
			# flow once its remaining packet budget reaches zero.
			idx = random.randint(0, len(work_set) - 1)
			srcAddr = work_set[idx][0]
			dstAddr = work_set[idx][1]
			protocol = work_set[idx][2]
			srcPort = work_set[idx][3]
			dstPort = work_set[idx][4]
			work_set[idx][5] = work_set[idx][5] - 1
			if 0 == work_set[idx][5]:
				del work_set[idx]
			p = Ether()/IP(src = srcAddr, dst = dstAddr, proto = protocol) / TCP(sport = srcPort, dport = dstPort)/ "aaaaaaaaaaaaaaaaa"
			sendp(p, iface='veth1', verbose = 0, socket = sock)
		t2 = time.time()
		print("time:", t2 - t1)
		if s_turn:
			break
	sock.close()

def get_workset(tracefile, n_flows, s_factor = 1.0):
	"""Get a specified number of flows from the trace file.

	tracefile: trace file; '#'-prefixed lines are skipped.  Each data
		line holds five flow-id fields followed by a packet count.
	n_flows: the number of flows to get; all the flows if n_flows is None.
	s_factor: the factor used to scale the packet count of a flow.
	Returns a dict mapping flow id ("f0;f1;f2;f3;f4") to its (scaled)
	packet count; duplicate flow ids are merged by summing.

	Fix: the documented n_flows=None ("take everything") case never
	worked -- the equality check could not match None and the final
	comparison raised TypeError on Python 3; both are now guarded.
	"""
	flowlist = []
	with open(tracefile, "r") as f:
		for line in f:
			if "#" == line[0]:
				continue
			items = line.split(" ")
			flowid = ";".join([items[0],items[1],items[2],items[3],items[4]])
			# Scale the packet count, but never below one packet.
			pktcnt = int(int(items[5])*s_factor)
			if 0 == pktcnt:
				pktcnt = 1
			flowlist.append([flowid, pktcnt])
			if n_flows is not None and len(flowlist) == n_flows:
				break
	if n_flows is not None and len(flowlist) < n_flows:
		print("Error: the number of flows is smaller than n_flows")
		exit()
	workset = {}
	for flowid, pktcnt in flowlist:
		if flowid in workset:
			workset[flowid] = workset[flowid] + pktcnt
		else:
			workset[flowid] = pktcnt
	return workset


def evaluation(alg, hh_thresh, tracefile, n_flows, s_factor):
	"""Evaluate the performance of the network measurement algorithms.

	alg: measurement algorithm whose performance will be evaluated.
	hh_thresh: the threshold for heavy hitters.
	tracefile: the trace file of network traffic to compare against.
	n_flows: the number of flows that will be extracted from the trace file.
	s_factor: the factor used to scale the packet count of a flow.
	Prints the cardinality RE, flowset coverage, heavy-hitter F1 score,
	heavy-hitter ARE and flow-size ARE of the chosen algorithm.
	"""
	if HASHFLOW == alg or HASHFLOW_ORIGINAL == alg:
		flowset_est = get_flowset_hashflow()
		cardinality = get_cardinality_hashflow()
	elif HASHPIPE == alg:
		flowset_est = get_flowset_hashpipe()
		cardinality = get_cardinality_hashpipe()
	elif ELASTICSKETCH == alg:
		flowset_est = get_flowset_elasticsketch()
		cardinality = get_cardinality_elasticsketch()
	elif FLOWRADAR == alg:
		flowset_est = get_flowset_flowradar()
		cardinality = get_cardinality_flowradar()
	else:
		# Bail out early: without an estimated flowset every metric
		# below would raise a NameError.
		print("Error: illegal algorithm")
		return
	flowset_real = get_workset(tracefile, n_flows, s_factor)

	# calculate the Relative Error of cardinality
	card_re = abs(cardinality - len(flowset_real))/float(len(flowset_real))
	print("re_of_cardinality: " + str(card_re))

	# calculate the flowset coverage of flow monitoring
	fsc = float(len(flowset_est))/len(flowset_real)
	print("fsc_of_flow_monitoring: " + str(fsc))

	# calculate the F1 score of the heavy hitters detection.
	heavy_hitter_real = {}
	heavy_hitter_est = {}
	hh_thresh = int(hh_thresh*s_factor)
	for key in flowset_real:
		if flowset_real[key] >= hh_thresh:
			heavy_hitter_real[key] = flowset_real[key]
	for key in flowset_est:
		if flowset_est[key] >= hh_thresh:
			heavy_hitter_est[key] = flowset_est[key]
	n_true_positive = 0
	for item in heavy_hitter_est:
		if item in heavy_hitter_real:
			n_true_positive = n_true_positive + 1
	if 0 == n_true_positive:
		# Also covers empty estimated/real heavy-hitter sets, avoiding a
		# division by zero in precision/recall.
		f1_score = 0.0
	else:
		precision = float(n_true_positive)/len(heavy_hitter_est)
		recall = float(n_true_positive)/len(heavy_hitter_real)
		f1_score = 2*precision*recall/(precision + recall)
	print("f1_score_of_heavy_hitter_detection: " + str(f1_score))

	# calculate the Average Relative Error of heavy hitters
	size_are = 0.0
	for flow in heavy_hitter_real:
		size1 = heavy_hitter_real[flow]
		size2 = 0
		if flow in flowset_est:
			size2 = flowset_est[flow]
		size_are = size_are + abs(size1 - size2)/float(size1)
	if len(heavy_hitter_real) > 0:
		# Guard against a trace that contains no real heavy hitters.
		size_are = size_are/len(heavy_hitter_real)
	print("are_of_heavy_hitters: " + str(size_are))

	# calculate the Average Relative Error of flow size estimation
	are = 0.0
	for flow in flowset_real:
		size1 = flowset_real[flow]
		size2 = 0
		if flow in flowset_est:
			size2 = flowset_est[flow]
		are = are + abs(size1 - size2)/float(size1)
	are = are/len(flowset_real)
	print("are_of_flow_size_estimation: " + str(are))


def dump(alg):
	"""Dump the data structures of the network measurement algorithms.

	alg: the algorithm whose data structures will be dumped.

	Fix: the original started a new if/elif chain after the
	HASHFLOW_ORIGINAL case, so that algorithm was dumped and then also
	fell into the final else, spuriously printing "Unrecognized
	algorithm.".  A single elif chain dispatches exactly once.
	"""
	if alg == HASHFLOW_ORIGINAL:
		dump_hashflow_original()
	elif alg == HASHFLOW:
		dump_hashflow()
	elif alg == HASHPIPE:
		dump_hashpipe()
	elif alg == FLOWRADAR:
		dump_flowradar()
	elif alg == ELASTICSKETCH:
		dump_elasticsketch()
	else:
		print("Unrecognized algorithm.")

def controller(alg, hh_thresh, tracefile, n_flows, s_factor):
	"""Run one measurement experiment end to end: reset the data
	directory, dump the algorithm's data structures, then evaluate them
	against the trace.

	alg: measurement algorithm whose performance will be evaluated.
	hh_thresh: the threshold for heavy hitters.
	tracefile: the trace file of network traffic to compare against.
	n_flows: the number of flows that will be extracted from the trace file.
	s_factor: the factor used to scale the packet count of a flow.
	"""
	# Recreate a clean ./data directory; a missing directory is fine.
	shutil.rmtree("./data", ignore_errors = True)
	os.makedirs("./data")
	print("n_flows: %d" % n_flows)
	# Traffic replay is currently done out of band:
	# replay(filename, n_flows = n_flows)
	dump(alg)
	evaluation(alg, hh_thresh, tracefile, n_flows, s_factor)

def sift_list(lst):
	"""Return a copy of lst with the empty items filtered out."""
	return [item for item in lst if len(item) > 0]

def locate_item_in_list(lst, item):
	"""Return the indexes of every occurrence of item in lst, as a list."""
	return [i for i, elem in enumerate(lst) if elem == item]

def dump_temp_register():
	"""Read and return the contents of the register named 'temp_register'
	from the running switch via its Thrift runtime API.

	NOTE(review): get_parser, thrift_connect, RuntimeAPI and
	load_json_config are not defined or imported in this file -- they
	appear to come from the bmv2 runtime CLI module (cf. the commented
	p4runtime import at the top of the file); confirm how these names
	are brought into scope before calling this function.
	"""
	args = get_parser().parse_args()
	# Connect to the switch's Thrift server using the CLI arguments.
	standard_client, mc_client = thrift_connect(
		args.thrift_ip, args.thrift_port,
		RuntimeAPI.get_thrift_services(args.pre)
	)
	load_json_config(standard_client, args.json)
	runtime = RuntimeAPI(args.pre, standard_client, mc_client)
	temp = runtime.get_register("temp_register")
	return temp




if __name__ == "__main__":
	# Quick self-check: print the occupancy of a single-table simulation
	# probed with four hashes (see hash_table above).
	print(hash_table(n_hashes = 4))
