#!/usr/bin/env python

import numpy as np
import time
import os

def enc_expanding_windows(windows, generation_size, field_size):
	"""Draw one random expanding-window encoding vector.

	Picks a window size uniformly from `windows`, fills that many leading
	coefficients with random field elements in [0, field_size), and pads
	the tail with zeros up to `generation_size`.

	Returns a (1, generation_size) numpy array.
	"""
	chosen = windows[np.random.randint(len(windows))]
	coeffs = np.random.randint(field_size, size=(1, chosen))
	padding = np.zeros((1, generation_size - chosen))
	return np.hstack((coeffs, padding))


def create_source_packets(generation_size):
	"""Generate a column matrix of `generation_size` random byte-valued
	source symbols, each uniform in [0, 256)."""
	data = np.random.randint(256, size=(generation_size, 1))
	return np.matrix(data)


def encode(source_packets, generation_size, field_size, windows, encoding_func):
	"""Produce one coded packet from the source packets.

	`encoding_func` must accept (windows, generation_size, field_size) and
	return an encoding vector as an np.array or np.matrix. The coded
	packet is the matrix product encoding_vector * source_packets.

	Returns a tuple (enc_packet, enc_vector).
	"""
	vector = np.matrix(encoding_func(windows, generation_size, field_size))
	packet = vector * source_packets
	return packet, vector


def stack(enc_packet, enc_vector, enc_matrix, enc_packets):
	"""Append the latest encoding vector / coded packet to the running
	accumulators.

	An all-zero accumulator is treated as "empty" and is replaced outright
	instead of being stacked onto.
	"""
	if enc_matrix.any():
		enc_matrix = np.vstack((enc_matrix, enc_vector))
	else:
		enc_matrix = enc_vector
	if enc_packets.any():
		enc_packets = np.vstack((enc_packets, enc_packet))
	else:
		enc_packets = enc_packet
	return enc_matrix, enc_packets


def decode(enc_mat, enc_pkts):
	"""Estimate the source symbols with the Moore-Penrose pseudo-inverse
	of the encoding matrix, rounding to the nearest integer."""
	estimate = np.linalg.pinv(enc_mat) * enc_pkts
	return estimate.round()


def check_equal(dec_src, src, windows):
	"""Per-window decoding check.

	`windows` holds cumulative window boundaries; entry i of the returned
	array is 1 if every symbol in the slice [previous boundary, windows[i])
	decoded correctly, else 0.
	"""
	status = np.equal(dec_src, src)
	result = np.zeros(len(windows))
	start = 0
	for i, end in enumerate(windows):
		# True/False -> 1/0 for accumulation by the caller.
		result[i] = int(status[start:end, 0].all())
		start = end
	return result


def save_to_log(succes_dict, field_size, generation_size, windows, cycles):
	"""Write per-packet-count window-decoding tallies to a log file.

	The file is named logs/f<field>_g<gen>_w<w1>_<w2>_..._c<cycles>.dat;
	if that name already exists a "-<n>" suffix is appended to keep runs
	distinct. Each data line is: packet_count TAB tally-per-window.
	"""
	if not os.path.exists('logs'): os.mkdir('logs')
	logfile_name = 'logs/f'+str(field_size)+'_g'+str(generation_size)+'_w'
	wins = ''
	for win in windows:
		wins += str(win)+'_'
	logfile_name += wins +'c'+str(cycles)
	unique = 2
	extension = '.dat'
	if os.path.exists(logfile_name+extension):
		while os.path.exists(logfile_name+'-'+str(unique)+extension):
			unique +=1
		logfile_name += '-'+str(unique)
	logfile_name += extension

	# Context manager guarantees the handle is closed even on error.
	with open(logfile_name, 'w') as logfile:
		logfile.write('field_size = '+str(field_size)+'\n')
		logfile.write('generation_size = '+str(generation_size)+'\n')
		logfile.write('windows = '+wins.replace('_',' ')+'\n')
		logfile.write('FORMAT: packets\twindows_decoded\n')

		# BUG FIX: dict.keys() returns a view in Python 3 which has no
		# .sort() method; sorted() over the dict works on both 2 and 3.
		for pkt in sorted(succes_dict):
			s = str(pkt)
			w = succes_dict[pkt]
			for i in range(len(w)):
				s += '\t'+str(w[i])
			logfile.write(s+'\n')


def run_simulation(field_size, generation_size, windows, cycles, upper_limit, enc_func):
	"""Monte-Carlo simulation of expanding-window network coding.

	For each of `cycles` rounds: draw random source packets, then add
	coded packets one at a time (up to `upper_limit`), decoding after
	each addition once at least windows[0] packets exist, and tally the
	per-window decoding successes for every packet count. Tallies are
	written out via save_to_log at the end.

	`enc_func` must accept (windows, generation_size, field_size) and
	return an encoding vector (see encode()).
	"""
	succes_dict = {} # Stores results as key:value pairs: packet_no:[eq_sum]

	# BUG FIX: Python 3 compatibility — print is a function, not a
	# statement (the old form is a SyntaxError on Python 3).
	print('Running '+str(cycles)+' cycles with generation size '+str(generation_size)+', field size '+str(field_size)+', and windows '+str(windows))

	for cykel in range(cycles):
		src = create_source_packets(generation_size)
		enc_matrix = np.zeros((1,1))
		enc_packets = np.zeros((1,1))
		eq = np.zeros(len(windows))
		for packets in range(1,upper_limit+1):
			enc_p, enc_v = encode(src, generation_size, field_size, windows, enc_func)
			enc_matrix, enc_packets = stack(enc_p, enc_v, enc_matrix, enc_packets)

			# Only decode once the smallest window could be solvable, and
			# stop re-decoding once every window has already succeeded.
			if (packets >= windows[0] and not eq.all()):
				decoded_src = decode(enc_matrix, enc_packets)
				eq = check_equal(decoded_src, src, windows)
			# BUG FIX: dict.has_key() was removed in Python 3; use `in`.
			if packets not in succes_dict:
				succes_dict[packets] = np.zeros(len(windows))
			succes_dict[packets] += eq
	save_to_log(succes_dict, field_size, generation_size, windows, cycles)

def create_windows(sorting_list, generation_size):
	"""Convert fractional window boundaries into absolute packet counts.

	Each fraction in `sorting_list` becomes int(fraction * generation_size);
	the full generation size is always appended as the final window.
	"""
	windows = [int(fraction * generation_size) for fraction in sorting_list]
	windows.append(generation_size)
	return windows


if __name__ == '__main__':

	generation_sizes = [40, 80, 160]
	field_sizes = [2, 256]
	# Each inner list holds cumulative window fractions: [0.3] means a
	# separation into 2 chunks (30% important data); [0.4, 0.7] means a
	# separation into 3 chunks (40% vital data, 30% important data).
	# Percentages are summed => expanding windows (the rest-percentage is
	# the least important data).
	windows = [[0.3], [0.4], [0.5], [0.6], [0.7], [0.4, 0.7], [0.2, 0.6], [0.6, 0.8]]
	cycles = 1
	upper_limit = 2  # multiplier applied to the generation size below

	# Sweep every (field size, generation size, window layout) combination.
	for field_size in field_sizes:
		for generation_size in generation_sizes:
			for win in windows:
				window = create_windows(win, generation_size)
				run_simulation(field_size, generation_size, window, cycles, generation_size * upper_limit, enc_expanding_windows)

		






