from peer import *
from peer import Node_failed
from distributed_kmeans import kmeans_node
import threading
import time
import random
import sys
import socket
import os
from interface import Interface
import time
import datetime
import csv
import math
#from threading import Lock

arguments=sys.argv
'''if len(arguments)!=5:
    print "the program is terminated because input parameters are not efficient"
    print "Four parameters are required: threshold, neighborhood information, file name of local database, result file name"
    sys.exit(0)'''


if not (len(arguments)==4 or len(arguments)==10):
    print "the program is terminated because input parameters are not efficient"   
    sys.exit(0)
    
central_server_ip=arguments[1]
central_server_port=int(arguments[2])
#database_filename=arguments[3]
#threshold=float(arguments[3])



#if len(arguments)==4:
#    result_file_name=arguments[3]
#else:
#    result_file_name=arguments[9]





#==============================================================================================================
neighbor=[] #varibale to store ip, port information of neigbors
centroids={} #variable to store local centroids for every iteration
centroids_counts={}#variable to store local centroids counts for every iteration
ITERATION=1 # variable to trace ITERATION number
virtual_centroids_count=0 #variable to trace centroids counts number
calculate_centroids_count=0#variable to record how many local centroids count have been calculated
node_one=None # DKmeans object
pollsent=0 # varibale to record now send poll message for which iteration
poll_iteration_record =0 # variable to record the recent poll message heard from neighbors
wait_counts={} # variable to record how many times a POLL message has waited for, drop nodes who didnot reply
maxi_wait=5# Maximum times a poll message can wait
neighbor_centroids=[] # varibale to store received centroids information of neigbors, it is a list of dictionaries, each one corresponds to one neighbor
threshold=0.0002          #Default value, actually never used
collect_result_check=True
adding_history={} # variable to record which node is added in which iteration
neighbor_iteration_reponses=[] # variable to record the responses from neighbors to the message "ADDN"
database_lastrecord_len=0
load_time_input=[]
session_start_time=[]
finishing_clustering_flag=False
calculate_centroids_break=False
sendPoll_break=False
addCentroids_break=False
collect_result_break=False
database_check_break=False
node_initialize_started=False
flag_init_done=False   #This flag has been added to avoid the peer handles STAR msg before INIT msg
flag_start=False   #When the peer receive GETN,INIT or STAR msg, it sets this flag true
mutex_got=False   #To be master.The peer sets this flag true by receiving the MUTX msg from the server
num_exp=0   #Number of experiments (how many times the clustering has been done)
fk_slot=60*60*24       #Random chosen waiting time before starting the algorithm
index_domain=0
timestamp1=0
added_new_neighbor=True
neigbors_final_data=[]
stuck = False
log=False
res_date='0'

if arguments[9]=='log':
    log=True

s = socket.socket( socket.AF_INET, socket.SOCK_STREAM ) 
s.connect( ( "www.google.com", 80 ) ) 
local_ip = s.getsockname()[0] 
s.close()

listenport=10002
node_conn=BTPeerConnection(None, central_server_ip, central_server_port, None, debug=False)
peer=BTPeer(5,listenport,None,local_ip);



def timeout(BTconnection,data):
    peercon=BTconnection
    global stuck
    stuck = True
    response=['STUCK']
    print "Node is stuck --> ",response
    peercon.senddata('STUC', str(response))
    #time.sleep(120)

#==============================================================================================================
def Store_NeighborInfo(BTconnection,data):
    '''Handler for 'NEBO' message, to perform operations to store neighborhood information.'''
    global flag_start,neighbor_centroids
    neighbor_centroids=[]
    flag_start=True
    global neighbor
    neighbor=eval(data)
    print "My neighbors are",
    print neighbor
    for one in neighbor:
        neighbor_centroids.append({})
    print neighbor_centroids
    
    
def Get_Column_Selected(BTconnection,data):
    ''' Handler for 'INIT' message, to perform operations to store information about on which columns of the database to do the clustering'''
    global domain,log,res_date, log,start_datetime, start_time, end_datetime, end_time, column_selected, flag_init_done,threshold,flag_start,timestamp1
    flag_start=True
    timestamp1 = datetime.datetime(time.gmtime()[0],time.gmtime()[1],time.gmtime()[2],time.gmtime()[3],time.gmtime()[4])
    #print timestamp1
    clustering_info=eval(data)[0]
    print "------------------------ Handler INIT --------------------------"
    print "Clustering info =",
    print clustering_info
    domain=clustering_info[0][0]
    start_datetime=clustering_info[0][1][0]
    tmp=start_datetime.split(',')
    start_time=datetime.datetime(eval(tmp[0]), eval(tmp[1]), eval(tmp[2]), eval(tmp[3]), eval(tmp[4]), eval(tmp[5]))
    end_datetime=clustering_info[0][1][1]
    tmp=end_datetime.split(',')
    end_time=datetime.datetime(eval(tmp[0]), eval(tmp[1]), eval(tmp[2]), eval(tmp[3]), eval(tmp[4]), eval(tmp[5]))
    column_selected=clustering_info[1]
        
    #arguments=sys.argv
    central_server_ip=arguments[1]
    central_server_port=int(arguments[2])
    #database_filename=arguments[3]
    threshold=float(clustering_info[2])
    print "Threshold is",threshold
    log=clustering_info[3]
    print "LOG VALUE IS",log
    res_date=clustering_info[4]
    print "filename",res_date
    #if len(arguments)==4:
    #    result_file_name=arguments[3]
    #else:
    #    result_file_name=arguments[9]
    
    
        
    flag_init_done = True
    print "Flag_init_done = TRUE\n"

def get_initstate(BTconnection,data):
    '''Handler for 'STAR' message, receive initialization information and initialize kmeans node'''
    global flag_init_done,flag_start,load_time_input,session_start_time,query_interface
    flag_start=True
    peercon=BTconnection
    
    # data is the str() of a list; its first element is the initial centroids.
    msg=eval(data)
    initial_state=msg[0]
    
    # Busy-wait until the INIT handler has stored domain/time-window/columns.
    while flag_init_done == False:
        print "\nWaiting to handle INIT Msg !!!!\n"
        continue
    global domain, start_time, end_time,database_lastrecord_len,threshold,log
    paras=[domain,[start_time,end_time],log]
    
    # Watchdog: report STUC back to the server if the DB query exceeds 60 s.
    my_timer = threading.Timer(60.0, timeout, args=[peercon,data])
    my_timer.start()
    
    query_interface=Interface()
    database=query_interface.SQqueryData (paras)
    
    
    #time.sleep(20)
    global stuck
    if not database or stuck:
        # Query failed or watchdog fired: park this handler for a day.
        print "Error in data fetching........"
        time.sleep(60*60*24)
    
    else:
        #print database
        database_lastrecord_len=len(database[0])
        print "------------------------ Handler STAR -----------------------------" 
        print 'Database_lastrecord_len is --> ', database_lastrecord_len
        #database = open(database_filename,'r') 
        global database_input
        database_input=[]
    
        # NOTE(review): indices 4 and 5 look like load-time and session-start
        # columns of the query result — confirm against Interface.SQqueryData.
        load_time_input=database[4]
        session_start_time=database[5]
    
        #print "load_time-->",load_time_input
    
        # Keep only the selected columns, row by row.
        for i in range (len(database[0])):
            tmp=[]
            for one in column_selected:
                tmp.append(database[one][i])
            database_input.append(tmp)
        #print database_input
        #threshold=0.02
        #lock=threading.Lock()
        global node_one
        #lock.acquire()
        node_one=kmeans_node(initial_state,database_input,threshold)
        
        #time.sleep(5)
        
        # Seed iteration 0 with the node's own first centroids.
        global centroids
        centroids[0]=node_one.centroids[0]
        print 'Centroids[0] is', centroids,"\n"
        
        my_timer.cancel()
        print "Timer removed"
    
def get_centroids(BTconnection,data):
    '''Handler for 'POLL' message, return local centroids (if it is available) as response to POLL message'''
    global flag_start
    flag_start=True
    peercon=BTconnection
    # msg = [iteration_number, (src_ip, src_port)]; src is echoed back so the
    # poller knows which of its requests this RESP answers.
    msg=eval(data)
    num_of_iteration=msg[0]
    src=msg[1]
    global poll_iteration_record,node_one
    poll_iteration_record=num_of_iteration
    #if node_one != None:
    #length=len(node_one.centroids_counts)
    print "------------------------ Handler POLL -------------------------------"
    if node_one == None:
        # Not initialized yet: ask the poller to retry later.
        response=[num_of_iteration,'wait', src]
        print "Node non initialized yet --> ",response
        peercon.senddata('RESP', str(response))
    else:
        #node_one.terminate_check(threshold)
        if (len(node_one.centroids)>1 and node_one.termination):
            # Already converged: peek (pop then re-append) the final counts
            # and return them regardless of the requested iteration.
            tmp_centroids=node_one.centroids_counts.pop()
            node_one.centroids_counts.append(tmp_centroids) 
            response=[num_of_iteration,tmp_centroids,src]
            print "First response",response,"\n"
            peercon.senddata('RESP', str(response))
        else:
            if not centroids_counts.has_key(num_of_iteration-1):
                # Counts for the requested iteration are not ready yet.
                response=[num_of_iteration,'wait', src]
                print "Second response",response,"\n"
                peercon.senddata('RESP', str(response))
    
            else:
                # NOTE(review): this local name shadows the global 'centroids'
                # dict; it holds the counts for the requested iteration.
                centroids=centroids_counts[num_of_iteration-1]
                response=[num_of_iteration,centroids, src]
                print "Third response",response,"\n"
                peercon.senddata('RESP', str(response))

def store_neibor_centroids(BTconnection,data):
    '''Handler for RESP message, store received neighbor centroids information'''
    peercon=BTconnection
    msg=eval(data)
    itera=msg[0]
    print "------------------------------- Handler RESP ---------------"
    print "Received Message --> ",msg
    if 'wait' in data:
        time.sleep(5)
        data_to_send=[itera, msg[2]]
        print 'Wait received - Resend poll msg --> ',str(data_to_send),"\n"
        peercon.senddata('POLL', str(data_to_send))
        '''if wait_counts.has_key(itera):
            wait_counts[itera]=wait_counts[itera]+1
        if not wait_counts.has_key(itera):
            wait_counts[itera]=1
        if wait_counts[itera]<= maxi_wait:
            data_to_send=[itera, msg[2]]
            peercon.senddata('POLL', str(data_to_send))'''
    else:
        neighbor_ip=msg[2][0]
        neighbor_port=msg[2][1]
        tmp_centroids=msg[1]
        count=0
        for one in neighbor:
            if one[0]==neighbor_ip and one[1]==neighbor_port:
                break  
            count=count+1
        print "Neighbor centroids --> ",neighbor_centroids
        neighbor_centroids[count][itera]=tmp_centroids
        #
        print "\n"

def dynamic_start(BTconnection,data):
    '''If the node joins the clustering after the algorithm starts, it will enter the mode of dynamic start.'''
    global neighbor,neighbor_centroids,flag_start
    flag_start=True
    neighbor=eval(data)
    print "Dynamic start, my neighbor are ",neighbor
    for one in neighbor:
        neighbor_centroids.append({})

def add_request_handler(BTconnection,data):
    '''Handler for ADDN message, send back current ITERATION number and local centriods if it is not terminated, or current POLL number heard from neigbors and last local centroids if it is already terminated'''
    peercon=BTconnection
    global domain, start_time, end_time, column_selected,start_datetime, end_datetime,threshold
    global database_input,added_new_neighbor
    #ip_port=data.split(' ',1)
    src=eval(data)[-1]
    print "------------------------------- Handler ADDN ---------------"
    print 'Add request comes from', src
    global neighbor,ITERATION,centroids,node_one,virtual_centroids_count,pollsent,centroids_counts,calculate_centroids_count, collect_result_check
    #neighbor.append([ip,port,ITERATION+1])
    column_selected.append(threshold)
    if not node_one.terminate_check:
        response=[ITERATION,centroids[ITERATION],domain, start_datetime, end_datetime, column_selected, src]
    else:
        last_iteration=max(k for k, v in centroids.iteritems())
        last_centroids=centroids[last_iteration]
        response=[poll_iteration_record,last_centroids,domain, start_datetime, start_datetime, column_selected, src]
        
        ITERATION=poll_iteration_record
        virtual_centroids_count=ITERATION-1
        pollsent=ITERATION-1
        added_new_neighbor=False
        print "control",added_new_neighbor
        calculate_centroids_count=ITERATION-1
        collect_result_check=True
        for one in neighbor_centroids:
            if one.has_key(ITERATION):
                one.pop(ITERATION)
                
        tmp_centroids=node_one.centroids.pop()
        node_one.centroids.append(tmp_centroids)
        centroids[virtual_centroids_count]=tmp_centroids
        node_one.termination=False
    global adding_history
    if adding_history.has_key(ITERATION):
        adding_history[ITERATION].append(src)
    else:
        adding_history[ITERATION]=[src]
        
    peercon.senddata('ADDI', str(response))
    print "Send back ADDI", response,"\n"

def add_reponse_handler(BTconnection,data):
    '''Handler for message ADDI, store neighbor answers for ADDN message'''
    iteration_centroids=eval(data)[0:7]
    global neighbor_iteration_reponses,neighbor
    neighbor_iteration_reponses.append(iteration_centroids)
    print "------------------------------- Handler ADDI ---------------"
    print "Neighbors response length =",len(neighbor_iteration_reponses)
    print neighbor_iteration_reponses
    print "I have",len(neighbor),"neighbors\n"

def conn_test(BTconnection,data):
    '''Handler for connection-test messages: intentionally a no-op — a
    successful dispatch is itself the liveness proof.'''
    pass

def node_lost_handler(BTconnection,data):
    node_lost=eval(data)
    print "------------------------------- Handler LOST ---------------"
    print 'Found some lost node ', node_lost
    global neighbor
    print neighbor
    for one in node_lost:
        try:
            neighbor.remove(one)
            print "REMOVED -> ", one,"from neighbor"
            print "The new neighbor are --> ",neighbor
        except:
            print "Not able to remove lost peer"
            pass
    print "\n"

def up_to_date(BTconnection,data):
    if node_one != None:
        ITERATION=poll_iteration_record
        virtual_centroids_count=ITERATION
        tmp_centroids=node_one.centroids.pop()
        node_one.centroids.append(tmp_centroids)
        new_centroids=node_one.one_round_kmeans(tmp_centroids)
        node_one.local_centroid_counts(new_centroids)
        centroids_counts[ITERATION-1]=node_one.centroids_counts.pop()
        node_one.termination=False
        global collect_result_check
        collect_result_check=True
        print "Update is over"
        
#===============================================================================================================
def calculate_centroids():
     
    
    while(True):
        #"\nInside Calculate_centroids -----------"
        if node_one == None:
            continue
        else:
            global finishing_clustering_flag,calculate_centroids_count,added_new_neighbor
            if finishing_clustering_flag ==True:
                global calculate_centroids_break
                calculate_centroids_break=True
                print "Successfully break - No more calculating"
                
                break
            global virtual_centroids_count, ITERATION, centroids_count,centroids #adding_history
            #node_one.terminate_check(threshold)
            if (len(node_one.centroids)>1 and node_one.termination and ITERATION ==virtual_centroids_count):
                #print "while loop is broken because of calculate_centroids"
                #break
                continue
            else:
                if virtual_centroids_count < ITERATION:#len(node_one.centroids):
                    if virtual_centroids_count >= calculate_centroids_count:
                        itera=virtual_centroids_count
                        #print "\nCent2 --->  ", centroids,itera
                                                
                        while (True):
			    if len(centroids)<(itera+1):
			        print "error"
			        time.sleep(1) 
			        continue
			    else:
			        print "ok"
			        break
			    
                        new_centroids=node_one.one_round_kmeans(centroids[itera])
                        node_one.local_centroid_counts(new_centroids)
                        centroids_counts[itera]=node_one.centroids_counts.pop()
                        #print "Centroids counts --->  ",centroids_counts
                        node_one.centroids_counts.append(centroids_counts[itera])
                        virtual_centroids_count=virtual_centroids_count+1
                   
                        if adding_history.has_key(ITERATION):
                            for one in adding_history[ITERATION]:
                                neighbor.append(one)
                                neighbor_centroids.append({})
                            print "Successfully adding neighbor responses"
                            added_new_neighbor=True
                            print neighbor,added_new_neighbor
                            
                        #print node_one.centroids_counts
                        calculate_centroids_count=calculate_centroids_count+1
                        print "Iteration ---> ",
                        print ITERATION
                        
                        #print "Calculate_centroids --> ",
                        #print virtual_centroids_count,"\n"

def sendPoll():
    '''Thread of sendpoll, keep sending POLL message to neighbors to get their centroids'''
    while(True):
        global pollsent, virtual_centroids_count, ITERATION, node_one,added_new_neighbor
        if node_one == None:
            continue
        else:
            global finishing_clustering_flag
            if finishing_clustering_flag ==True:
                global sendPoll_break
                sendPoll_break=True
                break
            #node_one.terminate_check(threshold)
            if (len(node_one.centroids)>1 and node_one.termination):
                #print "while loop is broken because of sendpoll"
                #break
                continue
            else:
                if (virtual_centroids_count == ITERATION):
                    iteration=virtual_centroids_count
                    if (iteration > pollsent and added_new_neighbor) : 
                        print 'Inside sendpoll ---> Iteration = ',iteration
                        tmp_neighbor=[]
                        global neighbor
                        for one in neighbor:
                            tmp_neighbor.append(one)
                        for peer in tmp_neighbor:
                            data_to_send=[iteration, [peer[0],peer[1]]]
                            node_conn.senddata('POLL', str(data_to_send))
                            print "POLL sent to --> ",peer[1]
                        pollsent=iteration
                        print "\n"
def addCentroids():
    count=0
    while(True):
        if node_one==None:
            continue
        else:
            global finishing_clustering_flag
            if finishing_clustering_flag ==True:
                global addCentroids_break
                addCentroids_break=True
                break
            #node_one.terminate_check(threshold)
            if (len(node_one.centroids)>1 and node_one.termination):
                #print "while loop is broken because of addcentroids"
                #break
                continue
            else:
                global ITERATION,centroids,centroids_counts, neighbor,threshold
                itera=ITERATION
                neighbor_data=[]
                count=0
                num_of_neighbor=len(neighbor)
                #print "Num of neighbor-> ",num_of_neighbor
                for one in neighbor_centroids:
                    if one.has_key(itera):
                        count=count+1
                        neighbor_data.append(one[itera])
                        #print count
                if count==num_of_neighbor  and centroids_counts.has_key(itera-1):
                    neighbor_data.append(centroids_counts[itera-1])
                    node_one.weighted_centroid_calculate(neighbor_data,num_of_neighbor+1)
                    centroids[itera]=node_one.centroids.pop()
                    node_one.centroids.append(centroids[itera])
                    
                    time.sleep(15)
                    print "Cent1",node_one.centroids,itera
                    ITERATION=ITERATION+1
                    
                    node_one.terminate_check(threshold)
                    print "Threshold is", threshold
                    #print "\nInside AddCentroids ----------"
                    print "Termination --->  ",node_one.termination
                    count=count+1
                    global neigbors_final_data
                    if node_one.termination:
		        neigbors_final_data=neighbor_data    
                    print 'Adding centroids in iteration'
                    #print node_one.centroids
                    
def Node_Initialize():
    global neighbor_iteration_reponses, ITERATION, neighbor,threshold
    global domain, start_datetime, end_datetime, column_selected
    global node_initialize_started
    node_initialize_started=True
    
    global database_input
    while(True):
        time.sleep(2)
        #print "waiting"
        if (len(neighbor)==len(neighbor_iteration_reponses)and len(neighbor) !=0) :
	    
            break
    print "Break in node initialize"
    print len(neighbor),"is the len of neighbor"
    print len(neighbor_iteration_reponses),"is the lenght of the responses"
    iterations=[]
    
    for one in neighbor_iteration_reponses:
        if one != []:
            iterations.append(int(one[0]))
    
    minimum_iteration=min(iterations)
    
    ITERATION=minimum_iteration
    print ITERATION,"is the iteration"
    
    for one in neighbor_iteration_reponses:
        if minimum_iteration == int(one[0]):
            initial_state = one[1]
            
    global column_selected
    global domain, start_time, end_time, column_selected
    global database_input
    domain=neighbor_iteration_reponses[0][2]
    start_datetime=neighbor_iteration_reponses[0][3]
    end_datetime=neighbor_iteration_reponses[0][4]
    tmp=start_datetime.split(',')
    start_time=datetime.datetime(eval(tmp[0]), eval(tmp[1]), eval(tmp[2]), eval(tmp[3]), eval(tmp[4]), eval(tmp[5]))
    tmp=end_datetime.split(',')
    end_time=datetime.datetime(eval(tmp[0]), eval(tmp[1]), eval(tmp[2]), eval(tmp[3]), eval(tmp[4]), eval(tmp[5]))
    column_selected = neighbor_iteration_reponses[0][5][:2]
    print "Column_selected",column_selected
    #print neighbor_iteration_reponses
    
    threshold = neighbor_iteration_reponses[0][5][2]
    print "Threshold",threshold
    
    
    '''database = open(database_filename,'r') 
    global database_input
    database_input=[]
    for line in database.readlines():
        tmp=line.split()
        str2float=[float(x) for x in tmp] 
        selected_data=[]
        for one in column_selected:
            selected_data.append(str2float[one])
        database_input.append(selected_data)
    database.close()   # node_one database'''
    paras=[domain,[start_time,end_time]]
    query_interface=Interface()
    database=query_interface.SQqueryData (paras)
    global database_lastrecord_len
    database_lastrecord_len=len(database[0])
    global database_input
    database_input=[]
    #print database
    for i in range (len(database[0])):
        tmp=[]
        for one in column_selected:
            tmp.append(database[one][i])
        database_input.append(tmp)
    #print database_input
    
    global node_one
    node_one=kmeans_node(initial_state,database_input,threshold)
    
    global centroids
    for i in range(ITERATION): 
        centroids[i]= initial_state
    #print centroids
        
    global centroids_counts
    new_centroids=node_one.one_round_kmeans(initial_state)
    node_one.local_centroid_counts(new_centroids)
    tmp=node_one.centroids_counts.pop()
    node_one.centroids_counts.append(tmp)
    for i in range(ITERATION-1): 
        centroids_counts[i]= tmp
    virtual_centroids_count=len(centroids_counts)
    #print virtual_centroids_count
    #print centroids_counts
    global initialized
    initialized=True

def collect_result():
    global collect_result_check,result_file_name,threshold,num_of_cluster,neigbors_final_data
    global num_exp,mutex_got,timestamp1,load_time_input,session_start_time,domain,res_date
    while (True):
        if node_one==None:
            continue
        else:
            global finishing_clustering_flag
            if finishing_clustering_flag ==True:
                global collect_result_break
                collect_result_break=True
                break
            if (len(node_one.centroids)>1 and node_one.termination and ITERATION == virtual_centroids_count and collect_result_check == True):
                
                timestamp2 = datetime.datetime(time.gmtime()[0],time.gmtime()[1],time.gmtime()[2],time.gmtime()[3],time.gmtime()[4],time.gmtime()[5])
                print timestamp1,timestamp2
                timespent=timestamp2 - timestamp1
                print "Time Spent",(timespent)
                
                
                #print "global_centroids",node_one.global_centroids
                #print "neigbors_final_data",neigbors_final_data
                
                global_data=[]
                peer_data=[]
                for i in range(len(neigbors_final_data)):
		    for j in range(len(node_one.global_centroids)):
		        peer_data.append((node_one.global_centroids[j],neigbors_final_data[i][j][1]))
		        #print "peer_data",peer_data
		    global_data.append(peer_data)
		    peer_data=[]
		    #print "global_data",global_data
                
                #result_date=str(time.gmtime()[0])+str(time.gmtime()[1])+str(time.gmtime()[2])+'_'+str(time.gmtime()[3])+str(time.gmtime()[4])
                
                final=len(node_one.centroids_counts)-1
                cluster=node_one.centroids_counts[final]
                num_exp +=1
                FILE=open("results/results_clustering_"+domain+'_'+res_date+".log","w")
                tmp_string="Final cluster is"
                FILE.write(tmp_string)
                if mutex_got:
                    FILE.write(' ---> I am the Master, Threshold = '+str(threshold)+', K = '+str(num_of_cluster))
                #FILE.write('\n Local Clusters: '+str(cluster)+'\n')
                #FILE.write('\n Global Clusters: '+str(global_data[len(global_data)-1])+'\n')
                #for i in range(len(global_data)-1):
                    #FILE.write('\nPeer '+str(i+2)+': '+str(global_data[i])+'\n')
               
                FILE.write('\n'+"Convergence time --> "+str(timespent))
                FILE.write('\n'+"Rounds --> "+str(ITERATION)+'\n\n')
                
                 #### by Cui:
                if query_interface.dolog:
                    cluster_centroids=[ i[0] for i in list(global_data[len(global_data)-1]) ]
                    cluster_counters=[ i[1] for i in list(global_data[len(global_data)-1]) ]
                    global_raw_centroids=[]
                    for i in cluster_centroids:
                        centroids_raw_metrics = [10**x for x in i];
                        global_raw_centroids.append((centroids_raw_metrics,cluster_counters.pop(0)))
                        #FILE.write(str(centroids_raw_metrics)+ '  ' + str(cluster_counters.pop(0)) + '\n') # pop the 1st one
                    
                    FILE.write('\n LOG.Clustering = Yes. \n')
                    FILE.write('\n Global Raw Clusters: '+str(global_raw_centroids)+'\n\n')
                    
                    
                    #FILE.write('\n Global Clusters: '+str(global_data[len(global_data)-1])+'\n')
                    for i in range(len(global_data)-1):
                        #FILE.write('\nPeer '+str(i+2)+': '+str(global_data[i])+'\n')
                        cluster_centroids=[ x[0] for x in list(global_data[i]) ]
                        cluster_counters=[ x[1] for x in list(global_data[i]) ]
                        global_raw_centroids=[]
                        for j in cluster_centroids:
                            centroids_raw_metrics = [10**x for x in j];
                            global_raw_centroids.append((centroids_raw_metrics,cluster_counters.pop(0)))
                            #FILE.write(str(centroids_raw_metrics)+ '  ' + str(cluster_counters.pop(0)) + '\n') # pop the 1st one
                        FILE.write('\n Peers '+ str(i+2) + ': ' + str(global_raw_centroids)+'\n\n')
                else:
		    FILE.write('\n LOG.Clustering = No. \n')
		    FILE.write('\n Global Clusters: '+str(global_data[len(global_data)-1])+'\n')
                    for i in range(len(global_data)-1):
                        FILE.write('\nPeer '+str(i+2)+': '+str(global_data[i])+'\n')
                #### end of Cui
                

                node_one_cluster,node_one_distance,node_one_clustered=node_one.final_cluster();
                num=len(node_one_cluster)
                sim_value=[]
                for i in range(num):
		    sim_vector=[]
		    for j in range(len(node_one.global_centroids)):
		        sim_vector.append(cos_sim(database_input[i],node_one.global_centroids[j]))
                    
                    sim_value.append(sim_vector)
                    #sim_value.append([cos_sim(database_input[i],node_one.global_centroids[0]),cos_sim(database_input[i],node_one.global_centroids[1]),cos_sim(database_input[i],node_one.global_centroids[2]),cos_sim(database_input[i],node_one.global_centroids[3])])
                    
                    tmp_str=str(database_input[i])+'   '+str(node_one_cluster[i])+'   '+str(node_one_distance[i])+'   '+str(sim_value[i])+'\n'
                    
                    
                    #### by Cui
                    #tmp_str=str(database_input[i])+'   '+str(node_one_cluster[i])+'   '+str(node_one_distance[i])+'   '+str(sim_value[i])+'\n'
                    if query_interface.dolog:
                        raw_metrics = [10**x for x in database_input[i]];
                        tmp_str=str(raw_metrics)+'   '+str(node_one_cluster[i])+'   '+str(node_one_distance[i])+'   '+str(sim_value[i])+'\n'
                    #### end of Cui
                    
                    FILE.write(tmp_str)
                
                # Calculate the average of the cosine similarity for each cluster
                avg_sim=[]
                for i in range(len(cluster)):
                    a=0
                    j=0
                    for one in sim_value:
                        #print one
                        if one[1]==i+1:
                            a+= one[0]
                            j+=1
                    if j>0:
                        avg_sim.append(a/j)
                    else:
                        avg_sim.append(0.0)   
                 #FILE.write(str(avg_sim))
                
                
                #FILE.write(str(node_one_cluster)+'\n')
                #FILE.write('the distance of every node to its centre'+'\n')
                #FILE.write(str(node_one_distance)+'\n')
                #FILE.write('lists of nodes in different cluster'+'\n')
                #FILE.write(str(node_one_clustered)+'\n')
                FILE.close()
                
                c= csv.writer(open("results/results_data_"+domain+'_'+res_date+".csv","wb"))
                temp=[]
                for i in range(num):
                    
                    temp=database_input[i]
                    temp.append(node_one_cluster[i])
                    temp.append(session_start_time[i])
                    temp.append(load_time_input[i])
                    c.writerow(temp)

                c2= csv.writer(open("results/convergence_results.csv","a"))
                tmp2=[]
                tmp2.append(domain)
                #tmp2.append(num_of_cluster)
                tmp2.append(threshold)
                tmp2.append(ITERATION)
                tmp2.append(timespent)
                tmp2.append(res_date)
                c2.writerow(tmp2)
                                
                
                global centroids, centroids_counts
                print "Collect_Result -------------"
                print "Final centroids and centroids_counts printed on the logfile\n"
                #print centroids
                #print centroids_counts
                collect_result_check=False
                #global finishing_clustering_flag
                finishing_clustering_flag=True
                finishing_clustering()
                break

def database_check():
    while True:
        
        if node_one == None:
            continue
        else:
            global finishing_clustering_flag
            if finishing_clustering_flag ==True:
                global database_check_break
                database_check_break=True
                break
            #print 'inside database check'
            global domain, start_datetime, end_datetime,database_lastrecord_len
            tmp=start_datetime.split(',')
            start_time=datetime.datetime(eval(tmp[0]), eval(tmp[1]), eval(tmp[2]), eval(tmp[3]), eval(tmp[4]), eval(tmp[5]))
            tmp=end_datetime.split(',')
            end_time=datetime.datetime(eval(tmp[0]), eval(tmp[1]), eval(tmp[2]), eval(tmp[3]), eval(tmp[4]), eval(tmp[5]))
            paras=[domain,[start_time,end_time]]
            
            #query_interface=Interface()
            #database_new=query_interface.SQqueryData (paras)
            #database_lastchange_len=len(database_new[0])
            database_lastchange_len=database_lastrecord_len            

            #print database_lastchange_len
            noUpdate = True
            if (database_lastchange_len==database_lastrecord_len or noUpdate):
                continue
            else:
                print "Found database update",
                print database_lastchange_len,
                print database_lastrecord_len
                database_input=[]
                for i in range (len(database_new[0])):
                    tmp=[]
                    for one in column_selected:
                        tmp.append(database_new[one][i])
                    database_input.append(tmp)
                #print database_input
                node_one.database_change(database_input)

                database_lastrecord_len=database_lastchange_len
            
                ITERATION=poll_iteration_record
                virtual_centroids_count=ITERATION
                print "Virtual centroids count",
                print virtual_centroids_count
                print ITERATION
                #new start state
                tmp_centroids=node_one.centroids.pop()
                node_one.centroids.append(tmp_centroids)
                new_centroids=node_one.one_round_kmeans(tmp_centroids)
                node_one.local_centroid_counts(new_centroids)
                centroids_counts[ITERATION-1]=node_one.centroids_counts.pop()
                node_one.termination=False
                global collect_result_check
                collect_result_check=True
                for one in neighbor:
                    node_conn.senddata('CHAN', str([one]))
                    
def mutex_locking(BTconnection,data):
    '''Handler for message MUTX, once peer has sent GETN msg, 
       he must receive this msg otherwise I won't start the algorithm'''
    global mutex_got
    print "Mutex has been locked by me"
    mutex_got=True
    


def finishing_clustering():
    ''' Called by collect_result'''
    global central_server_ip, central_server_port, column_selected_string, domain,flag_init_done
    global neighbor, centroids, centroids_counts, ITERATION, virtual_centroids_count, calculate_centroids_count, node_one, pollsent, poll_iteration_record , wait_counts, maxi_wait, neighbor_centroids, collect_result_check, adding_history, neighbor_iteration_reponses, database_lastrecord_len,finishing_clustering_flag, calculate_centroids_break,sendPoll_break, addCentroids_break,collect_result_break, database_check_break
    while (calculate_centroids_break==False or sendPoll_break==False or addCentroids_break==False or  database_check_break==False):
        continue
    node_conn.senddata('FINI','')
    print "FINI msg sent"
    #time.sleep(5)
    

def ending(BTconnection,data):
    '''Handler for message ENDD, once server got all FINI msgs from everyone, 
       he will reply with ENDD msg'''
    global central_server_ip, central_server_port, column_selected_string, domain, start_time, end_time,threshold, flag_init_done,flag_start,mutex_got
    global neighbor, centroids, centroids_counts, ITERATION, virtual_centroids_count, calculate_centroids_count, node_one, pollsent, poll_iteration_record , wait_counts, maxi_wait, neighbor_centroids, collect_result_check, adding_history, neighbor_iteration_reponses, database_lastrecord_len,finishing_clustering_flag, calculate_centroids_break,sendPoll_break, addCentroids_break,collect_result_break, database_check_break
    
    clustering_group=eval(data)
    #print 'Inside ending handler'
    #global neighbor
    #print "Neighbor ->",neighbor
    print "Clustering is DONE !! Clustering group ->",clustering_group
    
    
    neighbor=[] #varibale to store ip, port information of neigbors
    centroids={} #variable to store local centroids for every iteration
    centroids_counts={}#variable to store local centroids counts for every iteration
    ITERATION=1 # variable to trace ITERATION number
    virtual_centroids_count=0 #variable to trace centroids counts number
    num_of_cluster=0
    calculate_centroids_count=0#variable to record how many local centroids count have been calculated
    node_one=None# DKmeans object
    pollsent=0 # varibale to record now send poll message for which iteration
    poll_iteration_record =0 # variable to record the recent poll message heard from neighbors
    wait_counts={} # variable to record how many times a POLL message has waited for, drop nodes who didnot reply
    maxi_wait=5# Maximum times a poll message can wait
    neighbor_centroids=[] # varibale to store received centroids information of neigbors, it is a list of dictionaries, each one corresponds to one neighbor
    print "Before ending the threshold is",threshold
    threshold=0.0002  #reset the default threshold 
    collect_result_check=True
    adding_history={} # variable to record which node is added in which iteration
    neighbor_iteration_reponses=[] # variable to record the responses from neighbors to the message "ADDN"
    database_lastrecord_len=0
    load_time_input=[]
    session_start_time=[]
    finishing_clustering_flag=False
    calculate_centroids_break=False
    sendPoll_break=False
    addCentroids_break=False
    collect_result_break=False
    database_check_break=False
    flag_init_done=False
    flag_start=False
    #print "FLAG_INIT_DONE and FLAG_START= FALSE"
    if mutex_got:
        multiple_clust()
    mutex_got=False
    main()
    


def random_initial(k=None, data=None):
    """Return ``k`` rows sampled (with replacement) from ``data`` to serve as
    the initial k-means centroids.

    Both arguments default to the module globals ``num_of_cluster`` and
    ``database_input`` so the existing no-argument callers are unaffected.
    ``k`` may be an int or a numeric string (it comes from argv); it is
    parsed with int() instead of eval() so untrusted command-line input
    cannot execute arbitrary code.  Sampling is with replacement, exactly
    as before, so duplicate initial centroids remain possible.
    """
    global num_of_cluster, database_input
    if k is None:
        k = num_of_cluster
    if data is None:
        data = database_input
    database_length = len(data)
    randominitial = []
    # int() instead of eval(): same result for a numeric string, but cannot
    # execute code embedded in the command-line argument.
    for _ in range(int(k)):
        index = random.randint(0, database_length - 1)
        randominitial.append(data[index])
    return randominitial
 

def multiple_clust():
    """Advance to the next domain in the multi-domain clustering rotation.

    While further domains remain, schedule the next run almost immediately
    (fk_slot = 2); after the last domain, wrap the index back to 0 and wait
    a full day (60*60*24 seconds) before starting over.
    """
    global fk_slot,index_domain,multiple_domain
    last_index = len(multiple_domain) - 1
    if index_domain >= last_index:
        # Rotation exhausted: restart from the first domain after a day.
        index_domain = 0
        fk_slot = 60 * 60 * 24
    else:
        # More domains pending: move on almost immediately.
        index_domain = index_domain + 1
        fk_slot = 2

def cos_sim(v0, v1):
    """Return the cosine similarity of two equal-length numeric vectors.

    The result lies in [-1, 1] for non-degenerate input.  If either vector
    has zero norm (an all-zero or empty vector) the similarity is undefined;
    0.0 is returned instead of raising ZeroDivisionError as the previous
    implementation did.  Uses math.fsum for an accurate float accumulation
    and avoids shadowing the builtin ``sum``.
    """
    dot = math.fsum(a * b for a, b in zip(v0, v1))
    norm0 = math.sqrt(math.fsum(a * a for a in v0))
    norm1 = math.sqrt(math.fsum(b * b for b in v1))
    denom = norm0 * norm1
    if denom == 0.0:
        return 0.0  # degenerate vector: no meaningful direction to compare
    return dot / denom

 
 
def main():
    global node_conn,query_interface, neighbor, column_selected,domain,start_time,end_time, database_input,node_one,centroids,flag_start,database_lastrecord_len
    global central_server_ip, central_server_port, column_selected_string, num_of_cluster, domain, start_datetime, end_datetime, threshold, log,node_initialize_started,slot,mutex_got,index_domain,multiple_domain,fk_slot
    calculating=threading.Thread(target = calculate_centroids, args = [] ) 
    sendpoll=threading.Thread(target = sendPoll, args = [] )
    addcentroids=threading.Thread(target=addCentroids,args=[])
    output_result=threading.Thread(target=collect_result,args=[])
    
    database_check_thread=threading.Thread(target=database_check,args=[])

    calculating.start()
    sendpoll.start()
    addcentroids.start()
    output_result.start()
    if not node_initialize_started:
        node_initialize=threading.Thread(target=Node_Initialize,args=[])
        node_initialize.start()
    database_check_thread.start()
    print "Thread pool started"
    
    
    
    #slot=random.randint(60,120 )
    slot=fk_slot
    while (slot>0 and not flag_start and len(arguments)!=4):
        slot -=1
        
        if not slot:
            #back_off=random.random()
            #print "ready to start in",str(back_off*10),"sec"
            #time.sleep(back_off*10)
          
          if flag_start:
            break
          else:
            print "Try to start new clustering !!!"
            #start_clustering=raw_input("enter clustering input:\n")
            
            
            print 'Before master_conn'
            node_conn.senddata('GETN','') 
            print "Wait 2 sec..."
            time.sleep(2)
            if not mutex_got:
                print "Mutex locked\n\n"
                break

            flag_start = True
            print "New clustering starting!!"
            central_server_ip=arguments[1]
            central_server_port=(arguments[2])
            print central_server_ip,central_server_port
            column_selected_string=arguments[3]
            column_selected=arguments[3].split(',')
            for i in range(len(column_selected)):
                column_selected[i]=int(column_selected[i])
            print column_selected
            num_of_cluster=arguments[4]

            multiple_domain_string=arguments[5]
            multiple_domain=arguments[5].split(',')
            
            print multiple_domain, len(multiple_domain),index_domain           

            #domain=arguments[5]
            domain=multiple_domain[index_domain]

            start_datetime=arguments[6]
            tmp=start_datetime.split(',')
            start_time=datetime.datetime(eval(tmp[0]), eval(tmp[1]), eval(tmp[2]), eval(tmp[3]), eval(tmp[4]), eval(tmp[5]))
            end_datetime=arguments[7]
            tmp=end_datetime.split(',')
            end_time=datetime.datetime(eval(tmp[0]), eval(tmp[1]), eval(tmp[2]), eval(tmp[3]), eval(tmp[4]), eval(tmp[5]))
            
            
            
            threshold=float(arguments[8])
            print "DOLOG",log
            #result_file_name=arguments[9]
            
            global timestamp1,res_date
            timestamp1 = datetime.datetime(time.gmtime()[0],time.gmtime()[1],time.gmtime()[2],time.gmtime()[3],time.gmtime()[4],time.gmtime()[5])
            #print timestamp1
            res_date=str(time.gmtime()[0])+str(time.gmtime()[1])+str(time.gmtime()[2])+'_'+str(time.gmtime()[3])+str(time.gmtime()[4])
                
            
            
            
            
#===============================================================================================================
            
            # ----------------------------------------------------------
            #First step: get neigbor hood information
            
            if len(neighbor)==0:
                print "WARNING !! Haven't got any neighbor yet."
                print "I'll wait for someone else..."
                time.sleep(2)
                print "Unlocking the mutex"
                node_conn.senddata('FMUT','') 
                break
               
            tmp_neighbor=[]
            paras=[domain,[start_datetime,end_datetime]]
            for one in neighbor:
                tmp_neighbor.append(one)
            for one in tmp_neighbor:
                clustering_info=[paras,column_selected,threshold,log,res_date]
                data_to_send=[clustering_info,[one[0],one[1]]]
                str_to_send=str(data_to_send)
                print 'str INIT to send', str_to_send
                node_conn.senddata('INIT', str_to_send) 
            
            # ------------------------------------------------------
            # Second step: initialize clustering request to neigbors
  
            #database = open(database_filename,'r') 
            #print database_filename
            #print column_selected
            global load_time_input,session_start_time
            
            
            paras=[domain,[start_time,end_time],log]
            query_interface=Interface()
            #query_interface.updateData (paras)
            
            database=query_interface.SQqueryData (paras)
            database_lastrecord_len=len(database[0])
            #Database Updated
            print database,database_lastrecord_len
            #time.sleep(30)
            
            load_time_input=database[4]
            session_start_time=database[5]
            
            print "load_time -->",load_time_input
            
            database_input=[]
            for i in range (len(database[0])):
                tmp=[]
                for one in column_selected:
                    tmp.append(database[one][i])
                database_input.append(tmp)

            randominitial=random_initial()
            print "random initial",
            print randominitial
            #print "data_base_selected ----> ",database_input
            #print "The threshold is for the node",threshold
            node_one=kmeans_node(randominitial,database_input,threshold)
            
            
            
            
            # -----------------------------------------------------------
            # Third step, local node initialization

            centroids[0]=node_one.centroids[0]
            initial_state_string=str(randominitial)
            tmp_neighbor=[]
            for one in neighbor:
                tmp_neighbor.append(one)
            for one in tmp_neighbor:
                data_to_send=[randominitial,[one[0],one[1]]]
                node_conn.senddata('STAR', str(data_to_send))
                print 'str STAR to send', str(data_to_send)
            
            
            # ----------------------------------------------------
            # Forth step, send initialization info to neighbors    
            break
            
        print "Waiting slot --> ",slot
        time.sleep(1)
     
    print "OUT OF MAIN"
    
    
    
    
    
    
    
        
        
#===============================================================================================================    
# Register the protocol message handlers with the peer layer.  Order matches
# the original registration sequence; message codes are four characters
# ('OLD ' deliberately includes a trailing space).
_HANDLERS = (
    ('NEBO', Store_NeighborInfo),
    ('INIT', Get_Column_Selected),
    ('STAR', get_initstate),
    ('POLL', get_centroids),
    ('RESP', store_neibor_centroids),
    ('OLD ', dynamic_start),
    ('ADDN', add_request_handler),
    ('ADDI', add_reponse_handler),
    ('TEST', conn_test),
    ('LOST', node_lost_handler),
    ('CHAN', up_to_date),
    ('ENDD', ending),
    ('MUTX', mutex_locking),
)
for _msg, _handler in _HANDLERS:
    peer.addhandler(_msg, _handler)

#===============================================================================================================
# Run the peer receive loop in a background thread, then enter main() here.
mainloop = threading.Thread(target=peer.mainloop, args=[node_conn])
mainloop.start()
main()
