from peer import BTPeer
from peer import Node_failed
from distributed_kmeans import kmeans_node
import threading
import time
import random
import sys
import os
import socket

arguments=sys.argv
if len(arguments)!=8:
   print "the program is terminated because input parameters are not efficient"
   print "five parameters are required: threshold, neighborhood information, file name of local database, selected column numbers,file name where you want store your result"
   sys.exit(0)

threshold=float(arguments[6])
neighbor_filename=arguments[2]
database_filename=arguments[3]
column_selected_string=arguments[4]
column_selected=arguments[4].split(',')
for i in range(len(column_selected)):
    column_selected[i]=int(column_selected[i])
print column_selected
result_file_name=arguments[7]
num_of_cluster=arguments[5]
listenport=arguments[1]
node_pool=[]
#filename=arguments[8]
#ip=arguments[7]

# Discover this host's outward-facing IP address by opening a throwaway TCP
# connection and reading the local end of the socket.
# NOTE(review): requires outbound network access to www.google.com:80 at startup.
s = socket.socket( socket.AF_INET, socket.SOCK_STREAM ) 
s.connect( ( "www.google.com", 80 ) ) 
local_ip = s.getsockname()[0] 
s.close() 

#neighbor_filename='master_neighbor'
# Load the neighbour list: one "ip port" pair per line.  Each entry is stored
# as [ip, port]; the port is parsed with eval (file contents are trusted).
neighbor_file = open(neighbor_filename,'r') 
neighbor=[]
for line in neighbor_file.readlines():
    tmp=line.split()
    element=[tmp[0],eval(tmp[1])]
    neighbor.append(element)
    node_pool.append(element)
neighbor_file.close()  


#neighbor=[['127.0.0.1',10001],['127.0.0.1',10002]]#address information of neighbors in format of [ip,port] 
#database_filename='database_adsl.txt'#name of local database in .txt
#threshold=0.02
#initial_state=[[0.66666666666666663, 0.034270867900704972, 0.20071472031246795, 0.099449826575768452, 0.039685039370078744], [1.0, 0.033101148513569295, 0.19012375663413314, 0.034740277662774967, 0.0], [1.0, 0.66540877856108016, 0.070473013406133142, 0.11902031409566394, 0.10723404255319149], [0.0, 0.0091420636911972442, 0.026809941913592768, 0.090849588158522496, 0.04581818181818182]]#initial state
#three inputes required for peer master


# --- Shared mutable state used by the worker threads and message handlers ---
min_database=[]          # per-peer column minima (unused; legacy min/max phase below is commented out)
max_database=[]          # per-peer column maxima (unused; legacy min/max phase)
peer=[] # store the information (ip,port) of peers 
global_min=[]            # global column minima (unused; legacy min/max phase)
global_max=[]            # global column maxima (unused; legacy min/max phase)
initial_state=[]         # unused placeholder for an initial centroid state
neighbor_centroids=[]    # one dict per neighbour: iteration-string -> that neighbour's centroids
database_adsl_input=[]   # local database records (selected columns only), loaded later
peer_master=None         # BTPeer instance, created further down
wait_counts={}           # iteration-string -> number of 'wait' replies received
maxi_wait=2              # max 'wait' replies before giving up re-polling a peer
database_last_record_time=os.stat(database_filename).st_mtime  # mtime watched by database_check()
ITERATION=1              # global iteration counter, advanced by addCentroids()
virtual_centroids_count=0  # iterations completed locally by calculate_centroids()
centroids_counts={}      # iteration -> local centroid/count state for that round
centroids={}             # iteration -> merged centroids used to start that round
poll_iteration_record=0  # last iteration number a neighbour polled us for
pollsent=0               # last iteration we already sent 'POLL' messages for
adding_history={}        # iteration -> list of [ip,port] nodes that joined during it
calculate_centroids_count=0  # guard so each iteration is computed only once
collect_result_check=True    # True until the final result has been written to disk
message_pool={}  
# message_pool: source ip -> last change reason seen; de-duplicates 'CHAN' floods


# One empty centroid store per neighbour, indexed in step with `neighbor`.
for one in neighbor:
    neighbor_centroids.append({})


# NOTE(review): dead code -- this entire triple-quoted string is a
# commented-out earlier min/max normalisation phase; it is a no-op at runtime.
'''def store_min_max(BTconnection,msgdata):
    
    min_max= [float(n) for n in msgdata.split(',')]
    mini=[]
    maxi=[]
    length=len(min_max)
    count=0
    for data in min_max:
        if count<(0.5*length):
           mini.append(data)
        else:
           maxi.append(data)
        count=count+1
    min_database.append(mini)
    max_database.append(maxi)
    
def global_min_max():
    # calculate global min and max 
    mini=[] # place result of global mini here
    dimension=len(min_database[0])
    for i in range(dimension):
        tmp=[]
        for one in min_database:
            tmp.append(one[i])
        mini.append(min(tmp))
    global global_min
    global_min=mini
    maxi=[] # place result of global maxi here
    for i in range(dimension):
        tmp=[]
        for one in max_database:
            tmp.append(one[i])
        maxi.append(max(tmp))
    global global_max
    global_max=maxi
    min_max=mini+maxi
    min_max_string = ','.join(str(n) for n in min_max)
    
    return min_max_string'''

def get_centroids(BTconnection,data):
    """'POLL' handler: a neighbour asks for our centroids of a given iteration.

    `data` is the iteration number.  Replies with 'RESP' carrying either
    "<iter>wait" (not ready yet) or "<iter><centroids-repr>".  Also records
    the iteration in poll_iteration_record so a later database change or
    node join can rewind to the point the neighbours last asked about.
    """
    peercon=BTconnection
    num_of_iteration=int(data)
    global poll_iteration_record
    poll_iteration_record=num_of_iteration
    #if node_master != None:
       #length=len(node_master.centroids_counts)
    itera_str=str(num_of_iteration)
    if node_master == None:
       # local k-means node not initialised yet -> ask the peer to retry
       response=itera_str+'wait'
       peercon.senddata('RESP', response)
    else:
       #node_master.terminate_check(threshold)
       if(len(node_master.centroids)>1 and node_master.termination):
          # converged: always answer with the final centroid counts.
          # pop+append peeks at the last element without removing it.
          tmp_centroids=node_master.centroids_counts.pop()
          node_master.centroids_counts.append(tmp_centroids)
          centroids_string = str(tmp_centroids) 
          response=itera_str+centroids_string
          peercon.senddata('RESP', response)
       else:
          # still iterating: serve the previous iteration's counts if ready
          if not centroids_counts.has_key(num_of_iteration-1):
             response=itera_str+'wait'
             peercon.senddata('RESP', response)
          else:
             # local name shadows the module-level `centroids` dict (read-only here)
             centroids=centroids_counts[num_of_iteration-1]
             centroids_string = str(centroids) 
             response=itera_str+centroids_string
             peercon.senddata('RESP', response)
       
def sendPoll():
    
    while(True):
       global pollsent, virtual_centroids_count, ITERATION, node_master
       if node_master == None:
          continue
       else:
          #node_master.terminate_check(threshold)
          if(len(node_master.centroids)>1 and node_master.termination):
             #print "while loop is broken because of sendpoll"
             #break
             continue
          else:
             if (virtual_centroids_count == ITERATION):
                #print pollsent
                iteration=virtual_centroids_count
                if iteration > pollsent: 
                   print 'inside sendpoll'
                   print iteration
                   for peer in neighbor:
                       '''try:
                          s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                          s.connect((peer[0],peer[1]))
                          s.close()
                       except:
                          print 'fail to connect to the peer'
                          neighbor.remove(peer)
                          print neighbor
                          pass
                          return
                       else:'''
                       try:
                           peer_master.connectandsend(peer[0], peer[1], 'POLL', str(iteration) , pid=None, waitreply=True )
                       except Node_failed:
                           print "node", 
                           print peer[0]
                           print peer[1]
                           print "failed"
                           global neighbor
                           neighbor.remove([peer[0],peer[1]])
                           print neighbor
                       #peer_master.connectandsend(peer[0], peer[1], 'POLL', str(iteration) , pid=None, waitreply=True )
                   pollsent=iteration
       

def addCentroids():
    """Background thread: merge neighbour centroids into the next iteration.

    Waits until ALL neighbours have reported centroids for the current
    ITERATION (collected by the 'RESP' handler into neighbor_centroids),
    then combines them with our own previous counts via
    weighted_centroid_calculate, stores the merged centroids, advances
    ITERATION and re-evaluates the termination condition.
    """
    count=0
    while(True):
       if node_master==None:
          continue
       else:
          #node_master.terminate_check(threshold)
          if(len(node_master.centroids)>1 and node_master.termination):
             #print "while loop is broken because of addcentroids"
             #break
             continue
          else:
             global ITERATION,centroids,centroids_counts, neighbor
             itera=str(ITERATION)
             neighbor_data=[]
             count=0
             num_of_neighbor=len(neighbor)
             # count how many neighbours have already answered this iteration
             for one in neighbor_centroids:
                 if one.has_key(itera):
                    count=count+1
                    neighbor_data.append(one[itera])
                    #print count
             # proceed only when every neighbour answered and our own
             # previous-iteration counts exist
             if count==num_of_neighbor  and centroids_counts.has_key(eval(itera)-1):
                neighbor_data.append(centroids_counts[eval(itera)-1])
                node_master.weighted_centroid_calculate(neighbor_data,num_of_neighbor)
                # pop+append peeks at the freshly merged centroids
                centroids[eval(itera)]=node_master.centroids.pop()
                node_master.centroids.append(centroids[eval(itera)])
                ITERATION=ITERATION+1
                node_master.terminate_check(threshold)
                print node_master.termination
                count=count+1
                print 'adding centroids in iteration',
                print node_master.centroids
       
       

def store_neibor_centroids(BTconnection,data):
    """'RESP' handler: store a neighbour's reply to one of our 'POLL's.

    `data` is either "<iter>wait" (neighbour not ready: wait 5s and re-poll
    up to maxi_wait times, dropping the peer if it is unreachable) or
    "<iter><centroids-repr>", in which case the centroids are parsed and
    filed under that neighbour's slot in neighbor_centroids.

    NOTE(review): the sender is identified via getpeername() on the incoming
    connection -- this yields the remote *ephemeral* port, so matching it
    against the listen ports stored in `neighbor` may mis-index; confirm
    against BTPeer's connection model.
    """
    peercon=BTconnection
    neighbor_ip,neighbor_port=peercon.s.getpeername()
    #print type(neighbor_ip)
    if 'wait' in data:
       time.sleep(5)
       # iteration number is everything before the 'w' of "wait"
       data_list=data.split('w',1)
       itera=data_list[0]
       if wait_counts.has_key(itera):
           wait_counts[itera]=wait_counts[itera]+1
       if not wait_counts.has_key(itera):
           wait_counts[itera]=1
       if wait_counts[itera]<= maxi_wait:
           # probe the peer before re-polling; drop it if unreachable
           try:
              s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
              s.connect((neighbor_ip,neighbor_port))
           except:
                  print 'fail to connect to the peer'
                  neighbor.remove([neighbor_ip,neighbor_port])
                  print neighbor
                  return
           else:
              s.close()
              peer_master.connectandsend(neighbor_ip, neighbor_port, 'POLL', str(itera) , pid=None, waitreply=True )
       
    else:
       # payload is "<iter>[...centroids...]"; split at the first '['
       data_list=data.split('[',1)
       length=len(data)
       itera=data_list[0]
       centroids_str='['+data_list[1]
       # eval reconstructs the nested list (trusted peer network assumed)
       tmp_centroids=eval(centroids_str)
       count=0
       # locate this neighbour's slot so centroids land in the matching dict
       for one in neighbor:
           if one[0]==neighbor_ip and one[1]==neighbor_port:
              break  
           count=count+1
       neighbor_centroids[count][itera]=tmp_centroids
    #print neighbor_centroids
         
    

def calculate_centroids():
    """Background thread: run one local k-means round per global iteration.

    Whenever the merged centroids for iteration `virtual_centroids_count`
    exist (ITERATION has moved ahead), run one_round_kmeans on them, record
    the resulting local centroid counts, and advance the local counters.
    Nodes queued in adding_history for the current ITERATION are attached
    to the neighbour list at this point.
    """
    global calculate_centroids_count
    
    while(True):
       if node_master == None:
          continue
       else:
          # NOTE(review): `centroids_count` and `collect_result` in this
          # global list are typos (no such globals); harmless no-ops.
          global virtual_centroids_count, ITERATION, centroids_count,centroids, adding_history,collect_result
          #node_master.terminate_check(threshold)
          if(len(node_master.centroids)>1 and node_master.termination and ITERATION ==virtual_centroids_count):
             #print "while loop is broken because of calculate_centroids"
             #break
             continue
          else:
             
             # only compute when the merged centroids for this round exist
             # and this round was not already computed
             if virtual_centroids_count < ITERATION:#len(node_master.centroids):
                if virtual_centroids_count >= calculate_centroids_count:
                   itera=virtual_centroids_count
                   
                   
                   new_centroids=node_master.one_round_kmeans(centroids[itera])
                   node_master.local_centroid_counts(new_centroids)
                   # pop+append peeks at the freshly computed counts
                   centroids_counts[itera]=node_master.centroids_counts.pop()
                   print centroids_counts
                   node_master.centroids_counts.append(centroids_counts[itera])
                   virtual_centroids_count=virtual_centroids_count+1
                   
                   # attach any nodes that joined during this iteration
                   if adding_history.has_key(ITERATION):
                       for one in adding_history[ITERATION]:
                           neighbor.append(one)
                           neighbor_centroids.append({})
                       print "successfully adding neighbor"
                       print neighbor    
                   #print node_master.centroids_counts
                   calculate_centroids_count=calculate_centroids_count+1
                   print "iteration"
                   print ITERATION
                   print "calculate_centroids"
                   print virtual_centroids_count
                   print node_master.termination
                   print collect_result_check
def collect_result():
    """Background thread: write the final clustering result once converged.

    When the algorithm has terminated and all counters agree, writes the
    final centroid counts plus, per database record, its cluster assignment
    and distance, to `result_file_name`.  collect_result_check is cleared
    so the file is written only once per convergence.
    """
    global collect_result_check
    while (True):
       if node_master==None:
          continue
       else:
          if (len(node_master.centroids)>1 and node_master.termination and ITERATION == virtual_centroids_count and collect_result_check == True):
            
             final=len(node_master.centroids_counts)-1
             cluster=node_master.centroids_counts[final]
             '''denormalization = lambda x,y,z : x*(y-z)+z
             denormalized_final_cluster=[]
             for one in cluster:
                 tmp=map(denormalization,one[0],global_max,global_min)
                 denormalized_final_cluster.append(tmp)
             #filename="node_master_result.txt"'''
             FILE=open(result_file_name,"w")
             tmp_string="final cluster is"
             FILE.write(tmp_string+'\n')
             FILE.write(str(cluster)+'\n')
             # per-record assignment: centre index and distance to it
             node_master_cluster,node_master_distance,node_master_clustered=node_master.final_cluster();
             num=len(node_master_cluster)
             #print num
             #print len(database_adsl_input)
             for i in range(num):
                 tmp_str=str(database_adsl_input[i])+'   '+str(node_master_cluster[i])+'   '+str(node_master_distance[i])+'\n'
                 FILE.write(tmp_str)
             #FILE.write(str(node_master_cluster)+'\n')
             #FILE.write('the distance of every node to its centre'+'\n')
             #FILE.write(str(node_master_distance)+'\n')
             #FILE.write('lists of nodes in different cluster'+'\n')
             #FILE.write(str(node_master_clustered)+'\n')
             FILE.close()
             global centroids, centroids_counts
             print "final centroids and centroids_counts"
             print centroids
             print centroids_counts
             # guard: do not rewrite the file until something changes again
             collect_result_check=False
         
def database_check():
    while(True):
        global database_last_record_time, node_master,collect_result_check
        database_lastchange_time=os.stat(database_filename).st_mtime
        #print database_last_record_time
        #print database_lastchange_time
        if (database_lastchange_time==database_last_record_time):
            continue
        else:
            
            database = open(database_filename,'r') 
            database_input=[]
            for line in database.readlines():
                tmp=line.split()
                str2float=[float(x) for x in tmp] 
                selected_data=[]
            for one in column_selected:
                selected_data.append(str2float[one])
            database_input.append(selected_data)
            database.close()   # node_master database
            node_master.database_change(database_input)
            
            #print database_last_record_time
            database_last_record_time=database_lastchange_time
           #print database_last_record_time
            # Reset ITERATION and virtual_centroids_count, let node becomes active again
            ITERATION=poll_iteration_record
            virtual_centroids_count=ITERATION
            print virtual_centroids_count
            print ITERATION
            #new start state
            tmp_centroids=node_master.centroids.pop()
            node_master.centroids.append(tmp_centroids)
            new_centroids=node_master.one_round_kmeans(tmp_centroids)
            node_master.local_centroid_counts(new_centroids)
            centroids_counts[ITERATION-1]=node_master.centroids_counts.pop()
            node_master.termination=False
            global collect_result_check
            collect_result_check=True
            for peer in neighbor:
                peer_master.connectandsend(peer[0], peer[1], 'CHAN', str([local_ip,eval(listenport)])+';'+str([local_ip,eval(listenport)])+';'+'UPDATING'  , pid=None, waitreply=False)
            global message_poll
            message_pool[local_ip]='UPDATING'

def up_to_date(BTconnection,data):
    message=data.split(';')
    meesage_source=eval(message[0])
    changing_source=eval(message[1])
    changing_reason=message[2]
    print meesage_source
    print changing_source
    print changing_reason
    
    if message_pool.has_key(changing_source[0]) and (changing_reason in message_pool[changing_source[0]]):
        print "already saw this message"
        pass
    else:
        ITERATION=poll_iteration_record
        virtual_centroids_count=ITERATION
        #print virtual_centroids_count
        #print ITERATION
        #new start state
        tmp_centroids=node_master.centroids.pop()
        node_master.centroids.append(tmp_centroids)
        new_centroids=node_master.one_round_kmeans(tmp_centroids)
        node_master.local_centroid_counts(new_centroids)
        centroids_counts[ITERATION-1]=node_master.centroids_counts.pop()
    
        global neighbor
        print neighbor
        #print source_ip
        #print source_port
        global message_pool
    
 
        node_master.termination=False
        global collect_result_check
        collect_result_check=True
        neighbor_rest=[]
        for one in neighbor:
            neighbor_rest.append(one)
        neighbor_rest.remove(meesage_source)
        print neighbor_rest
        for peer in neighbor_rest:
            peer_master.connectandsend(peer[0], peer[1], 'CHAN', str([local_ip,eval(listenport)])+';'+str(changing_source)+';'+changing_reason, pid=None, waitreply=False)
        message_pool[changing_source[0]]=changing_reason 
        
def add_to_pools(BTconnection,data):
    peercon=BTconnection
    #ip_port=data.split(' ',1)
    ip,port=peercon.s.getpeername()
    port=eval(data)
    global node_pool
    length=len(node_pool)
    first=random.randint(0, length-1)
    while (True):
        second=random.randint(0, length-1)
        if second != first:
            break
    str_to_send=str(node_pool[first])+';'+str(node_pool[second])
    print str_to_send
    peer_master.connectandsend(ip, port, 'ADDR', str_to_send, pid=None, waitreply=False)
    node_pool.append([ip,port])
    print "node_pool"
    print node_pool

def add_request_handler(BTconnection,data):
    """'ADDN' handler: a new node formally requests to join the clustering.

    Replies with 'ADDI' carrying "<iteration>;<centroids>;<columns>" so the
    newcomer can start from our current state.  If we had already
    terminated, the iteration counters are rewound so the whole network
    resumes with the new member; other neighbours are notified via
    'CHAN'/'ADDING', and the join is queued in adding_history so
    calculate_centroids attaches the node at the right iteration.
    """
    peercon=BTconnection
    #ip_port=data.split(' ',1)
    ip,port=peercon.s.getpeername()
    port=eval(data)
    global neighbor,ITERATION,centroids,node_master,virtual_centroids_count,pollsent,centroids_counts,calculate_centroids_count, collect_result_check
    #neighbor.append([ip,port,ITERATION+1])
    if not node_master.termination:
        # still running: hand over the current iteration's starting state
        response=str(ITERATION)+';'+str(centroids[ITERATION-1])+';'+ str(column_selected)
    else:
        # already converged: restart from the last polled iteration
        last_iteration=max(k for k, v in centroids.iteritems())
        last_centroids=centroids[last_iteration]
        response=str(poll_iteration_record)+';'+str(last_centroids)+';'+str(column_selected)
        
        ITERATION=poll_iteration_record
        virtual_centroids_count=ITERATION-1
        pollsent=ITERATION-1
        #print "pollsent"
        #print pollsent
        calculate_centroids_count=ITERATION-1
        collect_result_check=True
        #print ITERATION
        #print neighbor_centroids
        # discard stale neighbour answers for the iteration being redone
        for one in neighbor_centroids:
            if one.has_key(str(ITERATION)):
                one.pop(str(ITERATION))
        #print neighbor_centroids
        '''tmp_centroids=node_master.centroids.pop()
        node_master.centroids.append(tmp_centroids)
        new_centroids=node_master.one_round_kmeans(tmp_centroids)
        node_master.local_centroid_counts(new_centroids)
        centroids_counts[ITERATION-1]=node_master.centroids_counts.pop()
        node_master.termination=False'''
        
        # pop+append peeks at the last centroids without removing them
        tmp_centroids=node_master.centroids.pop()
        node_master.centroids.append(tmp_centroids)
        centroids[virtual_centroids_count]=tmp_centroids
        node_master.termination=False
        
    # NOTE(review): alias, not a copy -- neighbor_rest IS neighbor here
    neighbor_rest=neighbor

    print neighbor_rest
    for peer in neighbor_rest:
        peer_master.connectandsend(peer[0], peer[1], 'CHAN', str([local_ip,eval(listenport)])+';'+str([ip,port])+';'+'ADDING', pid=None, waitreply=False)
    
    global message_pool
    message_pool[ip]='ADDING'
    
    # queue the newcomer so calculate_centroids attaches it at ITERATION
    global adding_history
    if adding_history.has_key(ITERATION):
        adding_history[ITERATION].append([ip,port])
    else:
        adding_history[ITERATION]=[[ip,port]]
        
    peer_master.connectandsend(ip, port, 'ADDI', response, pid=None, waitreply=False)

# Create the P2P endpoint and register one handler per 4-byte message type.
peer_master=BTPeer(5,listenport,None,local_ip);

#master node send initial message to get local max and min values
#peer_master.addhandler('INIT',store_min_max)
peer_master.addhandler('POLL',get_centroids)          # neighbour requests our centroids
peer_master.addhandler('RESP',store_neibor_centroids) # neighbour answers our poll
peer_master.addhandler('CHAN', up_to_date)            # change notification (update/join)
peer_master.addhandler('ADD ', add_to_pools)          # newcomer asks for attachment points
peer_master.addhandler('ADDN', add_request_handler)   # newcomer formally joins

# DATABASE PREPARATION
database = open(database_filename,'r') 
print database_filename
print column_selected
global column_selected
database_adsl_input=[]
for line in database.readlines():
    tmp=line.split()
    str2float=[float(x) for x in tmp] 
    selected_data=[]
    
    for one in column_selected:
        selected_data.append(str2float[one])
        
    database_adsl_input.append(selected_data)

database.close()

'''mini=[]
maxi=[]
for one in range(len(database_adsl_input[0])):
    tmp=[]
    for data in database_adsl_input:
        tmp.append(data[one])
    mini.append(min(tmp))
    maxi.append(max(tmp))
min_database.append(mini)
max_database.append(maxi)'''

# Tell every neighbour which columns to use: 'INIT' carries the raw
# comma-separated column string from the command line.
num_of_peers=len(neighbor)
for one in neighbor:
    neighbor_ip=one[0]
    neighbor_host=one[1]
    peer_master.connectandsend(one[0], one[1], 'INIT', column_selected_string, pid=None, waitreply=None )

#if len(mi_database)==len(peer) check whether all the peers have returned their local mini and maxi
#global_min,global_max=global_min_max()
#while(True):
'''if len(min_database)==num_of_peers+1:
      break

#STEP TWO return global min and maxi to all nodes
for one in neighbor:
    neighbor_ip=one[0]
    neighbor_host=one[1]
    peer_master.connectandsend(one[0], one[1], 'GLOB', global_min_max(), pid=None, waitreply=None )

print global_min_max()
#sending initial state to all nodes'''

'''def normalize_initial(element, maxi, mini):
    return (element-mini)/(maxi-mini)

global num_of_cluster
database_length=len(database_adsl_input)
randominitial=[]
print num_of_cluster
for i in range(eval(num_of_cluster)):
    index=random.randint(0, database_length-1)
    tmp=map(normalize_initial,database_adsl_input[index],global_max,global_min)
    randominitial.append(tmp)'''

def random_initial():
    """Pick `num_of_cluster` random records from the local database to
    serve as the initial centroids.

    Returns a list of records (each a list of floats).
    NOTE(review): sampling is with replacement, so duplicate initial
    centroids are possible -- presumably tolerated by kmeans_node.
    """
    global num_of_cluster, database_adsl_input
    randominitial = []
    # int() replaces eval(): num_of_cluster comes straight from the
    # command line, and eval on external input is unsafe.
    for i in range(int(num_of_cluster)):
        randominitial.append(random.choice(database_adsl_input))
    return randominitial
        
global centroids   # NOTE(review): no-op -- `global` has no effect at module level
# Build the k-means node from a random initial state and share that state
# with every neighbour via 'STAR' so all peers start identically.
randominitial=random_initial()
print randominitial
node_master=kmeans_node(randominitial,database_adsl_input,threshold)

'''centroids[0]=node_master.centroids[0]
normalized_file=open(filename,'w');
for element in node_master.normaized_database:
    for one in element:
        normalized_file.write(str(one)+" ")
    normalized_file.write('\n')'''

centroids[0]=node_master.centroids[0]  # iteration-0 centroids seed the first round
initial_state_string=str(randominitial)
for one in neighbor:
    neighbor_ip=one[0]
    neighbor_host=one[1]
    peer_master.connectandsend(one[0], one[1], 'STAR', initial_state_string, pid=None, waitreply=None )


#self instantiated



# Spin up the worker threads: the P2P message loop plus the four
# cooperating loops that drive the distributed k-means rounds.
# NOTE(review): threads are non-daemon, so the process only exits when
# every loop ends -- confirm that is the intended shutdown model.
mainloop = threading.Thread( target = peer_master.mainloop, args = [] ) 
calculating=threading.Thread(target = calculate_centroids, args = [] ) 
sendpoll=threading.Thread(target = sendPoll, args = [] )
addcentroids=threading.Thread(target=addCentroids,args=[])
output_result=threading.Thread(target=collect_result,args=[])

mainloop.start()
calculating.start()
sendpoll.start()
addcentroids.start()
output_result.start()
#peer_master.connectandsend('localhost', 10000, 'POLL', '1', pid=None, waitreply= True )
#peer_master.connectandsend('localhost', 10000, 'POLL', '1', pid=None, waitreply= True )



