import torch
import random
import numpy as np
import copy
import math
from net import SIMPLE_NET, CNN, MULTI_LAYER, CIFAR10_CNN
from train_model import TrainModel
from client import Client
from load_partition_data import load_partition_data


def add_noise(data, u = 0, b = 1):
    """Perturb *data* with a Laplace(u, b) sample.

    Draws the noise with ``size=1``, so the result is ``data`` plus a
    length-1 numpy array (the callers take ``abs`` of it afterwards).

    :param data: numeric value (or array) to perturb.
    :param u: location of the Laplace distribution.
    :param b: scale of the Laplace distribution.
    :return: numpy array of shape (1,) holding the noisy value.
    """
    perturbation = np.random.laplace(u, b, 1)
    return data + perturbation

def get_intersection(hash_label_list1, hash_label_list2, client):
    """Return the plain labels shared by two clients, matched via hashed labels.

    Pass 1 scans ``hash_label_list2`` for each entry of ``hash_label_list1``
    and collects every matching hash. Pass 2 maps each matched hash back to
    its index in ``hash_label_list1``, then resolves those indices through
    ``client.label_list`` into plain labels.

    NOTE(review): the resume-from-flag scanning (j/flag1, l/flag2) assumes the
    two hash lists are ordered consistently (e.g. both derived from sorted
    label lists) -- confirm against ``Client.get_hash_label``.

    :param hash_label_list1: hashed labels of the first client.
    :param hash_label_list2: hashed labels of the second client.
    :param client: the first client; ``client.label_list[i]`` must correspond
        to ``hash_label_list1[i]``.
    :return: list of plain labels present on both clients.
    """
    hash_intersection = []   # hashes found in both lists
    intersection_label = []  # indices into hash_label_list1 for each match
    intersection = []        # plain labels resolved via client.label_list
    j = 0
    flag1 = 0  # position in hash_label_list2 just past the most recent match
    for i in range(len(hash_label_list1)):
        while j < len(hash_label_list2):
            if hash_label_list1[i] == hash_label_list2[j]:
                hash_intersection.append(hash_label_list2[j])
                j += 1
                flag1 = j
                break
            else:
                j += 1
        if flag1 > 0:
            j = flag1  # a match exists: resume scanning just after it
        else:
            j = 0  # no match found yet: rewind to the start for the next i

    # Second pass: same resume-scan scheme, mapping matched hashes back to
    # their index in hash_label_list1.
    l = 0
    flag2 = 0
    for k in range(len(hash_intersection)):
        while l < len(hash_label_list1):
            if hash_intersection[k] == hash_label_list1[l]:
                intersection_label.append(l)
                l += 1
                flag2 = l
                break
            else:
                l += 1
        if flag2 > 0:
            l = flag2
        else:
            l = 0
    # Resolve indices to the actual labels of the first client.
    for l in intersection_label:
        intersection.append(client.label_list[l])
    return intersection

def get_weight_matrix(dict):
    """Return each entry's share of the grand total, in dict order.

    NOTE: the parameter shadows the builtin ``dict``; the name is kept
    because callers pass it by keyword (``get_weight_matrix(dict = ...)``).

    :param dict: mapping label -> sample count.
    :return: list of fractions summing to 1, one per label.
    """
    total = sum(dict.values())
    return [count / total for count in dict.values()]

def each_client_cal_noniid(weight_matrix, one_minus_r_matrix):
    """Weighted sum of per-label (1 - ratio) scores for one client.

    Indexes ``one_minus_r_matrix`` by position for every weight, so the two
    lists are assumed to be at least the same length (an IndexError escapes
    otherwise, matching the caller's expectations).

    :param weight_matrix: per-label weights (label frequencies).
    :param one_minus_r_matrix: per-label (1 - avg ratio) values.
    :return: the accumulated weighted score.
    """
    score = 0
    for pos, weight in enumerate(weight_matrix):
        score += weight * one_minus_r_matrix[pos]
    return score

def cal_noniid(intersection, client_psi_obj_list, class_num = 10):
    """Estimate the degree of non-IID-ness between two clients.

    For every label both clients share, each client's per-label sample count
    is perturbed with Laplace noise (scale = count / 10) over ``total_round``
    draws, and the min/max ratio of the two noisy counts is averaged.
    ``1 - avg_ratio`` per shared label is then combined into a per-client
    weighted score (weights = label frequencies) and the two scores averaged.

    :param intersection: labels present on both clients (may be empty).
    :param client_psi_obj_list: two client objects exposing
        ``each_label_num_dict`` (label -> sample count).
    :param class_num: kept for interface compatibility; currently unused
        (the original assigned it to itself and never read it).
    :return: non-IID score; 1 when the clients share no labels.
    """
    total_round = 10  # number of noise draws averaged per shared label
    one_minus_r_matrix = []

    weight_matrix1 = get_weight_matrix(dict = client_psi_obj_list[0].each_label_num_dict)
    weight_matrix2 = get_weight_matrix(dict = client_psi_obj_list[1].each_label_num_dict)

    if not intersection:
        # No shared labels: maximally non-IID by definition.
        return 1

    for intsec in intersection:
        r = 0.0  # accumulated min/max ratio of the two noisy counts
        for round_num in range(total_round):
            # Laplace scale proportional to the label's sample count.
            b1 = client_psi_obj_list[0].each_label_num_dict[intsec] / 10
            b2 = client_psi_obj_list[1].each_label_num_dict[intsec] / 10
            # add_noise returns a length-1 numpy array; .item() extracts the
            # Python scalar so the comparisons below work on plain floats.
            abs_noise_num1 = abs(add_noise(data = client_psi_obj_list[0].each_label_num_dict[intsec], u = 0, b = b1).item())
            abs_noise_num2 = abs(add_noise(data = client_psi_obj_list[1].each_label_num_dict[intsec], u = 0, b = b2).item())
            if abs_noise_num1 == abs_noise_num2 == 0:
                r += 0
            elif abs_noise_num1 > abs_noise_num2:
                r += (abs_noise_num2 / abs_noise_num1)
            else:
                r += (abs_noise_num1 / abs_noise_num2)
        avg_r = r / total_round
        one_minus_r_matrix.append(1 - avg_r)

    # NOTE(review): each_client_cal_noniid indexes one_minus_r_matrix by the
    # weight matrix's length, which assumes every label of each client is in
    # the intersection -- confirm for partially-overlapping label sets.
    noniid1 = each_client_cal_noniid(weight_matrix = weight_matrix1, one_minus_r_matrix = one_minus_r_matrix)
    noniid2 = each_client_cal_noniid(weight_matrix = weight_matrix2, one_minus_r_matrix = one_minus_r_matrix)
    return (noniid1 + noniid2) / 2

def cal_epoch_by_noniid(avg_noniid):
    """Placeholder: derive a local-epoch count from the average non-IID score.

    Not implemented; the only call site (in ``train``) is commented out.
    """
    pass

def client_sampling(train_sample_num, round_idx, total_num, psi_sample_num = 2):
    """Pick the clients that train this round plus the pair used for PSI.

    :param train_sample_num: how many clients train this round.
    :param round_idx: round number, used as the RNG seed so the same round
        always samples the same training clients.
    :param total_num: total number of available clients.
    :param psi_sample_num: how many clients run PSI (default 2).
    :return: tuple (train_client_indexes, psi_client_indexes).
    :raises Exception: when ``train_sample_num`` exceeds ``total_num``.
    """
    if train_sample_num == total_num:
        train_client_indexes = list(range(total_num))
    elif train_sample_num < total_num:
        # random.seed returns None -- seed in place instead of binding the
        # result to an unused variable as the old code did.
        random.seed(round_idx)
        train_client_indexes = random.sample(range(total_num), train_sample_num)
    else:
        raise Exception('sample_num bigger than total_num is not allowed')
    # NOTE(review): the PSI draw is only deterministic when the branch above
    # seeded the RNG (train_sample_num < total_num) -- confirm this is intended.
    psi_client_indexes = random.sample(range(total_num), psi_sample_num)
    print('train_client_indexes:', train_client_indexes, 'psi_client_indexes:', psi_client_indexes)
    return train_client_indexes, psi_client_indexes

def create_client(client_num, train_data, test_data, data_num, trainmodel, psi = False, pipe_conn = None):
    """Build one ``Client`` per index, all sharing the same model and settings.

    :param client_num: number of clients to create (indices 0..client_num-1).
    :param train_data: mapping/sequence of per-client training data.
    :param test_data: mapping/sequence of per-client test data.
    :param data_num: mapping/sequence of per-client sample counts.
    :param trainmodel: shared model object handed to every client.
    :param psi: whether the clients participate in PSI.
    :param pipe_conn: optional pipe connection forwarded to every client.
    :return: list of ``Client`` instances.
    """
    return [
        Client(client_idx = idx, client_train_data = train_data[idx], client_test_data = test_data[idx],
               client_data_num = data_num[idx], model = trainmodel, psi = psi, pipe_conn = pipe_conn)
        for idx in range(client_num)
    ]

def aggregate(w_local):
    """FedAvg: average model parameters weighted by each client's sample count.

    The key set is taken from the first client's parameter dict, matching the
    previous behavior. Unlike the previous version, the result is accumulated
    into a fresh dict -- the old code aliased ``avg_params`` to
    ``w_local[0]``'s dict and overwrote that client's parameters in place.

    :param w_local: list of (num_samples, params_dict) pairs; every dict is
        expected to share the first dict's keys.
    :return: new dict mapping parameter name -> weighted-average value.
    """
    training_num = sum(num for num, _ in w_local)
    first_num, first_params = w_local[0]
    keys = list(first_params.keys())
    # Seed the accumulator with the first client's weighted contribution.
    avg_params = {key: first_params[key] * (first_num / training_num) for key in keys}
    for data_num, local_params in w_local[1:]:
        weight = data_num / training_num
        for key in keys:
            avg_params[key] = avg_params[key] + local_params[key] * weight
    return avg_params

def train(batch_size, client_num_per_round, round_num, lr, Train_model, epoch, psi:bool, degree_of_noniid, mode,psi_num_per_round = 2):
    """Load partitioned data, build clients, and run pairwise PSI to measure
    the average non-IID degree across all clients.

    Currently only the PSI / non-IID measurement runs; the federated training
    loop below is commented out. ``round_num``, ``lr`` and ``epoch`` are only
    used by that disabled loop.

    :param batch_size: batch size passed to the data loader.
    :param client_num_per_round: number of training clients to instantiate.
    :param round_num: federated rounds (used only by the disabled loop).
    :param lr: learning rate (used only by the disabled loop).
    :param Train_model: shared TrainModel wrapper handed to every client.
    :param epoch: local epochs per round (used only by the disabled loop).
    :param psi: whether PSI clients are created in PSI mode.
    :param degree_of_noniid: non-IID degree tag forwarded to the data loader.
    :param mode: dataset name forwarded to the data loader (e.g. 'cifar10').
    :param psi_num_per_round: number of PSI clients (default 2; the pairwise
        loop below assumes exactly 2).
    """
    # Train_model = Train_model

    # batch_size = batch_size
    # data_dir = data_dir
    # user_num = user_num
    # epoch = epoch

    I = []        # intersection of labels for every client pair
    Non_iid = []  # non-IID score for every client pair

    total_client_num, train_data_dict, test_data_dict, each_client_data_num_dict = load_partition_data(batch_size = batch_size, degree_of_noniid = degree_of_noniid,
                                                                                     mode = mode)

    # Clients that would participate in training (also used for an initial
    # evaluation below).
    client_train_obj_list = create_client(client_num = client_num_per_round, train_data = train_data_dict, test_data = test_data_dict,
                                data_num = each_client_data_num_dict, trainmodel = Train_model)
    for client in client_train_obj_list:
        client.get_label()
        print(client.each_label_num_dict)
    # Two clients reused for every pairwise PSI comparison.
    client_psi_obj_list = create_client(client_num = psi_num_per_round, train_data = train_data_dict, test_data = test_data_dict,
                                data_num = each_client_data_num_dict, trainmodel = Train_model, psi = psi)

    # Baseline evaluation of the (untrained) global model on every client.
    test_clients(total_client_num = total_client_num, client_obj = client_train_obj_list[0], train_data = train_data_dict, 
                        test_data = test_data_dict, each_client_data_num = each_client_data_num_dict)

    w_global = Train_model.get_model_params()

    counter = 0  # number of client pairs compared

    # Pairwise PSI over all client pairs (i, j) with i < j: the two PSI
    # client objects are re-pointed at each pair's data in turn.
    for i in range(total_client_num - 1):
        client_psi_obj_list[0].update_local_dataset(client_idx = i, train_data = train_data_dict[i], test_data = test_data_dict[i],
                                         data_num = each_client_data_num_dict[i])
        client_psi_obj_list[0].get_label()
        print(i, client_psi_obj_list[0].label_list)
        hash_label_list1 = client_psi_obj_list[0].get_hash_label()
        for j in range(i + 1, total_client_num):
            client_psi_obj_list[1].update_local_dataset(client_idx = j, train_data = train_data_dict[j], test_data = test_data_dict[j],
                                            data_num = each_client_data_num_dict[j])
            client_psi_obj_list[1].get_label()
            print(j, client_psi_obj_list[1].label_list)
            hash_label_list2 = client_psi_obj_list[1].get_hash_label()
            intersection = get_intersection(hash_label_list1 = hash_label_list1, hash_label_list2 = hash_label_list2, client = client_psi_obj_list[0])
            noniid = cal_noniid(intersection, client_psi_obj_list)
            I.append(intersection)
            Non_iid.append(noniid)
            counter += 1
    # NOTE(review): counter is 0 when total_client_num < 2, which would raise
    # ZeroDivisionError here -- confirm the data loader guarantees >= 2 clients.
    avg_noniid = sum(Non_iid) / counter
    print(avg_noniid)
    # epoch = cal_epoch_by_noniid(avg_noniid = avg_noniid)

    # ---- disabled FedAvg training loop ----
    # for round_idx in range(round_num):
    #     print('-------------------------------------------round:', round_idx)
    #     w_local = []
    #     train_client_indexes, psi_client_indexes = client_sampling(train_sample_num = client_num_per_round, round_idx = round_idx, total_num = total_client_num)

    #     for idx, client in enumerate(client_train_obj_list):
    #         train_client_idx = train_client_indexes[idx]
    #         client.update_local_dataset(client_idx = train_client_idx, train_data = train_data_dict[train_client_idx], test_data = test_data_dict[train_client_idx],
    #                                      data_num = each_client_data_num_dict[train_client_idx])

    #         w = client.train(lr = lr, epoch = epoch)
    #         w_local.append((client.get_train_num(), copy.deepcopy(w)))
        
    #     w_global = aggregate(w_local = w_local)
    #     # client.model.set_model_params(w_global)
    #     Train_model.set_model_params(w_global)
    #     for client in client_train_obj_list:
    #         client.update_model(model = Train_model)
            
    #     test_clients(total_client_num = total_client_num, client_obj = client_train_obj_list[0], train_data = train_data_dict, 
    #                             test_data = test_data_dict, each_client_data_num = each_client_data_num_dict)
                            
def test_clients(total_client_num, client_obj, train_data, test_data, each_client_data_num):
    train_metrics = {
            'num_samples': [],
            'num_correct': [],
            'losses': []
        }
    test_metrics = {
        'num_samples': [],
        'num_correct': [],
        'losses': []
    }

    for idx in range(total_client_num):
        client_obj.update_local_dataset(client_idx = idx, train_data = train_data[idx], test_data = test_data[idx], data_num = each_client_data_num[idx])
        
        train_local_metrics = client_obj.test(mode = 'train')
        train_metrics['num_samples'].append(copy.deepcopy(train_local_metrics['test_total']))
        train_metrics['num_correct'].append(copy.deepcopy(train_local_metrics['test_correct']))
        train_metrics['losses'].append(copy.deepcopy(train_local_metrics['test_loss']))

        test_local_metrics = client_obj.test(mode = 'test')
        test_metrics['num_samples'].append(copy.deepcopy(test_local_metrics['test_total']))
        test_metrics['num_correct'].append(copy.deepcopy(test_local_metrics['test_correct']))
        test_metrics['losses'].append(copy.deepcopy(test_local_metrics['test_loss']))

        print('client_idx', idx, 'test_total = ', test_local_metrics['test_total'],\
                'test_correct', test_local_metrics['test_correct'], 'test_loss', test_local_metrics['test_loss'])
        # test on training dataset
    train_acc = sum(train_metrics['num_correct']) / sum(train_metrics['num_samples'])
    train_loss = sum(train_metrics['losses']) / sum(train_metrics['num_samples'])

    # test on test dataset
    test_acc = sum(test_metrics['num_correct']) / sum(test_metrics['num_samples'])
    test_loss = sum(test_metrics['losses']) / sum(test_metrics['num_samples'])

    train_stats = {'training_acc': train_acc, 'training_loss': train_loss}
    test_stats = {'test_acc': test_acc, 'test_loss': test_loss}
    print(train_stats, '\n',test_stats)
    

if __name__ == '__main__':
    Net = CIFAR10_CNN()
    # Initialise the training model; every client shares this same model.
    Train_model = TrainModel(Net)

    # Hyper-parameters for the run (only the PSI measurement in train()
    # is active; the FedAvg loop there is commented out).
    epoch = 1
    batch_size = 16
    client_num_per_round = 5
    round_num = 100
    learning_rate = 0.02
    # if data_dir = 'total', then choose user_num = 10 or 100 or 500; if data_dir = 'original', user_num is not needed (any number works)
    psi = True
    degree_of_noniid = '90'  # forwarded as a string tag to load_partition_data
    mode = 'cifar10'

    train(batch_size = batch_size, client_num_per_round = client_num_per_round, round_num = round_num, lr = learning_rate, Train_model = Train_model,
            epoch = epoch, psi = psi, degree_of_noniid = degree_of_noniid, mode = mode)
    print('Done!')