#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python version: 3.6
# raw model  local no pretrain 加入multikrum
import copy
import os
import pickle
import itertools
import pandas as pd
import numpy as np
import torch

from utils.options import args_parser
from utils.train_utils import get_data, get_model
from models.Update import LocalUpdate
from models.test import test_img_local_all, test_img_avg_all, test_img_avg_part, test_img_ensemble_all, test_img_local,test_img

import pdb
# Pin all CUDA work to the first visible GPU.
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

# Candidate seeds tried during experiments; the comments record the observed
# accuracy trajectory for each (e.g. "58 -> 93" over a training run).
# seed = 255
# 58 -> 93
seed = 678
# good
# seed = 420
# 58 -> ? -> 93
# seed = 656: 81 -> 93
# seed = 365: 50 -> 84 (slow)
# seed = 108: 59 -> 92
# Example invocation:
# python ./main_sdm_new_np_mk256.py --dataset mnist --label_number 8 --DO_perlabel 4 --num_users 64 --model cnn  --num_classes 10 --local_ep_pretrain 80 --epochs 1000 --lr 0.01 --shard_per_user 2 --frac 0.5 --local_ep 2 --local_bs 10 --num_layers_keep 3 --results_save run106 --load_fed best_1200.pt --flag 2 --iid
np.random.seed(seed)
# CPU RNG
torch.manual_seed(seed)
# all GPU RNGs (no-op without CUDA)
torch.cuda.manual_seed_all(seed)

if __name__ == '__main__':
    # Best global-model accuracy on the full test set, tracked across rounds.
    best_acc_test_avg = 0
    # parse args
    args = args_parser()
    args.device = torch.device('cuda:{}'.format(args.gpu) if torch.cuda.is_available() and args.gpu != -1 else 'cpu')
    print(args)
    # (Result/checkpoint directories from an earlier version — kept for reference.)
    # base_dir = './save/{}/{}_iid{}_num{}_C{}_le{}/shard{}/{}/'.format(
    #     args.dataset, args.model, args.iid, args.num_users, args.frac, args.local_ep, args.shard_per_user, args.results_save)
    # base_dir = './'
    # assert(len(args.load_fed) > 0)
    # base_save_dir = os.path.join(base_dir, 'lg/{}'.format(args.load_fed))
    # if not os.path.exists(base_save_dir):
    #     os.makedirs(base_save_dir, exist_ok=True)

    # Load the dataset plus the per-user index partitions for train and test.
    dataset_train, dataset_test, dict_users_train, dict_users_test = get_data(args)

    # dict_save_path = os.path.join(base_dir, 'dict_users.pkl')
    # with open(dict_save_path, mode='wb') as handle:
    #     pickle.dump(obj=(dict_users_train,dict_users_test),file=handle)
    # with open(dict_save_path, 'rb') as handle:
    #     dict_users_train, dict_users_test = pickle.load(handle)

    # The global model (the "model owner", MO) is pretrained on its own data.
    pretrain_ = True

    # build model
    # NOTE(review): user index 64 is hard-coded as the MO's data partition —
    # one partition beyond the args.num_users data owners (example run uses
    # --num_users 64, so indices 0..63 are DOs and 64 is the MO). Assumes
    # get_data() actually provides dict_users_train[64]; confirm against it.
    print("Build Global model")
    net_glob = get_model(args)
    net_glob.train()
    # ori_model = net_glob.state_dict()
    glob = LocalUpdate(args=args, dataset=dataset_train, idxs=dict_users_train[64], pretrain = pretrain_)
    w_glob, loss = glob.train(net=net_glob.to(args.device), lr=args.lr)
    net_glob.load_state_dict(w_glob)
    # Accuracy on the MO's own test split, then on the full test set.
    a, b = test_img_local(net_glob, dataset_test, args, user_idx=64, idxs=dict_users_test[64])
    print("Init Globle Local Accu:{:.2f}".format(a))
    acc_test_avg, loss_test_avg = test_img(net_glob, dataset_test, args)
    print("Init Globle Accu:{:.2f}".format(acc_test_avg))



# number of weight-key layer groups in the model
    # Split the network's parameter names into personalized vs. globally
    # shared groups: the last `num_layers_keep` key-groups are shared.
    total_num_layers = len(net_glob.weight_keys)
    shared_groups = net_glob.weight_keys[total_num_layers - args.num_layers_keep:]
    w_glob_keys = [name for group in shared_groups for name in group]

    # Count scalar parameters: total per local model vs. globally shared.
    state = net_glob.state_dict()
    num_param_local = 0
    num_param_glob = 0
    for key, tensor in state.items():
        n = tensor.numel()
        num_param_local += n
        if key in w_glob_keys:
            num_param_glob += n
    percentage_param = 100 * float(num_param_glob) / num_param_local
    print('# Params: {} (local), {} (global); Percentage {:.2f} ({}/{})'.format(
        num_param_local, num_param_glob, percentage_param, num_param_glob, num_param_local))

# init local model
    net_local_list = []
    pretrain_ = False
    
    for user in range(args.num_users):
        net_local = get_model(args)
        net_local.train()
        # net_local_list.append(copy.deepcopy(net_local))
        if(pretrain_ == True):
            local = LocalUpdate(args=args, dataset=dataset_train, idxs=dict_users_train[user], pretrain = pretrain_)
            w_local, loss = local.train(net=net_local.to(args.device), lr=args.lr)
        else:
            w_local = net_local.state_dict()

        # net_local.load_state_dict(w_local)
        # a, b = test_img_local(net_local, dataset_test, args, user_idx=user, idxs=dict_users_test[user])
        # print("Pretrain User[{}], Accu:{:.2f}".format(user,a))

        for k in w_glob_keys:
            w_local[k] = w_glob[k]

        net_local.load_state_dict(w_local)
        # a, b = test_img_local(net_local, dataset_test, args, user_idx=user, idxs=dict_users_test[user])
        # print("Pretrain User[{}], Accu:{:.2f}".format(user,a))

        net_local_list.append(copy.deepcopy(net_local))
    # generate list of local models for each user

    # net_local_list = []
    # for user in range(args.num_users):
    #     net_local_list.append(copy.deepcopy(net_glob))
    # acc_test_avg, loss_test_avg, aa = test_img_avg_part(net_glob, best_net_list, w_glob_keys, args, dataset_test, return_net=True)

    acc_test_avg, loss_test_avg = test_img_avg_all(net_glob, net_local_list, args, dataset_test)
    acc_test_local_list, _ = test_img_local_all(net_local_list, args, dataset_test, dict_users_test, return_all=True)
    acc_test_local = acc_test_local_list.mean()

    # ---- Bookkeeping before the federated training loop ----
    # results_save_path = os.path.join(base_save_dir, 'results.csv')
    results_columns = ['epoch', 'acc_test_local', 'acc_test_avg', 'best_acc_local', 'acc_test_ens_avg', 'acc_test_ens_maj']

    loss_train = []
    # NOTE(review): best_iter / best_acc_local / best_acc_list / best_net_list /
    # fina_net_list (and results / loss_train / results_columns) are initialised
    # here but never updated or saved anywhere in the loop below — leftovers
    # from an earlier version of this script.
    best_iter = -1
    best_acc_local = -1
    best_acc_list = acc_test_local_list
    best_net_list = copy.deepcopy(net_local_list)
    fina_net_list = copy.deepcopy(net_local_list)

    results = []
    # Row for "round -1": accuracies measured before any federated round.
    results.append(np.array([-1, acc_test_local, acc_test_avg, acc_test_local, None, None]))
    print('Round {:3d}, Acc (local): {:.2f}, Acc (avg): {:.2f}, Acc (local-best): {:.2f}'.format(
        -1, acc_test_local, acc_test_avg, acc_test_local))
# ===================== Federated training loop =====================
    for iter in range(args.epochs):
        w_glob = {}
        loss_locals = []
        # Sample a fraction `frac` of the data owners (DOs) for this round.
        m = max(int(args.frac * args.num_users), 1)
        idxs_users = np.random.choice(range(args.num_users), m, replace=False)
        # Names of the globally-shared parameters aggregated this round.
        w_keys_epoch = w_glob_keys

        # --- Each sampled DO trains locally; collect its state dict ---
        w_local_list =[]
        # print(idxs_users)
        for idx in idxs_users:
            local = LocalUpdate(args=args, dataset=dataset_train, idxs=dict_users_train[idx])
            net_local = net_local_list[idx]
            w_local, loss = local.train(net=net_local.to(args.device), lr=args.lr)
            loss_locals.append(copy.deepcopy(loss))

            w_local_list.append(copy.deepcopy(w_local))

        # ------ Multi Krum ------
        # Robust aggregation: split the sampled clients into `label_number`
        # groups of `DO_perlabel` consecutive entries of w_local_list, sum each
        # group's shared weights, then keep only the `output_perround` groups
        # whose summed weights are closest (squared L2) to their neighbours.
        # NOTE(review): grouping is by *sampling order* of idxs_users, and the
        # code assumes m == label_number * DO_perlabel (32 == 8 * 4 with the
        # example invocation); otherwise w_local_list[u] goes out of range or
        # clients are silently dropped — confirm the intended pairing.
        label_number = args.label_number
        output_perround = int(label_number*0.75)
        topn = output_perround
        # Pairwise squared-L2 distance matrix between group aggregates.
        dis_map = np.zeros(shape=(label_number,label_number))

        w_glob_label_list = []
        u = 0  # running index into w_local_list
        DO_perlabel = args.DO_perlabel

        # Sum DO_perlabel client state-dicts per group. The first dict is
        # deep-copied whole; later ones only add the shared keys, so the
        # non-shared entries of each group dict are the first member's.
        w_glob_label = {}
        for j in range(label_number):
            for i in range(DO_perlabel):
                if len(w_glob_label) == 0:
                    w_glob_label = copy.deepcopy(w_local_list[u])
                    u+=1
                else:
                    for k in w_keys_epoch:
                        # print(u)
                        w_glob_label[k] += copy.deepcopy(w_local_list[u][k])
                    u+=1
            w_glob_label_list.append(copy.deepcopy(w_glob_label))
            w_glob_label = {}
        score = np.zeros(label_number)

        # Krum-style score: for each group i, the sum of its `topn` smallest
        # entries in the (sorted) distance row. This includes the zero
        # self-distance, so effectively the topn-1 nearest neighbours count.
        for i in range(label_number):
            for j in range(label_number):
                if i == j :
                    dis_map[i][j] = 0
                elif dis_map[i][j] == 0:
                    # Squared L2 distance over the shared parameters only.
                    for k in w_keys_epoch:
                        dis_map[i][j] += torch.sum(torch.pow((w_glob_label_list[i][k] - w_glob_label_list[j][k]),2))
                    # Mirror into row j (not yet processed) so it is reused.
                    dis_map[j][i] = dis_map[i][j]
            # Sorting row i in place is safe here: rows > i have not been
            # sorted yet, and row i receives no further symmetric writes.
            dis_map[i] = np.sort(dis_map[i])
            for k in range(topn):
                score[i] += dis_map[i][k]
        # print(score)
        # Sum the shared weights of the `output_perround` lowest-score groups.
        w_glob = {}
        index = np.argsort(score)
        for i in range(output_perround):
            if len(w_glob) == 0:
                w_glob = copy.deepcopy(w_glob_label_list[index[i]])
            else:
                for k in w_keys_epoch:
                    w_glob[k] += copy.deepcopy(w_glob_label_list[index[i]][k])

        # ------ Multi Krum End ------

        # Average: each selected group was itself a sum of DO_perlabel clients.
        for k in w_keys_epoch:
            w_glob[k] = torch.div(w_glob[k], DO_perlabel*output_perround)

        if iter != args.epochs -1:
            # Load the aggregated shared layers into the model owner's net...
            w_net_glob = net_glob.state_dict()
            for k in w_keys_epoch:
                w_net_glob[k] = copy.deepcopy(w_glob[k])
            net_glob.load_state_dict(w_net_glob)

            # ...and fine-tune it on the MO's own data (flag = args.flag;
            # flag semantics live inside LocalUpdate.train — TODO confirm).
            w_glob, loss = glob.train(net=net_glob.to(args.device), lr=args.lr,flag=args.flag)
            net_glob.load_state_dict(w_glob)

            a, b = test_img_local(net_glob, dataset_test, args, user_idx=64, idxs=dict_users_test[64])
            print("Iter[{}], Global Local Accu:{:.2f}".format(iter,a))

            # Broadcast the fine-tuned shared layers back to every DO.
            for idx in range(args.num_users):
                net_local = net_local_list[idx]
                w_local = net_local.state_dict()
                for k in w_keys_epoch:
                    w_local[k] = copy.deepcopy(w_glob[k])
                net_local.load_state_dict(w_local)
        else:
            # Final round: same global update, but the MO trains with flag=20
            # and the result is NOT broadcast back to the DOs.
            w_net_glob = net_glob.state_dict()
            for k in w_keys_epoch:
                w_net_glob[k] = copy.deepcopy(w_glob[k])
            net_glob.load_state_dict(w_net_glob)

            w_glob, loss = glob.train(net=net_glob.to(args.device), lr=args.lr,flag=20)
            net_glob.load_state_dict(w_glob)

        # Track the best global-model accuracy on the full test set.
        acc_test_avg, loss_test_avg = test_img(net_glob, dataset_test, args)
        print("Iter[{}], Global Accu:{:.2f}".format(iter,acc_test_avg))

        if best_acc_test_avg < acc_test_avg:
            best_acc_test_avg = acc_test_avg

    print('Best model acc (avg): {}'.format(best_acc_test_avg))
 
    
