import torch
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torch import nn 
import copy
import numpy as np
from models.Update import DatasetSplit
from utils.save_result import save_result
from models.aggregation import *
from models.test import test_img
from models.test import test_img_avg
from torch.autograd import Variable
import wandb
def KD(input_p, input_q, T=1):
    """Temperature-scaled knowledge-distillation loss.

    Computes KL(softmax(input_q/T) || softmax(input_p/T)) with batchmean
    reduction and multiplies by T*T, the standard Hinton et al. factor that
    keeps gradient magnitudes independent of the softening temperature.
    With the default T=1 the result is identical to the unscaled KL
    divergence, so existing call sites are unaffected.

    Args:
        input_p: student logits, shape (batch, num_classes).
        input_q: teacher logits, same shape as input_p.
        T: softening temperature (default 1).

    Returns:
        Scalar tensor holding the distillation loss.
    """
    p = F.log_softmax(input_p / T, dim=1)
    q = F.softmax(input_q / T, dim=1)
    # F.kl_div is the functional form of nn.KLDivLoss(reduction="batchmean");
    # no need to construct a loss module on every call.
    return F.kl_div(p, q, reduction="batchmean") * (T * T)

class LocalUpdate_FedCom2(object):
    """Client-side trainer for FedCom2.

    Runs a few epochs of SGD/Adam on one client's shard of the training
    set and returns the locally updated weights.
    """

    def __init__(self, args, dataset=None, idxs=None):
        """
        Args:
            args: experiment configuration (device, lr, local_bs, ...).
            dataset: full training dataset shared by all clients.
            idxs: indices of the samples owned by this client.
        """
        self.args = args
        self.loss_func = nn.CrossEntropyLoss()
        self.ldr_train = DataLoader(DatasetSplit(dataset, idxs),
                                    self.args.local_bs, shuffle=True)

    def train(self, round, net):
        """Train `net` in place for args.local_ep epochs on local data.

        Args:
            round: global communication round, used to decay the SGD lr.
            net: model to train; forward() must return a dict whose
                 'output' entry holds the class logits.

        Returns:
            The trained model's state_dict.

        Raises:
            ValueError: if args.optimizer is neither 'sgd' nor 'adam'.
        """
        net.train()
        if self.args.optimizer == 'sgd':
            # Exponential lr decay over communication rounds.
            optimizer = torch.optim.SGD(net.parameters(),
                                        lr=self.args.lr * (self.args.lr_decay ** round),
                                        momentum=self.args.momentum,
                                        weight_decay=self.args.weight_decay)
        elif self.args.optimizer == 'adam':
            optimizer = torch.optim.Adam(net.parameters(), lr=self.args.lr)
        else:
            # Previously an unknown optimizer left `optimizer` unbound and
            # crashed with a NameError at the first step(); fail fast instead.
            raise ValueError('unsupported optimizer: {}'.format(self.args.optimizer))
        epoch_loss = []
        for _epoch in range(self.args.local_ep):  # renamed from `iter` (builtin shadow)
            batch_loss = []
            for images, labels in self.ldr_train:
                images, labels = images.to(self.args.device), labels.to(self.args.device)
                net.zero_grad()
                log_probs = net(images)['output']
                loss = self.loss_func(log_probs, labels)
                loss.backward()
                optimizer.step()
                batch_loss.append(loss.item())
            epoch_loss.append(sum(batch_loss) / len(batch_loss))
        return net.state_dict()
#######
class SeverUpdate_FedCom2(object):
    """Server-side distillation step for FedCom2.

    Fine-tunes a student network (net_s) so that its logits match those of
    a frozen teacher network (net_t) on a globally shared data split.
    """

    def __init__(self, args, net_s, net_t, round, dataset_global=None, dict_global=None):
        """
        Args:
            args: experiment configuration (device, lr, sever_bs, alpha, ...).
            net_s: student model; updated in place by train().
            net_t: teacher model; only evaluated, its outputs are detached.
            round: global communication round, used to decay the SGD lr.
            dataset_global: dataset the server distills on.
            dict_global: indices of dataset_global used for distillation.

        Raises:
            ValueError: if args.optimizer is neither 'sgd' nor 'adam'.
        """
        self.args = args
        self.loss_func = nn.CrossEntropyLoss()
        self.num_classes = args.num_classes
        self.gdr_train = DataLoader(DatasetSplit(dataset_global, dict_global),
                                    batch_size=self.args.sever_bs, shuffle=True)
        self.alpha = args.alpha
        self.net_s = net_s.to(self.args.device)
        self.net_t = net_t.to(self.args.device)

        if self.args.optimizer == 'sgd':
            # Exponential lr decay over communication rounds.
            self.optimizer = torch.optim.SGD(self.net_s.parameters(),
                                             lr=self.args.lr * (self.args.lr_decay ** round),
                                             momentum=self.args.momentum,
                                             weight_decay=self.args.weight_decay)
        elif self.args.optimizer == 'adam':
            self.optimizer = torch.optim.Adam(self.net_s.parameters(), lr=self.args.lr)
        else:
            # Previously an unknown optimizer left self.optimizer unset and
            # train() later failed with AttributeError; fail fast instead.
            raise ValueError('unsupported optimizer: {}'.format(self.args.optimizer))

    def train(self):
        """Distill the teacher into the student for args.sever_ep epochs.

        The teacher is fed the student's intermediate features
        (out_s['representation0'], re-entering the teacher at level=1) and
        its logits are detached, so gradients update only the student.

        Returns:
            The updated student model's state_dict.
        """
        self.net_s.train()
        epoch_loss = []
        for _epoch in range(self.args.sever_ep):  # renamed from `iter` (builtin shadow)
            batch_loss = []
            for images, labels in self.gdr_train:
                # Labels are moved to the device but unused here: this step
                # is purely teacher-driven distillation.
                images, labels = images.to(self.args.device), labels.to(self.args.device)
                out_s = self.net_s(images)
                teacher_logits = self.net_t(out_s['representation0'], level=1)['output'].detach()
                loss = KD(out_s['output'], teacher_logits)
                self.optimizer.zero_grad()
                loss.backward()
                self.optimizer.step()
                batch_loss.append(loss.item())
            epoch_loss.append(sum(batch_loss) / len(batch_loss))
        print("epoch_loss:", epoch_loss)
        return self.net_s.state_dict()



def FedCom2(args,net_zoo,dataset_train,dataset_test,dict_users,dict_global):
    """Top-level FedCom2 federated training loop.

    Per round: (1) sample a fraction of clients and train them locally,
    routing the first `turntable` sampled clients to architecture 0 and the
    rest to architecture 1; (2) aggregate each architecture's client weights;
    (3) run server-side mutual distillation between the two aggregated
    models; (4) evaluate both models and log accuracy via wandb.

    NOTE(review): although num_model is computed from len(net_zoo), the body
    hard-codes exactly two architectures (net_glob_arr_1 / net_glob_arr_2,
    net_zoo[0] / net_zoo[1]) -- confirm len(net_zoo) == 2 before reuse.
    """
    num_model = len(net_zoo)
    for i in range(num_model):
        net_zoo[i].train()
    # Number of per-round client slots assigned to each architecture.
    turntable = int(args.frac*args.num_users/num_model) 
    # One weight copy per slot, per architecture.
    net_glob_arr_1 = []
    net_glob_arr_2 = []
    for index in range(turntable):
        net_glob_arr_1.append(copy.deepcopy(net_zoo[0].state_dict()))
        net_glob_arr_2.append(copy.deepcopy(net_zoo[1].state_dict()))
    
    # acc[i] collects the per-round test accuracy of model i.
    acc = []
    for i in range(num_model):
        acc.append([])
    for iter in range(args.epochs):
        print('*'*80)
        print('Round {:3d}'.format(iter))
        w_global = []
        # Sample a fraction of clients (at least one) without replacement.
        m = max(int(args.frac*args.num_users),1)
        idxs_users = np.random.choice(range(args.num_users),m,replace=False)

        for index,idx in enumerate(idxs_users):
            local = LocalUpdate_FedCom2(args=args,dataset=dataset_train,idxs=dict_users[idx])
            # index/turntable picks the architecture (0 for the first
            # `turntable` sampled clients, 1 otherwise); index%turntable is
            # the weight slot within that architecture.
            if int(index/turntable) <= 0:
                net_zoo[int(index/turntable)].load_state_dict(net_glob_arr_1[int(index%turntable)])
                w_local = local.train(round=iter,net=copy.deepcopy(net_zoo[int(index/turntable)]).to(args.device))
                net_glob_arr_1[int(index%turntable)] = copy.deepcopy(w_local) 
            else:
                net_zoo[int(index/turntable)].load_state_dict(net_glob_arr_2[int(index%turntable)])
                w_local = local.train(round=iter,net=copy.deepcopy(net_zoo[int(index/turntable)]).to(args.device))
                net_glob_arr_2[int(index%turntable)] = copy.deepcopy(w_local)  
        # Aggregate each architecture's client updates into one state_dict.
        w_global.append(AggregationNoData(net_glob_arr_1))
        w_global.append(AggregationNoData(net_glob_arr_2))
        net_zoo[0].load_state_dict(w_global[0])
        net_zoo[1].load_state_dict(w_global[1])

        glb_net1 = copy.deepcopy(net_zoo[0])
        glb_net1.load_state_dict(w_global[0])
        glb_net2 = copy.deepcopy(net_zoo[1])
        glb_net2.load_state_dict(w_global[1])


        # Mutual distillation on the shared global split: first model 1
        # learns from model 2, then model 2 learns from the updated model 1.
        sever = SeverUpdate_FedCom2(args=args,net_s = copy.deepcopy(glb_net1),net_t=copy.deepcopy(glb_net2),round=iter,dataset_global = dataset_train,dict_global = dict_global)
        w_list1 = sever.train()
        glb_net1.load_state_dict(w_list1)
        sever = SeverUpdate_FedCom2(args=args,net_s = copy.deepcopy(glb_net2),net_t=copy.deepcopy(glb_net1),round=iter,dataset_global = dataset_train,dict_global = dict_global)
        w_list2 = sever.train()
        net_zoo[0].load_state_dict(w_list1)
        net_zoo[1].load_state_dict(w_list2)
        # Every slot is reset to the same distilled weights, so per-slot
        # diversity exists only within a round.
        for index in range(turntable):
            net_glob_arr_1[index] = w_list1
            net_glob_arr_2[index] = w_list2

        # Evaluate both global models and log the accuracies this round.
        acc_dict = {}
        for i in range(num_model):
            a = test_single(net_zoo[i],dataset_test,args)
            acc_dict['accuracy_cnn{}'.format(i+1)] = a
            acc[i].append(a)
        wandb.log(acc_dict)
    save_result(acc,'test_acc',args)     
        
def test_single(net_glo, dataset_test, args):
    """Evaluate one global model on the test set, print the metrics and
    return the test accuracy as a plain Python float."""
    metrics = test_img_avg(net_glo, dataset_test, args)
    accuracy, test_loss = metrics
    print("Testing accuracy: {:.2f}, Testing loss: {:.2f}".format(accuracy, test_loss))
    return accuracy.item()

