import torch
import torch.nn.functional as F
from torch.utils.data import DataLoader
from torch import nn 
import numpy as np
from models.Update import DatasetSplit
from utils.save_result import save_result
from models.aggregation import Aggregation
from models.test import test_img
from models.branchnet import BranchNet
from torch.autograd import Variable
from blocklize.block_meta import MODEL_ZOO,MODEL_BLOCKS
from simlarity.compute_simlarity import *

def KD(input_p, input_q, T=1):
    """Knowledge-distillation loss between two logit tensors.

    Computes the batch-mean KL divergence KL(softmax(input_q/T) ||
    softmax(input_p/T)): `input_p` supplies the log-probabilities fed to
    KLDivLoss, `input_q` supplies the target distribution.

    input_p: logits of the student/prediction branch, shape (N, C).
    input_q: logits of the teacher/target branch, shape (N, C).
    T: softmax temperature (default 1; higher T softens both distributions).
    """
    log_pred = F.log_softmax(input_p / T, dim=1)
    target = F.softmax(input_q / T, dim=1)
    return nn.KLDivLoss(reduction="batchmean")(log_pred, target)

class LocalUpdate_FedDery(object):
    """Client-side update for FedDery: holds one user's data shard and runs
    local supervised training epochs on a model supplied by the server."""

    def __init__(self, args, dataset=None, idxs=None):
        """
        args: experiment config (reads local_bs here; optimizer, lr,
              lr_decay, momentum, weight_decay, local_ep, device in train()).
        dataset: full training dataset; idxs selects this client's samples.
        """
        self.args = args
        self.loss_func = nn.CrossEntropyLoss()
        self.ldr_train = DataLoader(DatasetSplit(dataset, idxs),
                                    self.args.local_bs, shuffle=True)

    def train(self, round, net):
        """Run args.local_ep local epochs on `net` and return its state_dict.

        round: global communication round index, used only for the SGD
               exponential learning-rate decay lr * lr_decay**round.
        net: model already on args.device; its forward must return a dict
             with key 'output' holding the classification logits.

        Raises:
            ValueError: if args.optimizer is neither 'sgd' nor 'adam'.
              (The original code left `optimizer` unbound and crashed later
              with a NameError at optimizer.step().)
        """
        net.train()
        if self.args.optimizer == 'sgd':
            optimizer = torch.optim.SGD(net.parameters(),
                                        lr=self.args.lr * (self.args.lr_decay ** round),
                                        momentum=self.args.momentum,
                                        weight_decay=self.args.weight_decay)
        elif self.args.optimizer == 'adam':
            optimizer = torch.optim.Adam(net.parameters(), lr=self.args.lr)
        else:
            # fail fast with a clear message instead of a delayed NameError
            raise ValueError('unsupported optimizer: {}'.format(self.args.optimizer))

        epoch_loss = []
        for epoch in range(self.args.local_ep):  # renamed: `iter` shadowed the builtin
            batch_loss = []
            for images, labels in self.ldr_train:
                images, labels = images.to(self.args.device), labels.to(self.args.device)
                net.zero_grad()
                out_of_local = net(images)
                log_probs = out_of_local['output']
                loss = self.loss_func(log_probs, labels)
                loss.backward()
                optimizer.step()
                batch_loss.append(loss.item())
            # guard: an empty loader caused ZeroDivisionError in the original
            if batch_loss:
                epoch_loss.append(sum(batch_loss) / len(batch_loss))

        return net.state_dict()

class SeverUpdate_FedDery(object):
    """Server-side update for FedDery.

    NOTE(review): currently a placeholder — both methods are no-ops. The
    class name looks like a typo for "ServerUpdate" but is kept unchanged
    so existing callers keep working.
    """

    def __init__(self):
        """No server-side state is kept yet."""

    def train(self):
        """Placeholder for the server training step; does nothing."""


def FedDery(args, net_list, dataset_train, dataset_test, dict_users, dict_global):
    """Top-level FedDery training loop.

    Runs args.epochs communication rounds, split by hard-coded round
    thresholds into four stages (pre-train / similarity / conv / dery
    block); after every round the models are evaluated on the test set and
    the accuracy history is persisted via save_result.

    args: experiment config (num_users, frac, group1_frac, group2_frac,
          num_groups, epochs, device, ...).
    net_list: one model per client group; length must equal args.num_groups.
    dataset_train / dataset_test: global train / test datasets.
    dict_users: user id -> sample indices of that user's local shard.
    dict_global: server-held sample indices used for similarity computation.

    Raises:
        ValueError: if len(net_list) != args.num_groups.  (The original
          called exit(), which kills the interpreter; library code should
          raise instead.)
    """
    if len(net_list) != args.num_groups:
        raise ValueError("error num_groups")
    acc = []
    for rnd in range(args.epochs):  # renamed: `iter` shadowed the builtin
        print('*' * 80)
        print('Round {:3d}'.format(rnd))

        # Sample a fraction of each group's clients for this round.  Users
        # are partitioned by id: group 1 owns [0, num_users*group1_frac),
        # group 2 owns the next group2_frac slice.
        group_idxs_users = []
        m1 = max(int(args.frac * args.num_users * args.group1_frac), 1)
        group1_idxs_users = np.random.choice(
            range(0, int(args.num_users * args.group1_frac)), m1, replace=False)
        m2 = max(int(args.frac * args.num_users * args.group2_frac), 1)
        group2_idxs_users = np.random.choice(
            range(int(args.num_users * args.group1_frac),
                  int(args.num_users * (args.group1_frac + args.group2_frac))),
            m2, replace=False)
        group_idxs_users.append(group1_idxs_users)
        group_idxs_users.append(group2_idxs_users)

        # NOTE(review): stage boundaries are hard-coded; with args.epochs
        # < 300 only stage 1 ever runs, and rounds >= 1200 silently do
        # nothing — confirm this is intended.
        if rnd < 300:
            # stage 1: per-group pre-training with FedAvg-style aggregation
            for gid in range(args.num_groups):  # renamed: `id` shadowed the builtin
                w_locals = []
                lens = []
                for idx in group_idxs_users[gid]:
                    local = LocalUpdate_FedDery(args=args, dataset=dataset_train,
                                                idxs=dict_users[idx])
                    w_local = local.train(round=rnd, net=net_list[gid].to(args.device))
                    w_locals.append(w_local)
                    lens.append(len(dict_users[idx]))
                # aggregate local weights, weighted by shard size
                w_global = Aggregation(w_locals, lens)
                net_list[gid].load_state_dict(w_global)
        elif rnd < 600:
            # stage 2: compute model similarity and partition
            compute_simlarity(args=args, net_list=net_list,
                              dataset_global=dataset_train, dict_global=dict_global)
        elif rnd < 900:
            # stage 3: train the Conv layer (not implemented yet)
            pass
        elif rnd < 1200:
            # stage 4: train with the dery block (not implemented yet)
            pass
        acc.append(test(net_list, dataset_test, args))

    save_result(acc, 'test_acc', args)

def test(net_list, dataset_test, args):
    """Evaluate the models on the test set and return the scalar accuracy.

    net_list: models to evaluate (forwarded to test_img as-is).
    dataset_test: evaluation dataset.
    args: config consumed by test_img (device, batch size, ...).
    Returns the accuracy as a Python number via .item(); also prints
    accuracy and loss for the round.
    """
    accuracy, avg_loss = test_img(net_list, dataset_test, args)
    print("Testing accuracy: {:.2f}, Testing loss: {:.2f}".format(accuracy, avg_loss))
    return accuracy.item()
                
            

    