import copy

import torch
from torch.utils.data import DataLoader
from tqdm import tqdm

from Aggregation.BaseAggregation import BaseAggregation
from utils.utils import DatasetSplit, test


class FedProx(BaseAggregation):
    """
    FedProx aggregation strategy (Li et al., MLSys 2020).

    Identical to FedAvg except that each client's local objective adds a
    proximal term (mu / 2) * ||w - w_global||^2 that keeps the local model
    close to the current global model, which stabilises training when client
    data is non-IID. ``mu`` is read from ``config.mu``.
    """

    def __init__(self, config, train_dataset, test_dataset, user_groups, traindata_cls_counts):
        super().__init__(config, train_dataset, test_dataset, user_groups, traindata_cls_counts)

    def local_train(self, choose_user_ids):
        """
        Run local training on the selected clients.

        :param choose_user_ids: iterable of client ids to train this round
        :return: None; updates ``self.net[client_id]`` in place, evaluates each
                 trained client on the test set, and appends the results to
                 ``self.client_loss_data``
        """
        # Snapshot the global weights ONCE per round, detached and moved to the
        # training device.  Detaching is essential: comparing directly against
        # self.global_model.parameters() (as the original code did) lets
        # loss.backward() push gradients into the global model.  The snapshot
        # also replaces an unused per-client deepcopy of the whole global model.
        global_params = [p.detach().clone().to(self.config.device)
                         for p in self.global_model.parameters()]
        mu = self.config.mu

        for client_id in choose_user_ids:
            model = self.net[client_id]
            model.train()

            train_loader = DataLoader(DatasetSplit(self.train_dataset, self.user_groups[client_id]),
                                      batch_size=self.config.local_bs, shuffle=True, num_workers=4)
            optimizer = self.get_optimizer(model)
            pbar = tqdm(range(self.config.local_ep), desc='LocalTrain', unit='item')
            for epoch in pbar:
                for x, y in train_loader:
                    x, y = x.to(self.config.device), y.to(self.config.device)
                    model.zero_grad()
                    output = model(x)
                    loss = torch.nn.functional.cross_entropy(output, y)
                    # FedProx proximal term: (mu / 2) * ||w - w_global||^2,
                    # computed against the fixed, detached round snapshot so
                    # only the local model receives gradients.
                    proximal_term = (mu / 2) * sum(
                        torch.norm(p - g) ** 2
                        for p, g in zip(model.parameters(), global_params))
                    loss = loss + proximal_term
                    loss.backward()
                    optimizer.step()

            # Evaluate the freshly trained client model on the shared test set.
            acc, test_loss = test(model, self.test_loader, self.config)
            pbar.set_postfix(
                {"client_id": client_id, "epoch": epoch, "acc": acc, "test_loss": test_loss},
                refresh=True)
            print(f"client_id: {client_id}, epoch: {epoch}, acc: {acc}, test_loss: {test_loss}")
            self.client_loss_data.append({"client_id": int(client_id), "acc": acc, "test_loss": test_loss})