import itertools
import math
import random
import numpy as np
import matplotlib.pyplot as plt
import torch.nn.functional as F
import copy
import torch
import time
import os
import json
from datetime import datetime
from torchvision import datasets, transforms

from utils.Logger import ExperimentLogger
from utils.Sampling import mnist_iid, mnist_noniid, cifar_iid, cifar_noniid, fmnist_iid, fmnist_noniid, svhn_iid, \
    svhn_noniid
from models.Nets import MLP, CNNMnist, CNNCifar
from utils.Options import args_parser
from models.Update import LocalUpdate
from models.Test import test_img


def calculate_distance(delta_i, delta_j, beta=1.0, eps=1e-8):
    """Weighted distance between two parameter-update dicts.

    The Euclidean distance between the flattened updates is scaled by the
    larger of the two update norms, then multiplied by exp(-beta * cos) so
    that directionally aligned updates count as closer.

    Args:
        delta_i, delta_j: dicts of tensors (assumed float — TODO confirm
            callers never include integer buffers) with matching keys.
        beta: strength of the cosine-alignment penalty.
        eps: guard against division by zero when both updates are zero.

    Returns:
        The scalar distance as a Python float.
    """
    flat_i = torch.cat([t.reshape(-1) for t in delta_i.values()])
    flat_j = torch.cat([t.reshape(-1) for t in delta_j.values()])

    # Normalize the Euclidean gap by the larger update magnitude.
    scale = torch.maximum(flat_i.norm(), flat_j.norm()) + eps
    rel_dist = (flat_i - flat_j).norm() / scale

    # Aligned directions (cos -> 1) shrink the distance; opposed ones grow it.
    cos = F.cosine_similarity(flat_i.unsqueeze(0), flat_j.unsqueeze(0))
    return (rel_dist * torch.exp(-beta * cos)).item()


def data_valuation_rule(w_locals, w_glob_prev, sced_dist=None):
    """Builds a pairwise similarity matrix between local model updates.

    Each client's update is its state-dict delta against the previous global
    model.  Pairwise distances are rescaled by the maximum observed distance
    and mapped through 1 - tanh(.) so similarities lie in (0, 1]; the
    diagonal is zeroed.

    Args:
        w_locals: list of local state dicts.
        w_glob_prev: previous global state dict.
        sced_dist: unused; kept for interface compatibility.

    Returns:
        (n, n) similarity tensor on the model's device, or an empty tensor
        when no local models are given.
    """
    if not w_locals:
        return torch.tensor([])

    num_clients = len(w_locals)
    device = w_glob_prev[next(iter(w_glob_prev))].device

    # Per-client update relative to the previous global weights.
    deltas = [
        {key: w_local[key] - w_glob_prev[key] for key in w_glob_prev}
        for w_local in w_locals
    ]

    dist_matrix = torch.zeros(num_clients, num_clients, device=device)
    largest = 0
    for i, j in itertools.combinations(range(num_clients), 2):
        d = calculate_distance(deltas[i], deltas[j])
        dist_matrix[i, j] = d
        dist_matrix[j, i] = d
        largest = max(largest, d)

    # Scale by the largest distance (eps-guarded) and squash into (0, 1].
    similarity = 1 - torch.tanh(dist_matrix / (largest + 1e-8))
    similarity.fill_diagonal_(0)
    return similarity


def calculate_similarity_value(w_glob_prev, subset, similarity_matrix, user_indices):
    """Sum of pairwise similarities over all unordered pairs from user_indices.

    Args:
        w_glob_prev: unused; kept for interface compatibility with callers.
        subset: unused; kept for interface compatibility with callers.
        similarity_matrix: 2-D indexable (tensor/ndarray) of pairwise values.
        user_indices: sequence of row/column indices selecting the coalition.

    Returns:
        Sum of similarity_matrix[a, b] over pairs in list order; 0 (int) when
        fewer than two indices are given, matching the original behavior.
    """
    # itertools.combinations yields pairs in the same (i, j) list order as the
    # original nested loops; the dead local `n` was removed.
    return sum(
        similarity_matrix[a, b]
        for a, b in itertools.combinations(user_indices, 2)
    )


def exact_shapley_value_similarity(w_glob_prev, w_locals, similarity_matrix):
    """Computes exact Shapley values for the pairwise-similarity game.

    The coalition value here is v(S) = sum of sim[j, k] over pairs in S, which
    decomposes (by linearity of the Shapley value) into independent two-player
    games: each pair's value sim[j, k] is split equally between j and k.  The
    exact Shapley value therefore has the closed form

        phi_i = 0.5 * sum_{j != i} sim[j, i]

    which replaces the original O(2^n * n^2) subset enumeration with an O(n^2)
    column sum while producing identical values (the column orientation
    matches the original's pair ordering; for the symmetric matrices produced
    by data_valuation_rule rows and columns agree anyway).

    Args:
        w_glob_prev: unused; kept for interface compatibility with callers.
        w_locals: list of local state dicts — only its length is used.
        similarity_matrix: (n, n) torch tensor or ndarray of similarities.

    Returns:
        np.ndarray of n float64 Shapley values.
    """
    n = len(w_locals)
    if n == 0:
        return np.zeros(0)

    sim = similarity_matrix
    if isinstance(sim, torch.Tensor):
        sim = sim.detach().cpu().numpy()
    sim = np.asarray(sim, dtype=np.float64)

    # Column sums minus the diagonal: self-similarity never enters a marginal.
    return 0.5 * (sim.sum(axis=0) - np.diagonal(sim))


def value_aware_fed_avg(round_idx, w_glob_prev, w_locals, prev_weights=None):
    """Value-aware federated averaging with exponentially smoothed weights.

    Aggregation weights are the clients' normalized Shapley values of the
    similarity game, blended with the previous round's weights; the blend
    factor decays from ~0.8 toward a floor of 0.2 as rounds progress.

    Args:
        round_idx: 1-based round counter (drives the smoothing schedule).
        w_glob_prev: previous global state dict.
        w_locals: list of local state dicts.
        prev_weights: last round's aggregation weights, or None on round 1.

    Returns:
        (new global state dict, aggregation weights, normalized Shapley values)
    """
    sim_matrix = data_valuation_rule(w_locals, w_glob_prev)
    shapley_vals = exact_shapley_value_similarity(w_glob_prev, w_locals, sim_matrix)

    # Normalize the Shapley values to a distribution (left unchanged if they sum to 0).
    sv_total = np.sum(shapley_vals)
    if sv_total != 0:
        shapley_vals = shapley_vals / sv_total
    print("Shapley values for each user:", shapley_vals)

    # Smoothing factor: 0.8 minus 0.01 per round, floored at 0.2.
    gamma = max(0.8 - 0.01 * round_idx, 0.2)
    if prev_weights is None:
        smoothed = shapley_vals
    else:
        smoothed = gamma * prev_weights + (1 - gamma) * shapley_vals

    # Renormalize; fall back to uniform weights if everything cancels out.
    sm_total = np.sum(smoothed)
    if sm_total != 0:
        weights = smoothed / sm_total
    else:
        weights = np.ones_like(smoothed) / len(smoothed)
    weights = np.round(weights, 5)

    # Apply each client's weighted update on top of the previous global model.
    w_glob_new = copy.deepcopy(w_glob_prev)
    for weight, w_local in zip(weights, w_locals):
        for key in w_glob_new:
            w_glob_new[key] += weight * (w_local[key] - w_glob_prev[key])

    return w_glob_new, weights, shapley_vals


def load_dataset(args):
    """Loads the requested dataset, partitions it across users, builds the model.

    Args:
        args: parsed options; reads dataset, iid, num_users, device.

    Returns:
        (train dataset, test dataset, user-index mapping, global model)

    Raises:
        NotImplementedError: for an unrecognized args.dataset value.
    """
    trans_mnist = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))])
    trans_cifar = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))
    ])

    # Each branch selects datasets, the iid/non-iid samplers, and the model class.
    if args.dataset == 'mnist':
        dataset_train = datasets.MNIST('./data/mnist/', train=True, download=True, transform=trans_mnist)
        dataset_test = datasets.MNIST('./data/mnist/', train=False, download=True, transform=trans_mnist)
        sampler_iid, sampler_noniid = mnist_iid, mnist_noniid
        model_cls = CNNMnist
    elif args.dataset == 'cifar':
        dataset_train = datasets.CIFAR10('./data/cifar10', train=True, download=True, transform=trans_cifar)
        dataset_test = datasets.CIFAR10('./data/cifar10', train=False, download=True, transform=trans_cifar)
        sampler_iid, sampler_noniid = cifar_iid, cifar_noniid
        model_cls = CNNCifar
    elif args.dataset == 'fmnist':
        dataset_train = datasets.FashionMNIST('./data/fmnist/', train=True, download=True, transform=trans_mnist)
        dataset_test = datasets.FashionMNIST('./data/fmnist/', train=False, download=True, transform=trans_mnist)
        sampler_iid, sampler_noniid = fmnist_iid, fmnist_noniid
        model_cls = CNNMnist
    elif args.dataset == 'svhn':
        dataset_train = datasets.SVHN('./data/svhn/', split='train', download=True, transform=trans_cifar)
        dataset_test = datasets.SVHN('./data/svhn/', split='test', download=True, transform=trans_cifar)
        sampler_iid, sampler_noniid = svhn_iid, svhn_noniid
        model_cls = CNNCifar
    else:
        raise NotImplementedError(f"Dataset {args.dataset} not implemented.")

    sampler = sampler_iid if args.iid else sampler_noniid
    dict_users = sampler(dataset_train, args.num_users)
    net_glob = model_cls(args=args).to(args.device)

    return dataset_train, dataset_test, dict_users, net_glob


def set_seed(seed):
    """Seeds python, numpy, and torch RNGs (plus CUDA/cuDNN) for reproducibility."""
    for seeder in (random.seed, np.random.seed, torch.manual_seed):
        seeder(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)
        # Deterministic cuDNN kernels; disables autotuning for repeatable runs.
        cudnn = torch.backends.cudnn
        cudnn.deterministic = True
        cudnn.benchmark = False
        cudnn.enabled = True


if __name__ == "__main__":
    # Entry point: trains a global model via value-aware federated averaging.
    args = args_parser()
    # Use the requested GPU only when CUDA is present and gpu != -1.
    args.device = torch.device('cuda:{}'.format(args.gpu) if torch.cuda.is_available() and args.gpu != -1 else 'cpu')
    if args.seed is not None:  # fixed: identity comparison with None (was `!= None`)
        set_seed(args.seed)

    logger = ExperimentLogger(args, run_file_name=os.path.basename(__file__))

    dataset_train, dataset_test, dict_users, net_glob = load_dataset(args)

    # Baseline accuracy of the freshly initialized global model.
    acc_initial, _ = test_img(net_glob, dataset_test, args)
    print(f"Initial Global Model Accuracy: {acc_initial:.4f}")

    w_glob = net_glob.state_dict()
    prev_weights = None
    # Initialized up front so the final evaluation below is safe when args.epochs < 1.
    w_locals = []

    for round_idx in range(1, args.epochs + 1):
        round_start = time.time()

        # --- Local training: every client trains a copy of the global model ---
        w_locals = []
        local_train_times = []  # per-client wall time; collected for future logging
        for user in dict_users.values():
            local_start = time.time()
            local_update = LocalUpdate(args=args, dataset=dataset_train, idxs=user)
            w_local, _ = local_update.train(net=copy.deepcopy(net_glob).to(args.device))
            w_locals.append(w_local)
            local_train_times.append(time.time() - local_start)

        # --- Aggregation: Shapley-weighted averaging with smoothed weights ---
        agg_start = time.time()
        w_glob_next, prev_weights, shapley_vals = value_aware_fed_avg(round_idx, w_glob, w_locals, prev_weights)
        net_glob.load_state_dict(w_glob_next)
        w_glob = w_glob_next
        agg_time = time.time() - agg_start

        # --- Evaluation: each local model, then the new global model ---
        local_accs = []
        test_start = time.time()
        for w_local in w_locals:
            net_local = copy.deepcopy(net_glob)
            net_local.load_state_dict(w_local)
            acc, _ = test_img(net_local, dataset_test, args)
            local_accs.append(acc)
        test_time = time.time() - test_start

        global_acc, _ = test_img(net_glob, dataset_test, args)

        total_round_time = time.time() - round_start

        logger.log_round(
            round_idx=round_idx,
            shapley_vals=shapley_vals,
            weights=prev_weights,
            duration=total_round_time,
            local_accs=local_accs,
            global_acc=global_acc
        )

        print(f"Round {round_idx} Completed | Global Acc: {global_acc:.4f} | Time: {total_round_time:.2f}s")

    # Final per-client accuracies of the last round's local models.
    final_local_accs = []
    for w_local in w_locals:
        net_local = copy.deepcopy(net_glob)
        net_local.load_state_dict(w_local)
        acc, _ = test_img(net_local, dataset_test, args)
        final_local_accs.append(acc)

    logger.finalize(net_glob, final_local_accs)