{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import threading\n",
    "import socket\n",
    "import numpy as np\n",
    "import torch\n",
    "import torch.optim as optim\n",
    "import torch.nn as nn\n",
    "import torchvision\n",
    "import torchvision.transforms as transforms\n",
    "from torch.autograd import Variable\n",
    "import torch.utils.data as data\n",
    "import argparse\n",
    "import logging\n",
    "import os\n",
    "import copy\n",
    "import math\n",
    "import random\n",
    "import json\n",
    "\n",
    "import datetime\n",
    "#from torch.utils.tensorboard import SummaryWriter\n",
    "\n",
    "from MyThread import MyThread\n",
    "from model import *\n",
    "from utils import *\n",
    "from vggmodel import *\n",
    "import pickle\n",
    "from AES import DeCrypt\n",
    "import time\n",
    "import sklearn.metrics.pairwise as smp"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "# def handle_client(client_socket, global_para):\n",
    "#     # 接收客户端发送的数据\n",
    "#     total_data = bytes()\n",
    "#     while True:\n",
    "#     # 将收到的数据拼接起来\n",
    "#         data = client_socket.recv(1024)\n",
    "#         total_data += data\n",
    "#         if len(data) < 1024:\n",
    "#             break\n",
    "    \n",
    "#     noise_para = pickle.loads(total_data)\n",
    "#     print(noise_para)\n",
    "    \n",
    "#     global_para = update_global_model(noise_para, global_para)\n",
    "\n",
    "#     print(global_para)\n",
    "\n",
    "#     # 发送响应数据给客户端\n",
    "#     response = \"Gradients Received.\"\n",
    "#     client_socket.send(response.encode())\n",
    "\n",
    "#     # 关闭客户端连接\n",
    "#     client_socket.close()\n",
    "#     return global_para\n",
    "\n",
    "# Local file-transfer variant: the client sends the *name* of a file that\n",
    "# already exists on this machine instead of streaming its contents.\n",
    "def handle_client(client_socket):\n",
    "    \"\"\"Receive one client's encrypted parameter file name and key; return the decrypted parameters.\n",
    "\n",
    "    Protocol as implemented here:\n",
    "      1. recv the file name (bytes), ack with \"Gradients Received.\"\n",
    "      2. read the encrypted bytes from that local file\n",
    "      3. recv the AES key, decrypt, unpickle, ack with \"Key Received.\"\n",
    "\n",
    "    Returns the unpickled parameter dict and closes the client socket.\n",
    "\n",
    "    NOTE(review): the file name comes straight off the socket (path-traversal\n",
    "    risk) and pickle.loads runs on peer-supplied data -- unsafe on untrusted\n",
    "    input. Also assumes the name and the key each fit in a single 1024-byte\n",
    "    recv -- TODO confirm the sender guarantees this.\n",
    "    \"\"\"\n",
    "\n",
    "    # Receive the file name (bytes; open() accepts bytes paths)\n",
    "    file_name = client_socket.recv(1024)\n",
    "    response = \"Gradients Received.\"\n",
    "    client_socket.send(response.encode())\n",
    "\n",
    "    with open(file_name, 'rb') as f:\n",
    "        enc_bytes = f.read()\n",
    "\n",
    "    key = client_socket.recv(1024)\n",
    "    \n",
    "    # Decrypt and deserialize the received parameters\n",
    "    noise_bytes = DeCrypt(key, enc_bytes)\n",
    "    noise_para = pickle.loads(noise_bytes)\n",
    "\n",
    "    # Acknowledge the key to the client\n",
    "    response = \"Key Received.\"\n",
    "    client_socket.send(response.encode())\n",
    "\n",
    "    # Close the client connection\n",
    "    client_socket.close()\n",
    "    return noise_para\n",
    "\n",
    "# def handle_client(client_socket, global_para):\n",
    "    \n",
    "#     total_data = bytes()\n",
    "#     while True:\n",
    "#     # 将收到的数据拼接起来\n",
    "#         data = client_socket.recv(1024)\n",
    "#         total_data += data\n",
    "#         if len(data) < 1024:\n",
    "#             break\n",
    "\n",
    "#     enc_bytes = copy.deepcopy(total_data)\n",
    "#     response = \"Gradients Received.\"\n",
    "#     client_socket.send(response.encode())\n",
    "    \n",
    "#     key = client_socket.recv(1024)\n",
    "#     response = \"Key Received.\"\n",
    "#     client_socket.send(response.encode())\n",
    "\n",
    "#     noise_bytes = DeCrypt(key, enc_bytes)\n",
    "#     noise_para = pickle.loads(noise_bytes)\n",
    "#     global_para = update_global_model(noise_para, global_para)\n",
    "    \n",
    "#     # 关闭客户端连接\n",
    "#     client_socket.close()\n",
    "\n",
    "#     return global_para\n",
    "\n",
    "\n",
    "# def update_global_model(net_para, global_para):\n",
    "#     for key in net_para:\n",
    "#         global_para[key] = net_para[key]\n",
    "#     return global_para"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# # Socket通信\n",
    "# def handle_client(client_socket) -> dict:\n",
    "    \n",
    "#     total_data = bytes()\n",
    "#     while True:\n",
    "#     # 将收到的数据拼接起来\n",
    "#         data = client_socket.recv(1024)\n",
    "#         total_data += data\n",
    "#         if len(data) < 1024:\n",
    "#             break\n",
    "\n",
    "#     enc_bytes = copy.deepcopy(total_data)\n",
    "#     response = \"Gradients Received.\"\n",
    "#     client_socket.send(response.encode())\n",
    "    \n",
    "#     key = client_socket.recv(1024)\n",
    "#     response = \"Key Received.\"\n",
    "#     client_socket.send(response.encode())\n",
    "\n",
    "#     noise_bytes = DeCrypt(key, enc_bytes)\n",
    "#     noise_para = pickle.loads(noise_bytes)\n",
    "    \n",
    "#     # 关闭客户端连接\n",
    "#     client_socket.close()\n",
    "\n",
    "#     return noise_para\n",
    "\n",
    "# def start_server(id) -> dict:\n",
    "#     # 创建TCP socket对象\n",
    "#     server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
    "#     # 绑定服务器地址和端口\n",
    "#     port = 12345 + id\n",
    "#     server_address = ('localhost', port)\n",
    "#     server_socket.bind(server_address)\n",
    "#     # 监听连接请求\n",
    "#     server_socket.listen(5)\n",
    "\n",
    "#     print(\"Server started. Listening on {}:{}\".format(*server_address))\n",
    "\n",
    "#     # 等待客户端连接\n",
    "#     client_socket, client_address = server_socket.accept()\n",
    "#     print(\"Accepted connection from {}:{}\".format(*client_address))\n",
    "\n",
    "#     # 创建线程处理客户端请求\n",
    "#     # client_thread = threading.Thread(target=handle_client, args=(client_socket,))\n",
    "#     client_thread = MyThread(func=handle_client, args=(client_socket,))\n",
    "#     client_thread.start()\n",
    "#     client_thread.join()\n",
    "#     noise_para = client_thread.get_result()\n",
    "\n",
    "#     return noise_para\n",
    "\n",
    "def start_server(n_parties) -> (list, list):\n",
    "    \"\"\"Accept connections from n_parties clients and collect their uploaded parameters.\n",
    "\n",
    "    Listens on localhost:12345, serves each client on its own MyThread via\n",
    "    handle_client, and blocks until every thread has finished.\n",
    "\n",
    "    Returns:\n",
    "        (client_addresses, local_para): the peers' (host, port) tuples and the\n",
    "        per-client parameter dicts, both in connection order.\n",
    "    \"\"\"\n",
    "    # Create the listening TCP socket\n",
    "    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
    "    # Allow quick rebinding of the port between runs\n",
    "    server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n",
    "    server_address = ('localhost', 12345)\n",
    "    server_socket.bind(server_address)\n",
    "    server_socket.listen(5)\n",
    "\n",
    "    print(\"Server started. Listening on {}:{}\".format(*server_address))\n",
    "\n",
    "    client_threads = []\n",
    "    client_addresses = []\n",
    "    try:\n",
    "        while len(client_threads) < n_parties:\n",
    "            # Wait for the next client connection\n",
    "            client_socket, client_address = server_socket.accept()\n",
    "            client_addresses.append(client_address)\n",
    "            print(\"Accepted connection from {}:{}\".format(*client_address))\n",
    "\n",
    "            # Handle each client on its own thread so accepts are not serialized\n",
    "            client_thread = MyThread(func=handle_client, args=(client_socket,))\n",
    "            client_threads.append(client_thread)\n",
    "            client_thread.start()\n",
    "    finally:\n",
    "        # Fix: the listening socket used to be leaked; close it once all\n",
    "        # expected clients have connected (or an accept failed).\n",
    "        server_socket.close()\n",
    "\n",
    "    local_para = []\n",
    "    for client_thread in client_threads:\n",
    "        client_thread.join()\n",
    "        local_para.append(client_thread.get_result())\n",
    "\n",
    "    return client_addresses, local_para\n",
    "\n",
    "\n",
    "# if __name__ == '__main__':\n",
    "#     start_server()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_args():\n",
    "    parser = argparse.ArgumentParser()\n",
    "    parser.add_argument('--model', type=str, default='simple-cnn', help='neural network used in training')\n",
    "    # parser.add_argument('--model', type=str, default='vgg', help='neural network used in training')\n",
    "    parser.add_argument('--dataset', type=str, default='mnist', help='dataset used for training')\n",
    "    # parser.add_argument('--dataset', type=str, default='cifar10', help='dataset used for training')\n",
    "    parser.add_argument('--net_config', type=lambda x: list(map(int, x.split(', '))))\n",
    "    parser.add_argument('--partition', type=str, default='noniid-labeldir', help='the data partitioning strategy')\n",
    "    parser.add_argument('--batch-size', type=int, default=64, help='input batch size for training (default: 64)')\n",
    "    parser.add_argument('--lr', type=float, default=0.01, help='learning rate (default: 0.01)')\n",
    "    parser.add_argument('--epochs', type=int, default=1, help='number of local epochs')\n",
    "    parser.add_argument('--n_parties', type=int, default=3,  help='number of workers in a distributed cluster')\n",
    "    parser.add_argument('--alg', type=str, default='fedavg',\n",
    "                            help='fl algorithms: fedavg/fedprox/scaffold/fednova/moon')\n",
    "    parser.add_argument('--use_projection_head', type=bool, default=False, help='whether add an additional header to model or not (see MOON)')\n",
    "    parser.add_argument('--out_dim', type=int, default=256, help='the output dimension for the projection layer')\n",
    "    parser.add_argument('--loss', type=str, default='contrastive', help='for moon')\n",
    "    parser.add_argument('--temperature', type=float, default=0.5, help='the temperature parameter for contrastive loss')\n",
    "    parser.add_argument('--comm_round', type=int, default=1, help='number of maximum communication roun')\n",
    "    parser.add_argument('--is_same_initial', type=int, default=1, help='Whether initial all the models with the same parameters in fedavg')\n",
    "    parser.add_argument('--init_seed', type=int, default=0, help=\"Random seed\")\n",
    "    parser.add_argument('--dropout_p', type=float, required=False, default=0.0, help=\"Dropout probability. Default=0.0\")\n",
    "    parser.add_argument('--datadir', type=str, required=False, default=\"./data/\", help=\"Data directory\")\n",
    "    parser.add_argument('--reg', type=float, default=1e-5, help=\"L2 regularization strength\")\n",
    "    parser.add_argument('--logdir', type=str, required=False, default=\"./logs/\", help='Log directory path')\n",
    "    parser.add_argument('--modeldir', type=str, required=False, default=\"./models/\", help='Model directory path')\n",
    "    parser.add_argument('--beta', type=float, default=0.5, help='The parameter for the dirichlet distribution for data partitioning')\n",
    "    parser.add_argument('--device', type=str, default='cuda:0', help='The device to run the program')\n",
    "    parser.add_argument('--log_file_name', type=str, default=None, help='The log file name')\n",
    "    parser.add_argument('--optimizer', type=str, default='sgd', help='the optimizer')\n",
    "    parser.add_argument('--mu', type=float, default=0.001, help='the mu parameter for fedprox')\n",
    "    parser.add_argument('--noise', type=float, default=0, help='how much noise we add to some party')\n",
    "    parser.add_argument('--noise_type', type=str, default='level', help='Different level of noise or different space of noise')\n",
    "    parser.add_argument('--rho', type=float, default=0.9, help='Parameter controlling the momentum SGD')\n",
    "    parser.add_argument('--sample', type=float, default=1, help='Sample ratio for each communication round')\n",
    "    parser.add_argument('--tau', type=float, default=0.1, help='Step size for Bipole')\n",
    "    # args = parser.parse_args()\n",
    "    args = parser.parse_args(args=[])\n",
    "\n",
    "    return args\n",
    "\n",
    "\n",
    "# def init_nets(net_configs, dropout_p, n_parties, args):\n",
    "\n",
    "#     nets = {net_i: None for net_i in range(n_parties)}\n",
    "\n",
    "#     if args.dataset in {'mnist', 'cifar10', 'svhn', 'fmnist'}:\n",
    "#         n_classes = 10\n",
    "#     if args.use_projection_head:\n",
    "#         add = \"\"\n",
    "#         if \"mnist\" in args.dataset and args.model == \"simple-cnn\":\n",
    "#             add = \"-mnist\"\n",
    "#         for net_i in range(n_parties):\n",
    "#             net = ModelFedCon(args.model+add, args.out_dim, n_classes, net_configs)\n",
    "#             nets[net_i] = net\n",
    "#     else:\n",
    "#         if args.alg == 'moon':\n",
    "#             add = \"\"\n",
    "#             if \"mnist\" in args.dataset and args.model == \"simple-cnn\":\n",
    "#                 add = \"-mnist\"\n",
    "#             for net_i in range(n_parties):\n",
    "#                 net = ModelFedCon_noheader(args.model+add, args.out_dim, n_classes, net_configs)\n",
    "#                 nets[net_i] = net\n",
    "#         else:\n",
    "#             for net_i in range(n_parties):\n",
    "#                 if args.model == \"simple-cnn\":\n",
    "#                     if args.dataset in (\"cifar10\", \"cinic10\", \"svhn\"):\n",
    "#                         net = SimpleCNN(input_dim=(16 * 5 * 5), hidden_dims=[120, 84], output_dim=10)\n",
    "#                     elif args.dataset in (\"mnist\", 'femnist', 'fmnist'):\n",
    "#                         net = SimpleCNNMNIST(input_dim=(16 * 4 * 4), hidden_dims=[120, 84], output_dim=10)\n",
    "#                     elif args.dataset == 'celeba':\n",
    "#                         net = SimpleCNN(input_dim=(16 * 5 * 5), hidden_dims=[120, 84], output_dim=2)\n",
    "#                 nets[net_i] = net\n",
    "\n",
    "#     model_meta_data = []\n",
    "#     layer_type = []\n",
    "#     for (k, v) in nets[0].state_dict().items():\n",
    "#         model_meta_data.append(v.shape)\n",
    "#         layer_type.append(k)\n",
    "#     return nets, model_meta_data, layer_type\n",
    "\n",
    "def init_nets(net_configs, dropout_p, n_parties, args):\n",
    "    \"\"\"Build one model per party according to args.model / args.dataset.\n",
    "\n",
    "    Args:\n",
    "        net_configs: layer configuration forwarded to the ModelFedCon builders.\n",
    "        dropout_p: dropout probability (only used by the FcNet / mlp branch).\n",
    "        n_parties: number of models to instantiate.\n",
    "        args: parsed experiment arguments (model, dataset, alg, ...).\n",
    "\n",
    "    Returns:\n",
    "        (nets, model_meta_data, layer_type): dict of party-id -> model, the\n",
    "        per-parameter shapes of net 0, and the matching parameter names.\n",
    "\n",
    "    NOTE(review): if a model branch matches none of its dataset cases (e.g.\n",
    "    'simple-cnn' with an unlisted dataset), `net` is stale from a previous\n",
    "    iteration or unbound, so unsupported combinations can raise NameError.\n",
    "    \"\"\"\n",
    "\n",
    "    nets = {net_i: None for net_i in range(n_parties)}\n",
    "\n",
    "    # Number of output classes implied by the dataset\n",
    "    if args.dataset in {'mnist', 'cifar10', 'svhn', 'fmnist'}:\n",
    "        n_classes = 10\n",
    "    elif args.dataset == 'celeba':\n",
    "        n_classes = 2\n",
    "    elif args.dataset == 'cifar100':\n",
    "        n_classes = 100\n",
    "    elif args.dataset == 'tinyimagenet':\n",
    "        n_classes = 200\n",
    "    elif args.dataset == 'femnist':\n",
    "        n_classes = 62\n",
    "    elif args.dataset == 'emnist':\n",
    "        n_classes = 47\n",
    "    elif args.dataset in {'a9a', 'covtype', 'rcv1', 'SUSY'}:\n",
    "        n_classes = 2\n",
    "    if args.use_projection_head:\n",
    "        # MOON-style model with an extra projection head\n",
    "        add = \"\"\n",
    "        if \"mnist\" in args.dataset and args.model == \"simple-cnn\":\n",
    "            add = \"-mnist\"\n",
    "        for net_i in range(n_parties):\n",
    "            net = ModelFedCon(args.model+add, args.out_dim, n_classes, net_configs)\n",
    "            nets[net_i] = net\n",
    "    else:\n",
    "        if args.alg == 'moon':\n",
    "            add = \"\"\n",
    "            if \"mnist\" in args.dataset and args.model == \"simple-cnn\":\n",
    "                add = \"-mnist\"\n",
    "            for net_i in range(n_parties):\n",
    "                net = ModelFedCon_noheader(args.model+add, args.out_dim, n_classes, net_configs)\n",
    "                nets[net_i] = net\n",
    "        else:\n",
    "            # Plain per-model dispatch\n",
    "            for net_i in range(n_parties):\n",
    "                if args.dataset == \"generated\":\n",
    "                    net = PerceptronModel()\n",
    "                elif args.model == \"mlp\":\n",
    "                    # Input sizes are the fixed feature counts of these datasets\n",
    "                    if args.dataset == 'covtype':\n",
    "                        input_size = 54\n",
    "                        output_size = 2\n",
    "                        hidden_sizes = [32,16,8]\n",
    "                    elif args.dataset == 'a9a':\n",
    "                        input_size = 123\n",
    "                        output_size = 2\n",
    "                        hidden_sizes = [32,16,8]\n",
    "                    elif args.dataset == 'rcv1':\n",
    "                        input_size = 47236\n",
    "                        output_size = 2\n",
    "                        hidden_sizes = [32,16,8]\n",
    "                    elif args.dataset == 'SUSY':\n",
    "                        input_size = 18\n",
    "                        output_size = 2\n",
    "                        hidden_sizes = [16,8]\n",
    "                    net = FcNet(input_size, hidden_sizes, output_size, dropout_p)\n",
    "                elif args.model == \"vgg\":\n",
    "                    net = vgg11()\n",
    "                elif args.model == \"simple-cnn\":\n",
    "                    if args.dataset in (\"cifar10\", \"cinic10\", \"svhn\"):\n",
    "                        net = SimpleCNN(input_dim=(16 * 5 * 5), hidden_dims=[120, 84], output_dim=10)\n",
    "                    elif args.dataset in (\"mnist\", 'femnist', 'fmnist'):\n",
    "                        net = SimpleCNNMNIST(input_dim=(16 * 4 * 4), hidden_dims=[120, 84], output_dim=10)\n",
    "                    elif args.dataset == 'celeba':\n",
    "                        net = SimpleCNN(input_dim=(16 * 5 * 5), hidden_dims=[120, 84], output_dim=2)\n",
    "                elif args.model == \"vgg-9\":\n",
    "                    if args.dataset in (\"mnist\", 'femnist'):\n",
    "                        net = ModerateCNNMNIST()\n",
    "                    elif args.dataset in (\"cifar10\", \"cinic10\", \"svhn\"):\n",
    "                        # print(\"in moderate cnn\")\n",
    "                        net = ModerateCNN()\n",
    "                    elif args.dataset == 'celeba':\n",
    "                        net = ModerateCNN(output_dim=2)\n",
    "                elif args.model == \"resnet\":\n",
    "                    net = ResNet50_cifar10()\n",
    "                elif args.model == \"vgg16\":\n",
    "                    net = vgg16()\n",
    "                else:\n",
    "                    print(\"not supported yet\")\n",
    "                    exit(1)\n",
    "                nets[net_i] = net\n",
    "\n",
    "    # Record parameter shapes/names of net 0 (all nets share one architecture)\n",
    "    model_meta_data = []\n",
    "    layer_type = []\n",
    "    for (k, v) in nets[0].state_dict().items():\n",
    "        model_meta_data.append(v.shape)\n",
    "        layer_type.append(k)\n",
    "    return nets, model_meta_data, layer_type\n",
    "\n",
    "def to_matrix(local_para_list: list) -> dict:\n",
    "    para_matrix = {}\n",
    "    for key in local_para_list[0]:\n",
    "        grad_list = []\n",
    "        for local_para in local_para_list:\n",
    "            grad_list.append(torch.squeeze(local_para[key].reshape(1, -1)))\n",
    "        para_matrix[key] = torch.tensor(np.array([item.cpu().detach().numpy() for item in grad_list]))\n",
    "    return para_matrix\n",
    "\n",
    "\n",
    "def compute_cosine_similarity(local_para_matrix: dict) -> list:\n",
    "    \"\"\"Score each client by its maximum cosine similarity to any other client.\n",
    "\n",
    "    For each layer matrix of shape (n_clients, n_elements), computes pairwise\n",
    "    cosine similarities, zeroes the self-similarity diagonal, and takes each\n",
    "    client's maximum similarity to the *other* clients; the per-layer maxima\n",
    "    are then summed across layers into one score per client.\n",
    "    \"\"\"\n",
    "    maxcs = []\n",
    "    for matrix in local_para_matrix.values():\n",
    "        # Subtract the identity so a client's perfect self-similarity is ignored\n",
    "        cs = smp.cosine_similarity(matrix) - np.eye(matrix.shape[0])\n",
    "        maxcs.append(np.max(cs, axis=1))\n",
    "\n",
    "    # Fix: the old accumulation loop shadowed the builtins `sum` and `id`;\n",
    "    # summing across layers is a single vectorized reduction.\n",
    "    scores = list(np.sum(maxcs, axis=0))\n",
    "\n",
    "    return scores\n",
    "\n",
    "\n",
    "def select_and_aggregate(scores: list, client_addresses: list, local_para: list, global_para: dict, p_solo: float, p_team: float) -> dict:\n",
    "    score_address = {}\n",
    "    score_para = {}\n",
    "    client_selected = []\n",
    "    para_selected = []\n",
    "\n",
    "    for i, score in enumerate(scores):\n",
    "        score_address[score] = client_addresses[i]\n",
    "        score_para[score] = local_para[i]\n",
    "        \n",
    "    scores.sort()\n",
    "    score_selected = scores[math.ceil(p_solo * len(scores)) : math.floor((1 - p_team) * len(scores))]\n",
    "\n",
    "    for score in score_selected:\n",
    "        client_selected.append(score_address[score])\n",
    "        para_selected.append(score_para[score])\n",
    "    \n",
    "    print(client_selected)\n",
    "\n",
    "    num_selected = len(para_selected)\n",
    "    \n",
    "    for idx in range(num_selected):\n",
    "        noise_para = para_selected[idx]\n",
    "        if idx == 0:\n",
    "            for key in noise_para:\n",
    "                global_para[key] = noise_para[key] / num_selected\n",
    "        else:\n",
    "            for key in noise_para:\n",
    "                global_para[key] += noise_para[key] / num_selected\n",
    "    return global_para\n",
    "\n",
    "\n",
    "def connect_to_server(global_para: dict, server_address: tuple):\n",
    "    \"\"\"Push the aggregated global parameters to one client at server_address.\n",
    "\n",
    "    Binds the fixed source port 12345 before connecting (presumably so the\n",
    "    peer can recognize the server by its port -- confirm with the client code),\n",
    "    pickles global_para, sends it, and prints the peer's acknowledgement.\n",
    "    \"\"\"\n",
    "    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
    "    # Allow immediate rebinding of the fixed port after a previous connection\n",
    "    # left it in TIME_WAIT.\n",
    "    client_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n",
    "    client_socket.bind(('127.0.0.1',12345))\n",
    "\n",
    "    try:\n",
    "        # Connect and upload the pickled parameters\n",
    "        client_socket.connect(server_address)\n",
    "        print(\"Connected to {}:{}\".format(*server_address))\n",
    "\n",
    "        # Fix: use sendall() -- a pickled model easily exceeds the socket\n",
    "        # buffer, and send() may transmit only a prefix of the payload.\n",
    "        global_bytes = pickle.dumps(global_para)\n",
    "        client_socket.sendall(global_bytes)\n",
    "\n",
    "        # Read the acknowledgement\n",
    "        response = client_socket.recv(1024)\n",
    "        print(\"Server response: {}\".format(response.decode()))\n",
    "\n",
    "    except ConnectionRefusedError:\n",
    "        print(\"Connection refused. Make sure the server is running.\")\n",
    "\n",
    "    finally:\n",
    "        # Always release the socket (and its fixed port)\n",
    "        client_socket.close()\n",
    "\n",
    "\n",
    "def connect_to_server_file(server_address: tuple, file_name: str):\n",
    "    \"\"\"Tell one client (listening at server_address) which local file holds the new global model.\n",
    "\n",
    "    Binds the fixed source port 12344, sends the file name, and prints the\n",
    "    peer's acknowledgement. Connection-refused errors are reported, not raised.\n",
    "    \"\"\"\n",
    "    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
    "    sock.bind(('127.0.0.1',12344))\n",
    "\n",
    "    try:\n",
    "        # Connect to the peer and hand over the file name\n",
    "        sock.connect(server_address)\n",
    "        print(\"Connected to {}:{}\".format(*server_address))\n",
    "\n",
    "        sock.send(file_name.encode())\n",
    "\n",
    "        # Wait for the acknowledgement\n",
    "        response = sock.recv(1024)\n",
    "        print(\"Server response: {}\".format(response.decode()))\n",
    "    except ConnectionRefusedError:\n",
    "        print(\"Connection refused. Make sure the server is running.\")\n",
    "    finally:\n",
    "        # Always release the socket (and its fixed port)\n",
    "        sock.close()\n",
    "\n",
    "\n",
    "def bipole(global_model, scores: list, client_addresses: list, local_para: list, global_para: dict, p_solo: float, p_team: float, R: list):\n",
    "    \"\"\"One Bipole step: adaptively tune the filtering fractions p_solo / p_team.\n",
    "\n",
    "    Tries three (p_solo, p_team) candidates -- baseline, team-shifted by tau,\n",
    "    and solo-shifted by R[round] * tau -- evaluates each aggregated model on\n",
    "    the global test set, and keeps whichever shift improved accuracy. R logs\n",
    "    the outcome per round (+1 solo improved, -1 it did not, 0 converged).\n",
    "\n",
    "    NOTE(review): relies on the notebook globals `args` and `device`, and\n",
    "    calls select_and_aggregate repeatedly with the same `scores` list --\n",
    "    confirm these interactions before moving this into a module.\n",
    "\n",
    "    Returns (global_para, p_solo, p_team, R).\n",
    "    \"\"\"\n",
    "    round = len(R) - 1\n",
    "        \n",
    "    acc = []\n",
    "\n",
    "    # Candidate schedules: [0] baseline, [1] p_team shifted by tau,\n",
    "    # [2] p_solo shifted by the previous direction R[round] * tau.\n",
    "    p_solo_list = [p_solo, p_solo, p_solo + R[round] * args.tau]\n",
    "    p_team_list = [p_team, p_team + args.tau, p_team]\n",
    "\n",
    "    \n",
    "    # Global test loader used to score each candidate aggregation\n",
    "    _, test_dl_global, _, _ = get_dataloader(args.dataset, args.datadir, args.batch_size, 32)\n",
    "\n",
    "    for i in range(3):\n",
    "        global_para = select_and_aggregate(scores, client_addresses, local_para, global_para, p_solo_list[i], p_team_list[i])\n",
    "        global_model.load_state_dict(global_para)\n",
    "        global_model.to(device)\n",
    "        test_acc = compute_accuracy(global_model, test_dl_global, get_confusion_matrix=False, device=device)\n",
    "        acc.append(test_acc)\n",
    "\n",
    "        if i == 1:\n",
    "            # Keep the team-side shift if it beat the baseline\n",
    "            if acc[0] < acc[1]:\n",
    "                p_team = p_team + args.tau\n",
    "            \n",
    "            # Stop early when the previous direction was 0 or the last four\n",
    "            # directions roughly cancel out -- treated as converged\n",
    "            if R[round] == 0 or (round > 3 and abs(sum(R[round - 4:])) < 2):\n",
    "                global_para = select_and_aggregate(scores, client_addresses, local_para, global_para, p_solo, p_team)\n",
    "                R.append(0)\n",
    "                break\n",
    "                    \n",
    "        if i == 2:\n",
    "            # Keep the solo-side shift if it beat the baseline; record direction\n",
    "            if acc[0] < acc[2]:\n",
    "                p_solo = p_solo + args.tau\n",
    "                R.append(1)\n",
    "            else:\n",
    "                R.append(-1)\n",
    "            \n",
    "            global_para = select_and_aggregate(scores, client_addresses, local_para, global_para, p_solo, p_team)\n",
    "        \n",
    "    return global_para, p_solo, p_team, R\n",
    "    \n",
    "#####################################################\n",
    "\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    # In a notebook this guard is always true; args, device and logger become\n",
    "    # globals that the later cells depend on.\n",
    "    # torch.set_printoptions(profile=\"full\")\n",
    "    args = get_args()\n",
    "    # Ensure output directories exist, then snapshot the arguments as JSON\n",
    "    mkdirs(args.logdir)\n",
    "    mkdirs(args.modeldir)\n",
    "    if args.log_file_name is None:\n",
    "        argument_path='experiment_arguments-%s.json' % datetime.datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")\n",
    "    else:\n",
    "        argument_path=args.log_file_name+'.json'\n",
    "    with open(os.path.join(args.logdir, argument_path), 'w') as f:\n",
    "        json.dump(str(args), f)\n",
    "    device = torch.device(args.device)\n",
    "    # logging.basicConfig(filename='test.log', level=logger.info, filemode='w')\n",
    "    # logging.info(\"test\")\n",
    "    # Remove any handlers Jupyter already installed on the root logger;\n",
    "    # otherwise the basicConfig call below would be a no-op.\n",
    "    for handler in logging.root.handlers[:]:\n",
    "        logging.root.removeHandler(handler)\n",
    "\n",
    "    if args.log_file_name is None:\n",
    "        args.log_file_name = 'experiment_log-%s' % (datetime.datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\"))\n",
    "    log_path=args.log_file_name+'.log'\n",
    "    logging.basicConfig(\n",
    "        filename=os.path.join(args.logdir, log_path),\n",
    "        # filename='/home/qinbin/test.log',\n",
    "        format='%(asctime)s %(levelname)-8s %(message)s',\n",
    "        datefmt='%m-%d %H-%M', level=logging.DEBUG, filemode='w')\n",
    "\n",
    "    logger = logging.getLogger()\n",
    "    logger.setLevel(logging.DEBUG)\n",
    "    logger.info(device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<class 'model.SimpleCNNMNIST'>\n"
     ]
    }
   ],
   "source": [
    "if args.alg == 'fedavg':\n",
    "    logger.info(\"Initializing nets\")\n",
    "    # One net per party plus a single global model with the same architecture\n",
    "    nets, local_model_meta_data, layer_type = init_nets(args.net_config, args.dropout_p, args.n_parties, args)\n",
    "    global_models, global_model_meta_data, global_layer_type = init_nets(args.net_config, 0, 1, args)\n",
    "    global_model = global_models[0]\n",
    "\n",
    "    print(type(global_model))\n",
    "\n",
    "    global_para = global_model.state_dict()\n",
    "    # Start every client from identical weights so their updates are comparable\n",
    "    if args.is_same_initial:\n",
    "        for net_id, net in nets.items():\n",
    "            net.load_state_dict(global_para)\n",
    "\n",
    "    # client_addresses, local_para = start_server(args.n_parties)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "# # 多轮无Bipole\n",
    "# for round in range(args.comm_round):\n",
    "#     logger.info(\"in comm round:\" + str(round))\n",
    "\n",
    "#     client_addresses, local_para = start_server(args.n_parties)\n",
    "\n",
    "#     for idx in range(args.n_parties):\n",
    "#         noise_para = local_para[idx]\n",
    "#         if idx == 0:\n",
    "#             for key in noise_para:\n",
    "#                 global_para[key] = noise_para[key] / args.n_parties\n",
    "#         else:\n",
    "#             for key in noise_para:\n",
    "#                 global_para[key] += noise_para[key] / args.n_parties\n",
    "    \n",
    "#     for client_address in client_addresses:\n",
    "#         connect_to_server(global_para, client_address)\n",
    "    \n",
    "#     global_model.load_state_dict(global_para)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Server started. Listening on localhost:12345\n",
      "Accepted connection from 127.0.0.1:12346\n",
      "Accepted connection from 127.0.0.1:12347\n",
      "Accepted connection from 127.0.0.1:12348\n",
      "[('127.0.0.1', 12347), ('127.0.0.1', 12348)]\n"
     ]
    }
   ],
   "source": [
    "# with open('local.pkl', 'rb') as f:\n",
    "#     local_para = pickle.load(f)\n",
    "\n",
    "# Bipole direction history; entry r records round r's adjustment direction.\n",
    "R = [0]\n",
    "\n",
    "for round in range(args.comm_round):\n",
    "    logger.info(\"in comm round:\" + str(round))\n",
    "\n",
    "    # Collect this round's parameters from every client\n",
    "    client_addresses, local_para = start_server(args.n_parties)\n",
    "\n",
    "    # Fixed filtering: drop the 1/5 least-similar clients, aggregate the rest\n",
    "    local_para_matrix = to_matrix(local_para)\n",
    "    scores = compute_cosine_similarity(local_para_matrix)\n",
    "    global_para = select_and_aggregate(scores, client_addresses, local_para, global_para, 1/5, 0)\n",
    "\n",
    "    # Fix: the name was hard-coded to 'global_round_0.pkl', so every round\n",
    "    # overwrote round 0's checkpoint.\n",
    "    file_name = 'global_round_{}.pkl'.format(round)\n",
    "    with open(file_name, 'wb') as f:\n",
    "        pickle.dump(global_para, f)\n",
    "\n",
    "    # for client_address in client_addresses:\n",
    "    #     connect_to_server_file(client_address, file_name)\n",
    "\n",
    "    global_model.load_state_dict(global_para)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      ">> Global Model Train accuracy: 0.722033\n",
      ">> Global Model Test accuracy: 0.731200\n"
     ]
    }
   ],
   "source": [
    "# Evaluate the aggregated global model on the full train and test splits\n",
    "global_model.to(device)\n",
    "\n",
    "train_dl_global, test_dl_global, _, _ = get_dataloader(args.dataset, args.datadir, args.batch_size, 32)\n",
    "\n",
    "train_acc = compute_accuracy(global_model, train_dl_global, device=device)\n",
    "test_acc, conf_matrix = compute_accuracy(global_model, test_dl_global, get_confusion_matrix=True, device=device)\n",
    "\n",
    "\n",
    "# Record in the experiment log and echo to the notebook output\n",
    "logger.info('>> Global Model Train accuracy: %f' % train_acc)\n",
    "logger.info('>> Global Model Test accuracy: %f' % test_acc)\n",
    "\n",
    "print('>> Global Model Train accuracy: %f' % train_acc)\n",
    "print('>> Global Model Test accuracy: %f' % test_acc)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "FedAvg",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.15"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
