{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import socket\n",
    "import numpy as np\n",
    "import torch\n",
    "import torch.optim as optim\n",
    "import torch.nn as nn\n",
    "import torchvision\n",
    "import torchvision.transforms as transforms\n",
    "from torch.autograd import Variable\n",
    "import torch.utils.data as data\n",
    "import argparse\n",
    "import logging\n",
    "import os\n",
    "import copy\n",
    "import random\n",
    "import json\n",
    "import datetime\n",
    "#from torch.utils.tensorboard import SummaryWriter\n",
    "\n",
    "from model import *\n",
    "from utils import *\n",
    "import pickle\n",
    "from AES import *\n",
    "import time\n",
    "from MyThread import MyThread"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:root:####################################################################################################\n",
      "INFO:root:Partitioning data\n",
      "INFO:root:Data statistics: {0: {0: 1323, 1: 1187, 2: 32, 3: 468, 4: 411, 5: 651, 6: 539, 7: 398, 8: 1815, 9: 1989}, 1: {0: 1668, 1: 1715, 2: 1484, 3: 1498, 4: 2116, 5: 296, 6: 1091, 7: 3172}, 2: {0: 1592, 1: 47, 2: 477, 3: 2153, 4: 427, 5: 2134, 6: 2492, 7: 1469, 8: 2351}, 3: {0: 667, 1: 1874, 2: 2584, 3: 390, 4: 2104, 5: 1997, 6: 976, 7: 977, 8: 317, 9: 206}, 4: {0: 673, 1: 1919, 2: 1381, 3: 1622, 4: 784, 5: 343, 6: 820, 7: 249, 8: 1368, 9: 3754}}\n",
      "INFO:root:Initializing nets\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len train_dl_global: 60000\n"
     ]
    }
   ],
   "source": [
    "def get_args():\n",
    "    \"\"\"Build and parse the command-line arguments for federated training.\n",
    "\n",
    "    Returns an argparse.Namespace populated entirely from the declared\n",
    "    defaults, because parse_args is invoked with an empty argument list\n",
    "    (see the note above the call).\n",
    "    \"\"\"\n",
    "    parser = argparse.ArgumentParser()\n",
    "    parser.add_argument('--model', type=str, default='simple-cnn', help='neural network used in training')\n",
    "    parser.add_argument('--dataset', type=str, default='mnist', help='dataset used for training')\n",
    "    parser.add_argument('--net_config', type=lambda x: list(map(int, x.split(', '))))\n",
    "    parser.add_argument('--partition', type=str, default='noniid-labeldir', help='the data partitioning strategy')\n",
    "    parser.add_argument('--batch-size', type=int, default=64, help='input batch size for training (default: 64)')\n",
    "    parser.add_argument('--lr', type=float, default=0.01, help='learning rate (default: 0.01)')\n",
    "    parser.add_argument('--epochs', type=int, default=1, help='number of local epochs')\n",
    "    parser.add_argument('--n_parties', type=int, default=5,  help='number of workers in a distributed cluster')\n",
    "    parser.add_argument('--alg', type=str, default='fedavg',\n",
    "                            help='fl algorithms: fedavg/fedprox/scaffold/fednova/moon')\n",
    "    parser.add_argument('--use_projection_head', type=bool, default=False, help='whether add an additional header to model or not (see MOON)')\n",
    "    parser.add_argument('--out_dim', type=int, default=256, help='the output dimension for the projection layer')\n",
    "    parser.add_argument('--loss', type=str, default='contrastive', help='for moon')\n",
    "    parser.add_argument('--temperature', type=float, default=0.5, help='the temperature parameter for contrastive loss')\n",
    "    parser.add_argument('--comm_round', type=int, default=1, help='number of maximum communication roun')\n",
    "    parser.add_argument('--is_same_initial', type=int, default=1, help='Whether initial all the models with the same parameters in fedavg')\n",
    "    parser.add_argument('--init_seed', type=int, default=0, help=\"Random seed\")\n",
    "    parser.add_argument('--dropout_p', type=float, required=False, default=0.0, help=\"Dropout probability. Default=0.0\")\n",
    "    parser.add_argument('--datadir', type=str, required=False, default=\"./data/\", help=\"Data directory\")\n",
    "    parser.add_argument('--reg', type=float, default=1e-5, help=\"L2 regularization strength\")\n",
    "    parser.add_argument('--logdir', type=str, required=False, default=\"./logs/\", help='Log directory path')\n",
    "    parser.add_argument('--modeldir', type=str, required=False, default=\"./models/\", help='Model directory path')\n",
    "    parser.add_argument('--beta', type=float, default=0.9, help='The parameter for the dirichlet distribution for data partitioning')\n",
    "    parser.add_argument('--device', type=str, default='cuda:0', help='The device to run the program')\n",
    "    parser.add_argument('--log_file_name', type=str, default=None, help='The log file name')\n",
    "    parser.add_argument('--optimizer', type=str, default='sgd', help='the optimizer')\n",
    "    parser.add_argument('--mu', type=float, default=0.001, help='the mu parameter for fedprox')\n",
    "    parser.add_argument('--noise', type=float, default=0, help='how much noise we add to some party')\n",
    "    parser.add_argument('--noise_type', type=str, default='level', help='Different level of noise or different space of noise')\n",
    "    parser.add_argument('--rho', type=float, default=0.9, help='Parameter controlling the momentum SGD')\n",
    "    parser.add_argument('--sample', type=float, default=1, help='Sample ratio for each communication round')\n",
    "    # args=[] makes argparse ignore sys.argv, so parsing works inside Jupyter,\n",
    "    # whose kernel passes its own command-line flags.\n",
    "    # args = parser.parse_args()\n",
    "    args = parser.parse_args(args=[])\n",
    "\n",
    "    return args\n",
    "\n",
    "def init_nets(net_configs, dropout_p, n_parties, args):\n",
    "\n",
    "    nets = {net_i: None for net_i in range(n_parties)}\n",
    "\n",
    "    if args.dataset in {'mnist', 'cifar10', 'svhn', 'fmnist'}:\n",
    "        n_classes = 10\n",
    "    if args.use_projection_head:\n",
    "        add = \"\"\n",
    "        if \"mnist\" in args.dataset and args.model == \"simple-cnn\":\n",
    "            add = \"-mnist\"\n",
    "        for net_i in range(n_parties):\n",
    "            net = ModelFedCon(args.model+add, args.out_dim, n_classes, net_configs)\n",
    "            nets[net_i] = net\n",
    "    else:\n",
    "        if args.alg == 'moon':\n",
    "            add = \"\"\n",
    "            if \"mnist\" in args.dataset and args.model == \"simple-cnn\":\n",
    "                add = \"-mnist\"\n",
    "            for net_i in range(n_parties):\n",
    "                net = ModelFedCon_noheader(args.model+add, args.out_dim, n_classes, net_configs)\n",
    "                nets[net_i] = net\n",
    "        else:\n",
    "            for net_i in range(n_parties):\n",
    "                if args.model == \"simple-cnn\":\n",
    "                    if args.dataset in (\"cifar10\", \"cinic10\", \"svhn\"):\n",
    "                        net = SimpleCNN(input_dim=(16 * 5 * 5), hidden_dims=[120, 84], output_dim=10)\n",
    "                    elif args.dataset in (\"mnist\", 'femnist', 'fmnist'):\n",
    "                        net = SimpleCNNMNIST(input_dim=(16 * 4 * 4), hidden_dims=[120, 84], output_dim=10)\n",
    "                    elif args.dataset == 'celeba':\n",
    "                        net = SimpleCNN(input_dim=(16 * 5 * 5), hidden_dims=[120, 84], output_dim=2)\n",
    "                nets[net_i] = net\n",
    "\n",
    "    model_meta_data = []\n",
    "    layer_type = []\n",
    "    for (k, v) in nets[0].state_dict().items():\n",
    "        model_meta_data.append(v.shape)\n",
    "        layer_type.append(k)\n",
    "    return nets, model_meta_data, layer_type\n",
    "\n",
    "\n",
    "def train_net(net_id, net, train_dataloader, test_dataloader, epochs, lr, args_optimizer, device=\"cpu\"):\n",
    "    \"\"\"Train one party's network locally and report pre/post accuracy.\n",
    "\n",
    "    Args:\n",
    "        net_id: party identifier (used only for logging).\n",
    "        net: model to train (caller has already moved it to `device`).\n",
    "        train_dataloader: a DataLoader, or a list of DataLoaders.\n",
    "        test_dataloader: DataLoader for the held-out test set.\n",
    "        epochs: number of local epochs.\n",
    "        lr: learning rate.\n",
    "        args_optimizer: one of 'adam', 'amsgrad', 'sgd'.\n",
    "        device: torch device string; the model is moved back to CPU at the end.\n",
    "\n",
    "    Returns:\n",
    "        (train_acc, test_acc) measured after training.\n",
    "\n",
    "    Note: reads the module-level `args` for weight decay (args.reg) and SGD\n",
    "    momentum (args.rho).\n",
    "    \"\"\"\n",
    "    logger.info('Training network %s' % str(net_id))\n",
    "\n",
    "    train_acc = compute_accuracy(net, train_dataloader, device=device)\n",
    "    test_acc, conf_matrix = compute_accuracy(net, test_dataloader, get_confusion_matrix=True, device=device)\n",
    "\n",
    "    logger.info('>> Pre-Training Training accuracy: {}'.format(train_acc))\n",
    "    logger.info('>> Pre-Training Test accuracy: {}'.format(test_acc))\n",
    "\n",
    "    if args_optimizer == 'adam':\n",
    "        optimizer = optim.Adam(filter(lambda p: p.requires_grad, net.parameters()), lr=lr, weight_decay=args.reg)\n",
    "    elif args_optimizer == 'amsgrad':\n",
    "        optimizer = optim.Adam(filter(lambda p: p.requires_grad, net.parameters()), lr=lr, weight_decay=args.reg,\n",
    "                               amsgrad=True)\n",
    "    elif args_optimizer == 'sgd':\n",
    "        optimizer = optim.SGD(filter(lambda p: p.requires_grad, net.parameters()), lr=lr, momentum=args.rho, weight_decay=args.reg)\n",
    "    else:\n",
    "        # An unknown optimizer name previously crashed later with a NameError.\n",
    "        raise ValueError('Unsupported optimizer: %s' % args_optimizer)\n",
    "    criterion = nn.CrossEntropyLoss().to(device)\n",
    "\n",
    "    # Normalize to a list of dataloaders so both calling conventions work.\n",
    "    if not isinstance(train_dataloader, list):\n",
    "        train_dataloader = [train_dataloader]\n",
    "\n",
    "    for epoch in range(epochs):\n",
    "        epoch_loss_collector = []\n",
    "        for tmp in train_dataloader:\n",
    "            for batch_idx, (x, target) in enumerate(tmp):\n",
    "                x, target = x.to(device), target.to(device)\n",
    "\n",
    "                optimizer.zero_grad()\n",
    "                # (Removed the old `x.requires_grad = True`: input gradients were\n",
    "                # never read, so computing them only wasted time and memory.)\n",
    "                target = target.long()\n",
    "\n",
    "                out = net(x)\n",
    "                loss = criterion(out, target)\n",
    "\n",
    "                loss.backward()\n",
    "                optimizer.step()\n",
    "\n",
    "                epoch_loss_collector.append(loss.item())\n",
    "\n",
    "        epoch_loss = sum(epoch_loss_collector) / len(epoch_loss_collector)\n",
    "        logger.info('Epoch: %d Loss: %f' % (epoch, epoch_loss))\n",
    "\n",
    "    # NOTE(review): train_dataloader is a list at this point; compute_accuracy\n",
    "    # is assumed to accept either form -- confirm against utils.compute_accuracy.\n",
    "    train_acc = compute_accuracy(net, train_dataloader, device=device)\n",
    "    test_acc, conf_matrix = compute_accuracy(net, test_dataloader, get_confusion_matrix=True, device=device)\n",
    "\n",
    "    logger.info('>> Training accuracy: %f' % train_acc)\n",
    "    logger.info('>> Test accuracy: %f' % test_acc)\n",
    "\n",
    "    net.to('cpu')\n",
    "    logger.info(' ** Training complete **')\n",
    "    return train_acc, test_acc\n",
    "\n",
    "\n",
    "def view_image(train_dataloader):\n",
    "    for (x, target) in train_dataloader:\n",
    "        np.save(\"img.npy\", x)\n",
    "        print(x.shape)\n",
    "        exit(0)\n",
    "\n",
    "\n",
    "def local_train_net(nets, selected, args, net_dataidx_map, test_dl = None, device=\"cpu\"):\n",
    "    \"\"\"Train each selected party's network on its own data partition.\n",
    "\n",
    "    Args:\n",
    "        nets: dict of party id -> model.\n",
    "        selected: collection of party ids to train this round.\n",
    "        args: parsed arguments (noise settings, dataset, paths, hyperparameters).\n",
    "        net_dataidx_map: party id -> indices of that party's training samples.\n",
    "        test_dl: dataloader used for the post-training evaluation.\n",
    "        device: device to train on.\n",
    "\n",
    "    Returns:\n",
    "        All models as a list (trained and untrained alike), in dict order.\n",
    "    \"\"\"\n",
    "    avg_acc = 0.0\n",
    "\n",
    "    for net_id, net in nets.items():\n",
    "        if net_id not in selected:\n",
    "            continue\n",
    "        dataidxs = net_dataidx_map[net_id]\n",
    "\n",
    "        logger.info(\"Training network %s. n_training: %d\" % (str(net_id), len(dataidxs)))\n",
    "        # Move the model to the training device.\n",
    "        net.to(device)\n",
    "\n",
    "        # The last party gets a zero noise level; others use args.noise\n",
    "        # (interpreted by get_dataloader).\n",
    "        noise_level = args.noise\n",
    "        if net_id == args.n_parties - 1:\n",
    "            noise_level = 0\n",
    "\n",
    "        if args.noise_type == 'space':\n",
    "            train_dl_local, test_dl_local, _, _ = get_dataloader(args.dataset, args.datadir, args.batch_size, 32, dataidxs, noise_level, net_id, args.n_parties-1)\n",
    "        else:\n",
    "            # Scale the noise level linearly with the party id.\n",
    "            noise_level = args.noise / (args.n_parties - 1) * net_id\n",
    "            train_dl_local, test_dl_local, _, _ = get_dataloader(args.dataset, args.datadir, args.batch_size, 32, dataidxs, noise_level)\n",
    "        # (A global dataloader was previously rebuilt here on every iteration\n",
    "        # and never used -- removed to avoid reloading the dataset per party.)\n",
    "        n_epoch = args.epochs\n",
    "\n",
    "        trainacc, testacc = train_net(net_id, net, train_dl_local, test_dl, n_epoch, args.lr, args.optimizer, device=device)\n",
    "        logger.info(\"net %d final test acc %f\" % (net_id, testacc))\n",
    "        avg_acc += testacc\n",
    "\n",
    "    if selected:\n",
    "        # Guard an empty selection (previously ZeroDivisionError).\n",
    "        avg_acc /= len(selected)\n",
    "    if args.alg == 'local_training':\n",
    "        logger.info(\"avg test acc %f\" % avg_acc)\n",
    "\n",
    "    nets_list = list(nets.values())\n",
    "    return nets_list\n",
    "\n",
    "\n",
    "def get_partition_dict(dataset, partition, n_parties, init_seed=0, datadir='./data', logdir='./logs', beta=0.5):\n",
    "    \"\"\"Seed every RNG, partition the dataset, and return the party -> indices map.\"\"\"\n",
    "    # Seed numpy, torch and the stdlib RNG so the partition is reproducible.\n",
    "    for seed_rng in (np.random.seed, torch.manual_seed, random.seed):\n",
    "        seed_rng(init_seed)\n",
    "    # partition_data returns (X_train, y_train, X_test, y_test,\n",
    "    # net_dataidx_map, traindata_cls_counts); only the index map is needed.\n",
    "    partition_result = partition_data(\n",
    "        dataset, datadir, logdir, partition, n_parties, beta=beta)\n",
    "    return partition_result[4]\n",
    "\n",
    "\n",
    "def AddNoise(dict, mean=0., std=1) -> dict:\n",
    "    for key in dict:\n",
    "        dict[key] += torch.randn(dict[key].size()) * std + mean\n",
    "    return dict\n",
    "\n",
    "\n",
    "def connect_to_server(key, enc_bytes):\n",
    "    \"\"\"Upload the encrypted parameters, then the key, to the aggregation server.\"\"\"\n",
    "    # Create the TCP client socket.\n",
    "    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
    "    # NOTE(review): binds a fixed local port (12346) -- presumably so the server\n",
    "    # can recognize this client; confirm against the server implementation.\n",
    "    client_socket.bind(('127.0.0.1',12346))\n",
    "    # Server address and port.\n",
    "    server_address = ('localhost', 12345)\n",
    "\n",
    "    try:\n",
    "        # Connect to the server.\n",
    "        client_socket.connect(server_address)\n",
    "        print(\"Connected to {}:{}\".format(*server_address))\n",
    "\n",
    "        # send() may transmit only part of a large payload; sendall() loops\n",
    "        # until the whole encrypted model is on the wire.\n",
    "        client_socket.sendall(enc_bytes)\n",
    "\n",
    "        # Wait for the server's acknowledgement.\n",
    "        response = client_socket.recv(1024)\n",
    "        print(\"Server response: {}\".format(response.decode()))\n",
    "\n",
    "        client_socket.sendall(key)\n",
    "\n",
    "        response = client_socket.recv(1024)\n",
    "        print(\"Server response: {}\".format(response.decode()))\n",
    "\n",
    "    except ConnectionRefusedError:\n",
    "        print(\"Connection refused. Make sure the server is running.\")\n",
    "\n",
    "    finally:\n",
    "        # Always release the client socket.\n",
    "        client_socket.close()\n",
    "        \n",
    "\n",
    "def handle_client(client_socket) -> dict:\n",
    "    \n",
    "    total_data = bytes()\n",
    "    while True:\n",
    "    # 将收到的数据拼接起来\n",
    "        data = client_socket.recv(1024)\n",
    "        total_data += data\n",
    "        if len(data) < 1024:\n",
    "            break\n",
    "\n",
    "    global_bytes = copy.deepcopy(total_data)\n",
    "    response = \"Gradients Received.\"\n",
    "    client_socket.send(response.encode())\n",
    "    global_para = pickle.loads(global_bytes)\n",
    "    \n",
    "    # 关闭客户端连接\n",
    "    client_socket.close()\n",
    "\n",
    "    return global_para\n",
    "    \n",
    "\n",
    "def start_server() -> dict:\n",
    "    \"\"\"Accept one client on localhost:12346 and return its uploaded parameters.\n",
    "\n",
    "    The connection is serviced by a MyThread running handle_client so the\n",
    "    deserialized parameter dict can be collected from the worker. (The return\n",
    "    annotation previously claimed (list, list); the actual value is whatever\n",
    "    handle_client unpickles -- a parameter dict.)\n",
    "    \"\"\"\n",
    "    # Create the TCP listening socket.\n",
    "    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
    "    # Allow quick rebinding of the address between runs.\n",
    "    server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n",
    "    server_address = ('localhost', 12346)\n",
    "    server_socket.bind(server_address)\n",
    "    # Listen for incoming connections.\n",
    "    server_socket.listen(5)\n",
    "\n",
    "    print(\"Server started. Listening on {}:{}\".format(*server_address))\n",
    "\n",
    "    try:\n",
    "        client_socket, client_address = server_socket.accept()\n",
    "        print(\"Accepted connection from {}:{}\".format(*client_address))\n",
    "\n",
    "        # Handle the client on a thread whose return value can be collected.\n",
    "        # client_thread = threading.Thread(target=handle_client, args=(client_socket,))\n",
    "        client_thread = MyThread(func=handle_client, args=(client_socket,))\n",
    "        client_thread.start()\n",
    "\n",
    "        client_thread.join()\n",
    "        global_para = client_thread.get_result()\n",
    "    finally:\n",
    "        # The listening socket previously leaked; always close it.\n",
    "        server_socket.close()\n",
    "\n",
    "    return global_para\n",
    "\n",
    "    \n",
    "def connect_to_server_file(file_name: str, key: bytes):\n",
    "    \"\"\"Tell the server which file holds the encrypted upload, then send the key.\"\"\"\n",
    "    # Create the TCP client socket.\n",
    "    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
    "    # NOTE(review): binds a fixed local port (12349) -- presumably so the server\n",
    "    # can recognize this client; confirm against the server implementation.\n",
    "    client_socket.bind(('127.0.0.1',12349))\n",
    "    # Server address and port.\n",
    "    server_address = ('localhost', 12345)\n",
    "\n",
    "    try:\n",
    "        # Connect to the server.\n",
    "        client_socket.connect(server_address)\n",
    "        print(\"Connected to {}:{}\".format(*server_address))\n",
    "\n",
    "        # sendall() guarantees the complete name and key are transmitted.\n",
    "        client_socket.sendall(file_name.encode())\n",
    "\n",
    "        response = client_socket.recv(1024)\n",
    "        print(\"Server response: {}\".format(response.decode()))\n",
    "\n",
    "        client_socket.sendall(key)\n",
    "\n",
    "        response = client_socket.recv(1024)\n",
    "        print(\"Server response: {}\".format(response.decode()))\n",
    "\n",
    "    except ConnectionRefusedError:\n",
    "        print(\"Connection refused. Make sure the server is running.\")\n",
    "\n",
    "    finally:\n",
    "        # Always release the client socket.\n",
    "        client_socket.close()\n",
    "\n",
    "\n",
    "def start_server_file() -> dict:\n",
    "    \"\"\"Accept one client on localhost:12349, read the file it names, and\n",
    "    return the unpickled contents.\n",
    "    \"\"\"\n",
    "    # Create the TCP listening socket.\n",
    "    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
    "    # Allow quick rebinding of the address between runs.\n",
    "    server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n",
    "    server_address = ('localhost', 12349)\n",
    "    server_socket.bind(server_address)\n",
    "    # Listen for incoming connections.\n",
    "    server_socket.listen(5)\n",
    "\n",
    "    print(\"Server started. Listening on {}:{}\".format(*server_address))\n",
    "\n",
    "    try:\n",
    "        client_socket, client_address = server_socket.accept()\n",
    "        print(\"Accepted connection from {}:{}\".format(*client_address))\n",
    "\n",
    "        # recv() yields bytes; decode to get a normal file-name string.\n",
    "        file_name = client_socket.recv(1024).decode()\n",
    "\n",
    "        # SECURITY: unpickling a file chosen by the peer can execute arbitrary\n",
    "        # code -- only use with trusted clients.\n",
    "        with open(file_name,'rb') as f:\n",
    "            global_para = pickle.load(f)\n",
    "\n",
    "        response = \"Global para received.\"\n",
    "        client_socket.send(response.encode())\n",
    "        print(response)\n",
    "\n",
    "        client_socket.close()\n",
    "    finally:\n",
    "        # The listening socket previously leaked; always close it.\n",
    "        server_socket.close()\n",
    "\n",
    "    return global_para\n",
    "\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    # NOTE: inside a notebook __name__ is always '__main__', so this guard\n",
    "    # only matters if the code is exported to a standalone script.\n",
    "    # torch.set_printoptions(profile=\"full\")\n",
    "    args = get_args()\n",
    "    device = torch.device(args.device)\n",
    "    \n",
    "    # Seed every RNG so partitioning and model init are reproducible.\n",
    "    seed = args.init_seed\n",
    "    logger.info(\"#\" * 100)\n",
    "    np.random.seed(seed)\n",
    "    torch.manual_seed(seed)\n",
    "    random.seed(seed)\n",
    "    \n",
    "    logger.info(\"Partitioning data\")\n",
    "    X_train, y_train, X_test, y_test, net_dataidx_map, traindata_cls_counts = partition_data(\n",
    "        args.dataset, args.datadir, args.logdir, args.partition, args.n_parties, beta=args.beta)\n",
    "\n",
    "    n_classes = len(np.unique(y_train))\n",
    "\n",
    "    # Global dataloaders over the full train/test splits (the 32 is presumably\n",
    "    # the test batch size -- confirm against utils.get_dataloader).\n",
    "    train_dl_global, test_dl_global, train_ds_global, test_ds_global = get_dataloader(args.dataset,\n",
    "                                                                                        args.datadir,\n",
    "                                                                                        args.batch_size,\n",
    "                                                                                        32)\n",
    "\n",
    "    print(\"len train_dl_global:\", len(train_ds_global))\n",
    "\n",
    "    data_size = len(test_ds_global)\n",
    "\n",
    "    if args.alg == 'fedavg':\n",
    "        logger.info(\"Initializing nets\")\n",
    "        # One model per party plus a single global model for aggregation.\n",
    "        nets, local_model_meta_data, layer_type = init_nets(args.net_config, args.dropout_p, args.n_parties, args)\n",
    "        global_models, global_model_meta_data, global_layer_type = init_nets(args.net_config, 0, 1, args)\n",
    "        global_model = global_models[0]\n",
    "\n",
    "        global_para = global_model.state_dict()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# # Socket communication\n",
    "# NOTE(review): this whole cell is intentionally disabled; it is the\n",
    "# socket-based variant of the file-based round in the next cell.\n",
    "# global_para = global_model.state_dict()\n",
    "# if args.is_same_initial:\n",
    "#     for net_id, net in nets.items():\n",
    "#         net.load_state_dict(global_para)\n",
    "\n",
    "# for round in range(args.comm_round):\n",
    "#     logger.info(\"in comm round:\" + str(round))\n",
    "\n",
    "#     selected = [1]\n",
    "\n",
    "#     global_para = global_model.state_dict()\n",
    "#     if round == 0:\n",
    "#         if args.is_same_initial:\n",
    "#             for idx in selected:\n",
    "#                 nets[idx].load_state_dict(global_para)\n",
    "#     else:\n",
    "#         for idx in selected:\n",
    "#             nets[idx].load_state_dict(global_para)\n",
    "\n",
    "#     local_train_net(nets, selected, args, net_dataidx_map, test_dl = test_dl_global, device=args.device)\n",
    "\n",
    "#     # local_train_net(nets, args, net_dataidx_map, local_split=False, device=device)\n",
    "\n",
    "#     net_para = nets[0].cpu().state_dict()\n",
    "\n",
    "#     noise_para = AddNoise(net_para, std=0.001)\n",
    "\n",
    "#     key = random_key(5)\n",
    "#     key = PadKey(key)\n",
    "\n",
    "#     noise_bytes = pickle.dumps(noise_para)\n",
    "#     noise_bytes = PadTest(noise_bytes)                              # pad the original data\n",
    "\n",
    "#     enc_bytes = EnCrypt(key, noise_bytes)                   # encrypt the original data with the key\n",
    "\n",
    "#     connect_to_server(key, enc_bytes)\n",
    "\n",
    "#     new_para = start_server()\n",
    "\n",
    "#     global_model.load_state_dict(new_para)\n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:root:in comm round:0\n",
      "INFO:root:Training network 3. n_training: 12092\n",
      "INFO:root:Training network 3\n",
      "INFO:root:>> Pre-Training Training accuracy: 0.02621567978828978\n",
      "INFO:root:>> Pre-Training Test accuracy: 0.0972\n",
      "INFO:root:Epoch: 0 Loss: 1.618903\n",
      "INFO:root:>> Training accuracy: 0.859990\n",
      "INFO:root:>> Test accuracy: 0.709400\n",
      "INFO:root: ** Training complete **\n",
      "INFO:root:net 3 final test acc 0.709400\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Connected to localhost:12345\n",
      "Server response: Gradients Received.\n",
      "Server response: Key Received.\n"
     ]
    }
   ],
   "source": [
    "# # Local file transfer (previously: read the starting global model from disk)\n",
    "# with open('global_round_0.pkl','rb') as f:\n",
    "#     global_para = pickle.load(f)\n",
    "\n",
    "# Broadcast the same initial weights to every party.\n",
    "if args.is_same_initial:\n",
    "    for net_id, net in nets.items():\n",
    "        net.load_state_dict(global_para)\n",
    "\n",
    "# (Loop variable renamed from `round`, which shadowed the builtin.)\n",
    "for round_idx in range(args.comm_round):\n",
    "    logger.info(\"in comm round:\" + str(round_idx))\n",
    "\n",
    "    # Only party 3 participates in this experiment.\n",
    "    selected = [3]\n",
    "\n",
    "    global_para = global_model.state_dict()\n",
    "    if round_idx == 0:\n",
    "        if args.is_same_initial:\n",
    "            for idx in selected:\n",
    "                nets[idx].load_state_dict(global_para)\n",
    "    else:\n",
    "        for idx in selected:\n",
    "            nets[idx].load_state_dict(global_para)\n",
    "\n",
    "    local_train_net(nets, selected, args, net_dataidx_map, test_dl = test_dl_global, device=args.device)\n",
    "\n",
    "    net_para = nets[selected[0]].cpu().state_dict()\n",
    "\n",
    "    # Perturb the parameters before upload.\n",
    "    noise_para = AddNoise(net_para, std=0.001)\n",
    "\n",
    "    key = random_key(5)\n",
    "    key = PadKey(key)\n",
    "\n",
    "    noise_bytes = pickle.dumps(noise_para)\n",
    "    noise_bytes = PadTest(noise_bytes)            # pad the original data\n",
    "    enc_bytes = EnCrypt(key, noise_bytes)         # encrypt the original data with the key\n",
    "\n",
    "    # Derive the file name from the selected party instead of hardcoding 3.\n",
    "    file_name = 'client_' + str(selected[0]) + '_round_' + str(round_idx) + '.pkl'\n",
    "\n",
    "    with open(file_name, 'wb') as f:\n",
    "        f.write(enc_bytes)\n",
    "\n",
    "    connect_to_server_file(file_name, key)\n",
    "\n",
    "    # new_para = start_server_file()\n",
    "    # global_model.load_state_dict(new_para)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Global-model evaluation (disabled): move the aggregated model to the\n",
    "# device and measure its accuracy on the full train/test splits.\n",
    "# global_model.to(device)\n",
    "\n",
    "# train_dl_global, test_dl_global, _, _ = get_dataloader(args.dataset, args.datadir, args.batch_size, 32)\n",
    "\n",
    "# train_acc = compute_accuracy(global_model, train_dl_global, device=device)\n",
    "# test_acc, conf_matrix = compute_accuracy(global_model, test_dl_global, get_confusion_matrix=True, device=device)\n",
    "\n",
    "\n",
    "# logger.info('>> Global Model Train accuracy: %f' % train_acc)\n",
    "# logger.info('>> Global Model Test accuracy: %f' % test_acc)\n",
    "\n",
    "# print('>> Global Model Train accuracy: %f' % train_acc)\n",
    "# print('>> Global Model Test accuracy: %f' % test_acc)\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "FedAvg",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.15"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
