{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "3.6.15 (default, Dec  3 2021, 18:25:24) [MSC v.1916 64 bit (AMD64)]\n"
     ]
    }
   ],
   "source": [
     "# Record the interpreter version so the notebook's environment is documented in-line.\n",
     "import sys\n",
     "print(sys.version)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import numpy as np\n",
    "import torch.utils.data as data\n",
    "import torchvision.transforms as transforms\n",
    "from torchvision.datasets import MNIST\n",
    "from PIL import Image\n",
    "import matplotlib.pyplot as plt\n",
    "import math"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "def transform_invert(img_, transform_train):\n",
    "    \"\"\"\n",
    "    将data 进行反transfrom操作\n",
    "    :param img_: tensor\n",
    "    :param transform_train: torchvision.transforms\n",
    "    :return: PIL image\n",
    "    \"\"\"\n",
    "    if 'Normalize' in str(transform_train):\n",
    "        norm_transform = list(filter(lambda x: isinstance(x, transforms.Normalize), transform_train.transforms))\n",
    "        mean = torch.tensor(norm_transform[0].mean, dtype=img_.dtype, device=img_.device)\n",
    "        std = torch.tensor(norm_transform[0].std, dtype=img_.dtype, device=img_.device)\n",
    "        img_.mul_(std[:, None, None]).add_(mean[:, None, None])\n",
    " \n",
    "    img_ = img_.transpose(0, 2).transpose(0, 1)  # C*H*W --> H*W*C\n",
    "    img_ = np.array(img_) * 255\n",
    " \n",
    "    if img_.shape[2] == 3:\n",
    "        img_ = Image.fromarray(img_.astype('uint8')).convert('RGB')\n",
    "    elif img_.shape[2] == 1:\n",
    "        img_ = Image.fromarray(img_.astype('uint8').squeeze())\n",
    "    else:\n",
    "        raise Exception(\"Invalid img shape, expected 1 or 3 in axis 2, but got {}!\".format(img_.shape[2]) )\n",
    " \n",
    "    return img_"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
     "class MNIST_truncated(data.Dataset):\n",
     "    \"\"\"MNIST restricted to an optional subset of sample indices.\n",
     "\n",
     "    Wraps torchvision's MNIST and, when dataidxs is given, keeps only the\n",
     "    samples at those indices (so each federated party gets its own shard).\n",
     "    \"\"\"\n",
     "\n",
     "    def __init__(self, root, dataidxs=None, train=True, transform=None, target_transform=None, download=False):\n",
     "        \"\"\"\n",
     "        :param root: dataset root directory, passed through to torchvision MNIST\n",
     "        :param dataidxs: optional index subset to keep; None keeps every sample\n",
     "        :param train: select the train (True) or test (False) split\n",
     "        :param transform: transform applied to each PIL image in __getitem__\n",
     "        :param target_transform: transform applied to each label\n",
     "        :param download: download the data if it is not present locally\n",
     "        \"\"\"\n",
     "\n",
     "        self.root = root\n",
     "        self.dataidxs = dataidxs\n",
     "        self.train = train\n",
     "        self.transform = transform\n",
     "        self.target_transform = target_transform\n",
     "        self.download = download\n",
     "\n",
     "        self.data, self.target = self.__build_truncated_dataset__()\n",
     "\n",
     "    def __build_truncated_dataset__(self):\n",
     "        \"\"\"Load MNIST and truncate it to self.dataidxs; returns (data, targets).\"\"\"\n",
     "\n",
     "        mnist_dataobj = MNIST(self.root, self.train, self.transform, self.target_transform, self.download)\n",
     "\n",
     "        data = mnist_dataobj.data\n",
     "        target = mnist_dataobj.targets\n",
     "\n",
     "        if self.dataidxs is not None:\n",
     "            data = data[self.dataidxs]\n",
     "            target = target[self.dataidxs]\n",
     "\n",
     "        return data, target\n",
     "\n",
     "    def __getitem__(self, index):\n",
     "        \"\"\"Return (img, target) for the given index.\n",
     "\n",
     "        NOTE(review): after self.transform is applied, the result is passed\n",
     "        through transform_invert, so this returns a PIL image rather than a\n",
     "        tensor -- apparently for visualization; confirm callers expect that.\n",
     "        \"\"\"\n",
     "\n",
     "        img, target = self.data[index], self.target[index]\n",
     "\n",
     "        # mode='L': raw MNIST pixels are single-channel 8-bit grayscale.\n",
     "        img = Image.fromarray(img.numpy(), mode='L')\n",
     "\n",
     "        if self.transform is not None:\n",
     "            img = self.transform(img)\n",
     "\n",
     "        if self.target_transform is not None:\n",
     "            target = self.target_transform(target)\n",
     "\n",
     "        img = transform_invert(img, self.transform)\n",
     "\n",
     "        return img, target\n",
     "\n",
     "    def __len__(self):\n",
     "        # Number of samples after truncation.\n",
     "        return len(self.data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "def load_mnist_data(datadir):\n",
    "\n",
    "    transform = transforms.Compose([transforms.ToTensor()])\n",
    "\n",
    "    mnist_train_ds = MNIST_truncated(datadir, train=True, download=True, transform=transform)\n",
    "    mnist_test_ds = MNIST_truncated(datadir, train=False, download=True, transform=transform)\n",
    "\n",
    "    X_train, y_train = mnist_train_ds.data, mnist_train_ds.target\n",
    "    X_test, y_test = mnist_test_ds.data, mnist_test_ds.target\n",
    "\n",
    "    X_train = X_train.data.numpy()\n",
    "    y_train = y_train.data.numpy()\n",
    "    X_test = X_test.data.numpy()\n",
    "    y_test = y_test.data.numpy()\n",
    "    \n",
    "    img, _ = mnist_train_ds.__getitem__(0)\n",
    "    # img.show()      #显示样本图片\n",
    "\n",
    "    return (X_train, y_train, X_test, y_test)\n",
    "\n",
    "# load_mnist_data(\"./data/\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
     "def partition_data(n_parties, beta=10):\n",
     "    \"\"\"\n",
     "    Dirichlet-style partition of MNIST across n_parties with a poisoned team.\n",
     "\n",
     "    The first floor(p_team * n_parties) parties form the team: label 1 is\n",
     "    over-allocated to them (5x the uniform share) and label 7 is withheld\n",
     "    from them; the other labels use a Dirichlet(beta) draw with party 0's\n",
     "    share zeroed out.\n",
     "\n",
     "    :param n_parties: number of parties to split the training data across\n",
     "    :param beta: Dirichlet concentration parameter for the non-team shares\n",
     "    \"\"\"\n",
     "    p_team = 0.3  # fraction of parties forming the poisoned team\n",
     "\n",
     "    X_train, y_train, X_test, y_test = load_mnist_data(\"./data/\")\n",
     "\n",
     "    K = 10  # number of MNIST classes\n",
     "    net_dataidx_map = {}\n",
     "    idx_batch = [[] for _ in range(n_parties)]\n",
     "    N = y_train.shape[0]\n",
     "\n",
     "    for k in range(K):\n",
     "        idx_k = np.where(y_train == k)[0]   # indices of images whose label is k\n",
     "        np.random.shuffle(idx_k)\n",
     "        if k == 1:\n",
     "            # Team parties get 5x the uniform share ((pos < team) * 4 + 1 == 5);\n",
     "            # the remaining parties draw their shares from Dirichlet(beta).\n",
     "            proportions = np.repeat(1 / n_parties, n_parties)\n",
     "            proportions_poison = np.array([((pos < math.floor(p_team * n_parties)) * 4 + 1) * val for pos, val in enumerate(proportions)])\n",
     "            proportions_normal = np.random.dirichlet(np.repeat(beta, n_parties))\n",
     "            proportions = np.append(proportions_poison[:math.floor(p_team * n_parties)], proportions_normal[math.floor(p_team * n_parties):])\n",
     "            # print(\"proportions1:\", proportions)\n",
     "        elif k == 7:\n",
     "            # Team parties get zero label-7 samples ((pos < team) * -1 + 1 == 0).\n",
     "            proportions = np.repeat(1 / n_parties, n_parties)\n",
     "            proportions_poison = np.array([((pos < math.floor(p_team * n_parties)) * -1 + 1) * val for pos, val in enumerate(proportions)])\n",
     "            proportions_normal = np.random.dirichlet(np.repeat(beta, n_parties))\n",
     "            proportions = np.append(proportions_poison[:math.floor(p_team * n_parties)], proportions_normal[math.floor(p_team * n_parties):])\n",
     "            # print(\"proportions7:\", proportions)\n",
     "        else:\n",
     "            # NOTE(review): only party 0 is zeroed here and party 1's share is\n",
     "            # set to half of party 2's -- looks experimental; confirm intent.\n",
     "            proportions = np.repeat(1 / n_parties, n_parties)\n",
     "            proportions_poison = np.array([0])\n",
     "            proportions_normal = np.random.dirichlet(np.repeat(beta, n_parties))\n",
     "            proportions_normal[1] = proportions_normal[2] / 2\n",
     "            proportions = np.append(proportions_poison, proportions_normal[1:])\n",
     "            print(\"proportions:\", proportions)\n",
     "        # else:\n",
     "        #     proportions = np.random.dirichlet(np.repeat(beta, n_parties))\n",
     "        # Zero the share of any party that already holds >= N / n_parties\n",
     "        # samples, renormalize, then convert the shares into cumulative split\n",
     "        # points for np.split below.\n",
     "        proportions = np.array([p * (len(idx_j) < N / n_parties) for p, idx_j in zip(proportions, idx_batch)])\n",
     "        proportions = proportions / proportions.sum()\n",
     "        proportions = (np.cumsum(proportions) * len(idx_k)).astype(int)[:-1]\n",
     "        print(\"nums:\", proportions)\n",
     "        \n",
     "        idx_batch = [idx_j + idx.tolist() for idx_j, idx in zip(idx_batch, np.split(idx_k, proportions))]\n",
     "    \n",
     "    for j in range(n_parties):\n",
     "        np.random.shuffle(idx_batch[j])\n",
     "        net_dataidx_map[j] = idx_batch[j]\n",
     "\n",
     "    net_cls_counts = {}\n",
     "\n",
     "    # Per-party label histogram, for sanity-checking the partition.\n",
     "    for net_i, dataidx in net_dataidx_map.items():\n",
     "        unq, unq_cnt = np.unique(y_train[dataidx], return_counts=True)\n",
     "        tmp = {unq[i]: unq_cnt[i] for i in range(len(unq))}\n",
     "        net_cls_counts[net_i] = tmp\n",
     "    \n",
     "    print(\"net_cls_counts:\", net_cls_counts)\n",
     "\n",
     "    for _, net_cls_count in net_cls_counts.items():\n",
     "        print(net_cls_count)\n",
     "\n",
     "\n",
     "# partition_data(10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def partition_data(n_parties):\n",
    "    X_train, y_train, X_test, y_test = load_mnist_data(\"./data/\")\n",
    "\n",
    "    idx1 = np.arange(5923, 12665)\n",
    "    np.random.shuffle(idx1)\n",
    "    batch_idx1 = np.array_split(idx1, 3)\n",
    "\n",
    "    idx0 = np.arange(5923)\n",
    "    idx2_9 = np.arange(12665, 60000)\n",
    "    idx_no_1 = np.append(idx0, idx2_9)\n",
    "    np.random.shuffle(idx_no_1)\n",
    "    batch_idx_no_1 = np.array_split(idx_no_1, n_parties - 3)\n",
    "\n",
    "    net_dataidx_map = {i: batch_idx1[i] for i in range(3)}\n",
    "    net_dataidx_map_no_1 = {i: batch_idx_no_1[i - 3] for i in range(3, 10)}\n",
    "    net_dataidx_map.update(net_dataidx_map_no_1)\n",
    "\n",
    "    print(net_dataidx_map)\n",
    "    \n",
    "\n",
    "# partition_data(10)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "nums 0: [   0    0    0  846 1692 2538 3384 4230 5076]\n",
      "nums 1: [1532 3064 4596 4903 5209 5516 5822 6129 6435]\n",
      "nums 2: [   0    0    0  851 1702 2553 3404 4255 5106]\n",
      "nums 3: [   0    0    0  875 1751 2627 3503 4379 5255]\n",
      "nums 4: [   0    0    0  834 1669 2503 3338 4172 5007]\n",
      "nums 5: [   0    0    0  774 1548 2323 3097 3872 4646]\n",
      "nums 6: [   0    0    0  845 1690 2536 3381 4227 5072]\n",
      "nums 7: [   0    0    0  895 1790 2685 3580 4475 5370]\n",
      "nums 8: [   0    0    0  835 1671 2507 3343 4179 5015]\n",
      "nums 9: [   0    0    0  849 1699 2549 3399 4249 5099]\n"
     ]
    }
   ],
   "source": [
    "def partition_data(n_parties, beta=10):\n",
    "    p_team = 0.3\n",
    "\n",
    "    X_train, y_train, X_test, y_test = load_mnist_data(\"./data/\")\n",
    "\n",
    "    K = 10\n",
    "    net_dataidx_map = {}\n",
    "    idx_batch = [[] for _ in range(n_parties)]\n",
    "    N = y_train.shape[0]\n",
    "\n",
    "    for k in range(K):\n",
    "        idx_k = np.where(y_train == k)[0]   #标签为k的图像对应的下标\n",
    "        np.random.shuffle(idx_k)\n",
    "        if k == 1:\n",
    "            proportions = np.repeat(1 / n_parties, n_parties)\n",
    "            proportions_poison = np.array([((pos < math.floor(p_team * n_parties)) * 4 + 1) * val for pos, val in enumerate(proportions)])\n",
    "            proportions = np.append(proportions_poison[:math.floor(p_team * n_parties)], proportions[math.floor(p_team * n_parties):])\n",
    "            # print(\"proportions1:\", proportions)\n",
    "        # elif k == 7:\n",
    "        #     proportions = np.repeat(1 / n_parties, n_parties)\n",
    "        #     proportions_poison = np.array([((pos < math.floor(p_team * n_parties)) * -1 + 1) * val for pos, val in enumerate(proportions)])\n",
    "        #     proportions_normal = np.random.dirichlet(np.repeat(beta, n_parties))\n",
    "        #     proportions = np.append(proportions_poison[:math.floor(p_team * n_parties)], proportions_normal[math.floor(p_team * n_parties):])\n",
    "        #     # print(\"proportions7:\", proportions)\n",
    "        else:\n",
    "            proportions = np.repeat(1 / n_parties, n_parties)\n",
    "            proportions_poison = np.array([0, 0, 0])\n",
    "            proportions = np.append(proportions_poison, proportions[math.floor(p_team * n_parties):])\n",
    "            # print(\"proportions:\", proportions)\n",
    "        # else:\n",
    "        #     proportions = np.random.dirichlet(np.repeat(beta, n_parties))\n",
    "        # proportions = np.array([p * (len(idx_j) < N / n_parties) for p, idx_j in zip(proportions, idx_batch)])\n",
    "        proportions = proportions / proportions.sum()\n",
    "        proportions = (np.cumsum(proportions) * len(idx_k)).astype(int)[:-1]\n",
    "        print(\"nums {}: {}\".format(k, proportions))\n",
    "        \n",
    "        idx_batch = [idx_j + idx.tolist() for idx_j, idx in zip(idx_batch, np.split(idx_k, proportions))]\n",
    "    \n",
    "    for j in range(n_parties):\n",
    "        np.random.shuffle(idx_batch[j])\n",
    "        net_dataidx_map[j] = idx_batch[j]\n",
    "\n",
    "partition_data(10)\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "FedAvg",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.15"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
