{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "6b439e84-8c6b-4126-8586-d7a1a7c7614e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 该代码文件主要为了在Purchase100上执行相关实验，获取实验数据。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "daf0a1f8-ef8d-4246-b945-b6e5d905f5f5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 基线，基于阈值，似然比，添加噪声鲁棒性"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "3a72ff09-8088-4a9d-b1f7-0825ff54c477",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 实验目标，可变的参数，输出的结果\n",
    "# 预期结果\n",
    "# 1、高风险指标的损失分布直方图\n",
    "# 2、base攻击成功率随风险变化图\n",
    "# 3、离群点邻居、距离参数控制的输出点\n",
    "# 4、同一种攻击方式、离群点和风险指标的攻击成功率对比\n",
    "# 5、同样的风险点，不同攻击方法的成功率比较\n",
    "# 6、模型训练集大小的影响\n",
    "# 7、参考模型数量的影响\n",
    "\n",
    "\n",
    "# 需要控制的实验参数：\n",
    "# 1、攻击方法\n",
    "# 2、离群点比例\n",
    "# 3、模型训练集大小"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "b61f101e-1c1e-41df-b80b-65d1e3d6eab8",
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "from torch import nn\n",
    "from torch.utils.data import DataLoader\n",
    "from torch.utils.data import Dataset\n",
    "from torchvision import datasets\n",
    "from torchvision import transforms\n",
    "from torchvision.transforms import ToTensor\n",
    "import torchvision.transforms as tt\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn import metrics\n",
    "\n",
    "import os"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "37ddaa77-35ce-49b9-acfd-a7799aadd9a5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 导入自己创建的python文件\n",
    "import sys\n",
    "sys.path.append(\"..\") # Adds higher directory to python modules path.\n",
    "from frame.DataProcess import *\n",
    "from frame.TrainUtil import *\n",
    "from frame.LIRAAttack import *\n",
    "from frame.AttackUtil import *\n",
    "from frame.ShadowAttack import *\n",
    "from frame.ThresholdAttack import *\n",
    "from frame.LabelAttack import *"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "4636a18e-244e-4a21-ba21-591c0295ce7a",
   "metadata": {},
   "outputs": [],
   "source": [
    "LEARNING_RATE = 5e-4\n",
    "BATCH_SIZE = 128\n",
    "MODEL = 'NN_4layer'\n",
    "EPOCHS = 100\n",
    "DATA_NAME = 'Purchase100_limited' \n",
    "weight_dir = os.path.join('..', 'weights_for_exp', DATA_NAME)\n",
    "weight_dir = os.path.join('..', 'weights_for_exp', DATA_NAME)\n",
    "num_shadowsets = 100\n",
    "seed = 0\n",
    "prop_keep = 0.5\n",
    "\n",
    "model_transform = transforms.Compose([])\n",
    "attack_transform = transforms.Compose([])\n",
    "device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n",
    "\n",
    "# 影子模型攻击相关参数\n",
    "sha_models = [1,2,3] #[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30]\n",
    "tar_model = 0\n",
    "attack_class = False #是否针对每个类别分别攻击\n",
    "attack_lr = 5e-4"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "aeb4eb26-230d-4960-baa6-9de9f26881bf",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 加载完整的训练数据集\n",
    "X_data, Y_data, train_keep = load_Purchase100_limited_keep(num_shadowsets, prop_keep, seed)\n",
    "all_data = CustomDataset(X_data, Y_data, model_transform)\n",
    "all_dataloader = DataLoader(all_data, batch_size=64, shuffle=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "0e839f4d-93e7-48f0-af07-349ebef049a2",
   "metadata": {},
   "outputs": [],
   "source": [
    "batch_size = BATCH_SIZE\n",
    "model = MODEL\n",
    "epochs = EPOCHS\n",
    "data_name = DATA_NAME \n",
    "weight_part = \"{}_{}_epoch{}_model\".format(data_name, model, epochs)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "8bf4ae41-482b-464b-b2df-f79924f00fe2",
   "metadata": {},
   "source": [
    "## 脆弱点的两种提取方式"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "a20f6954-5154-483b-a64b-8ab536b4989b",
   "metadata": {},
   "source": [
    "### 风险指标"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "90586eba-a7f4-4a19-a8a8-54d5d8633693",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# 加载所有参考模型上的损失、置信度、得分输出\n",
    "# conf_data_all, label_data, score_all = load_score_data_all(X_data, Y_data, weight_dir, num_shadowsets, data_name, model, weight_part, model_transform, batch_size, device)\n",
    "# loss_fn = nn.CrossEntropyLoss(reduction='none')\n",
    "# loss_data_all, label_data = load_loss_data_all(X_data, Y_data, loss_fn, weight_dir, num_shadowsets, data_name, model, weight_part, model_transform, batch_size, device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "5fbe2bf6-1750-4c23-91a8-80caf9b0825e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# np.save('../outputs_save/Purchase100_limited_loss.npy', loss_data_all)\n",
    "# np.save('../outputs_save/Purchase100_limited_score.npy', score_all)\n",
    "# np.save('../outputs_save/Purchase100_limited_conf.npy', conf_data_all)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "5a6e9c56-60bd-4bfc-a912-5e3b5eb52a4e",
   "metadata": {},
   "outputs": [],
   "source": [
    "loss_data_all = np.load('../outputs_save/Purchase100_limited_loss.npy')\n",
    "score_all = np.load('../outputs_save/Purchase100_limited_score.npy')\n",
    "conf_data_all = np.load('../outputs_save/Purchase100_limited_conf.npy')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "05a9a560-56f3-4111-85dc-12157ef4fc34",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 为每个数据点计算风险指标\n",
    "# 计算出一个点的脆弱程度评分\n",
    "pri_risk_all = get_risk_score(loss_data_all, train_keep)\n",
    "\n",
    "\n",
    "pri_risk_rank = np.argsort(pri_risk_all)\n",
    "pri_risk_rank = np.flip(pri_risk_rank)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "a101cfb8-9aa1-4499-aa49-50c49fd546d5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# df=pd.DataFrame({'risk': pri_risk_all})\n",
    "# df.to_csv('Purhase100_risk_distrib.csv', index=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "005427cb-620b-46b9-b2a0-2bf4b7bead11",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "7ec4645e-0613-463f-b71a-d09ea5a694e5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# _, _, M_train_keep = load_MNIST_keep(num_shadowsets, prop_keep, seed)\n",
    "# M_loss_data_all = np.load('../outputs_save/MNIST_loss.npy')\n",
    "# M_score_all = np.load('../outputs_save/MNIST_score.npy')\n",
    "# M_conf_data_all = np.load('../outputs_save/MNIST_conf.npy')\n",
    "# # 为每个数据点计算风险指标\n",
    "# # 计算出一个点的脆弱程度评分\n",
    "# M_pri_risk_all = get_risk_score(M_loss_data_all, M_train_keep)\n",
    "\n",
    "# # df=pd.DataFrame({'risk': M_pri_risk_all})\n",
    "# # df.to_csv('MNIST_risk_distrib.csv', index=False)\n",
    "\n",
    "\n",
    "# _, _, C_train_keep = load_CIFAR10_keep(num_shadowsets, prop_keep, seed)\n",
    "# C_loss_data_all = np.load('CIFAR10_loss.npy')\n",
    "# C_score_all = np.load('CIFAR10_score.npy')\n",
    "# C_conf_data_all = np.load('CIFAR10_conf.npy')\n",
    "# # 为每个数据点计算风险指标\n",
    "# # 计算出一个点的脆弱程度评分\n",
    "# C_pri_risk_all = get_risk_score(C_loss_data_all, C_train_keep)\n",
    "\n",
    "# # df=pd.DataFrame({'risk': C_pri_risk_all})\n",
    "# # df.to_csv('CIFAR10_risk_distrib.csv', index=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "b730eb7f-96d1-4725-89f9-d56ae531a5c5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# import seaborn as sns\n",
    "# import matplotlib.pyplot as plt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "5fba9b7d-50eb-413b-9a20-6913db7930c6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# plt.hist(pri_risk_all[:20000], bins=50, color='blue', alpha=0.3, log=True, label='Data 1')  # 使用对数坐标\n",
    "# plt.hist(M_pri_risk_all[:20000], bins=50, color='red', alpha=0.3, log=True, label='Data 2')\n",
    "# plt.hist(C_pri_risk_all[:20000], bins=50, color='green', alpha=0.3, log=True, label='Data 3')\n",
    "\n",
    "\n",
    "# plt.title('Distribution Histogram')\n",
    "# plt.xlabel('Value')\n",
    "# plt.ylabel('Frequency')\n",
    "\n",
    "# # 添加图例\n",
    "# plt.legend()\n",
    "\n",
    "# # 显示图形\n",
    "# plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "16cc7233-a9a0-4e74-990d-70e0e6ebfb74",
   "metadata": {},
   "source": [
    "### 离群点"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "e7e42cbf-89f1-4d1f-bbd0-dc87252c22cb",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 准备好logits的输出\n",
    "# 计算余弦相似度 5w*5w的大型矩阵\n",
    "# 邻居距离alpha，邻居数量"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "8d4c50e3-0ca3-463c-ad2a-4e4fd987b12e",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# logits_data_all, label_data = load_logits_data_all(X_data, Y_data, weight_dir, num_shadowsets, data_name, model, weight_part, model_transform, batch_size, device)\n",
    "# np.save('../outputs_save/Purchase100_limited_logits.npy', logits_data_all)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "a9314bc7-f529-46f5-834b-d1c387effef7",
   "metadata": {},
   "outputs": [],
   "source": [
    "logits_data_all = np.load('../outputs_save/Purchase100_limited_logits.npy')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "50d2365f-9e9d-4ffc-a9f7-16c1d3ed5646",
   "metadata": {},
   "outputs": [],
   "source": [
    "# # 按照k个模型进行拼接\n",
    "# k = 10\n",
    "# for i in range(k):\n",
    "#     if i == 0:\n",
    "#         combine_features = logits_data_all[i]\n",
    "#     else:\n",
    "#         combine_features = np.concatenate((combine_features, logits_data_all[i]),axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "1308ab27-3798-4a3c-8dd2-b5ce0234042e",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# # 数据量太大，不能保存所有的余弦相似度，只能需要时计算\n",
    "# alpha_list = [0.05, 0.1, 0.12, 0.15, 0.2, 0.3]\n",
    "# n_num_list = []\n",
    "# for i in range(combine_features.shape[0]):\n",
    "# # for i in range(10000):\n",
    "#     n_count = [0 for _ in alpha_list]\n",
    "#     if i%50 == 0:\n",
    "#         print(f\"compute to: {i}\")\n",
    "#     for j in range(combine_features.shape[0]):\n",
    "#         # 余弦距离的计算\n",
    "#         vec1 = combine_features[i]\n",
    "#         vec2 = combine_features[j]        \n",
    "#         cos_sim = vec1.dot(vec2) / (np.linalg.norm(vec1) * np.linalg.norm(vec2))\n",
    "#         cos_dis = 0.5 - 0.5 * cos_sim\n",
    "#         for m in range(len(alpha_list)):\n",
    "#             if (cos_dis < alpha_list[m]):\n",
    "#                 n_count[m] += 1\n",
    "#     n_num_list.append(n_count)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "ae886e98-bf25-4c0d-a2fa-f2c948574aa8",
   "metadata": {},
   "outputs": [],
   "source": [
    "# neigh_data_all = np.array(n_num_list)\n",
    "# np.save('../outputs_save/Purchase100_limited_neigh.npy', neigh_data_all)\n",
    "neigh_data_all = np.load('../outputs_save/Purchase100_limited_neigh.npy')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "79592536-1181-4647-9e40-fc959daa668e",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(60000, 6)"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "neigh_data_all.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "9d815b0b-568b-46d5-bdeb-fba567a8e213",
   "metadata": {},
   "outputs": [],
   "source": [
    "neigh_num = neigh_data_all[:,1]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "7bc974ea-ffb5-432f-b9da-ab7abce10270",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(60000,)"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "neigh_num.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "ddd4b8fa-1653-4bab-bf8f-4e2a116b17b7",
   "metadata": {},
   "outputs": [],
   "source": [
    "risk_rank = np.argsort(neigh_num)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "789a839a-cb2b-4da7-a1a2-ecac971344ef",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([50261, 41300, 48575, ..., 35173, 44992,  8317])"
      ]
     },
     "execution_count": 27,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "risk_rank"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "cce6c5e2-b030-4874-b08e-bf8466815484",
   "metadata": {},
   "source": [
    "## 针对脆弱点展开攻击"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "14cb98e0-737b-4ed0-93bd-a52681974606",
   "metadata": {},
   "source": [
    "### 基线攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 67,
   "id": "8fcedd1f-9149-44a9-bfc3-ce72a9074748",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = 600"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 68,
   "id": "8c66acdc-645a-4cf8-b0f6-babc51bce18a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 预测正确的判断为成员，预测不正确的判断为非成员"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 69,
   "id": "80e956c6-477f-4a92-b89e-ebf38da47113",
   "metadata": {},
   "outputs": [],
   "source": [
    "tmp = conf_data_all.argmax(2)\n",
    "pred_result = (tmp == Y_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 70,
   "id": "e8850876-8135-413b-bdd5-82fab28a06e8",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "outlier 0.7281833333333333\n",
      "MPLR 0.9535666666666667\n",
      "base 0.569891\n"
     ]
    }
   ],
   "source": [
    "pred_clip = pred_result[:, risk_rank[:x]]\n",
    "mem_clip = train_keep[:, risk_rank[:x]]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"outlier\", accuracy)\n",
    "\n",
    "pred_clip = pred_result[:, pri_risk_rank[:x]]\n",
    "mem_clip = train_keep[:, pri_risk_rank[:x]]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"MPLR\", accuracy)\n",
    "\n",
    "\n",
    "pred_clip = pred_result\n",
    "mem_clip = train_keep\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"base\", accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c08b9a40-068e-4411-8a51-13a05337a7cd",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "5cf3f668-0b0b-41b1-bc5f-73f89bed9169",
   "metadata": {},
   "source": [
    "### 阈值攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 83,
   "id": "61729130-6c97-4a19-b7c5-767f6ba1ae70",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 基于损失的阈值去做攻击，阈值如何确定？两个均值的均值"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "id": "e4320a63-a37c-4933-b118-62f50ece91a4",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = 6000"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "id": "0dd9f430-c918-47cd-9e8d-4ee1b35bf143",
   "metadata": {},
   "outputs": [],
   "source": [
    "loss_threshold = get_loss_threshold(loss_data_all, train_keep)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "id": "18f8fd0e-f86a-47fc-8105-7a3a53b10b28",
   "metadata": {},
   "outputs": [],
   "source": [
    "pred_result = loss_data_all < loss_threshold"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "id": "ef57e35b-e721-4cea-ad31-6b7cc4ef5362",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "outlier 0.7399633333333333\n",
      "MPLR 0.8163233333333333\n",
      "base 0.6755238333333333\n"
     ]
    }
   ],
   "source": [
    "pred_clip = pred_result[:, risk_rank[:x]]\n",
    "mem_clip = train_keep[:, risk_rank[:x]]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"outlier\", accuracy)\n",
    "\n",
    "pred_clip = pred_result[:, pri_risk_rank[:x]]\n",
    "mem_clip = train_keep[:, pri_risk_rank[:x]]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"MPLR\", accuracy)\n",
    "\n",
    "\n",
    "pred_clip = pred_result\n",
    "mem_clip = train_keep\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"base\", accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "736cdbcb-9dd3-45ab-bcc4-37c7f490a588",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8259bc0c-fbc1-4025-bf4a-b194236a39ff",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "786da5f1-1ae9-4f9b-ae7f-e18702ea84a7",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "77876b58-669a-483a-b80c-8b041486eda0",
   "metadata": {},
   "source": [
    "### 似然比攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "id": "f1304cdd-cd2c-437f-af20-bdd3eb68caab",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 先对所有目标数据执行攻击，然后根据脆弱点筛选获取对应的攻击成功率或者ROC\n",
    "# 输出两个，memlabel和pred_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "id": "c9ed8ba5-393b-4bdd-8a65-bbd6bbcaf4b6",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "AUC value is: 0.8478439214284699\n",
      "Accuracy is: 0.7350833333333333\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.7350833333333333"
      ]
     },
     "execution_count": 41,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "pred_result = LIRA_attack(train_keep, score_all, score_all[0], train_keep[0])\n",
    "evaluate_ROC(pred_result, train_keep[0], threshold=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "bea30cd7-e4ab-4761-bffc-f505d6b7a808",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0\n",
      "1\n",
      "2\n",
      "3\n",
      "4\n"
     ]
    }
   ],
   "source": [
    "for i in range(5):\n",
    "    pred_result = LIRA_attack(train_keep, score_all, score_all[i], train_keep[i])\n",
    "    pred_result = pred_result.reshape(1, len(pred_result))\n",
    "    if i==0:\n",
    "        pred_result_all = pred_result\n",
    "    else:\n",
    "        pred_result_all = np.concatenate((pred_result_all, pred_result), axis=0)\n",
    "    print(i)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e950a687-3034-427d-a03e-d4c3839ba231",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 57,
   "id": "a06e73fc-1e91-45bb-b8f0-99ce9f3ece4f",
   "metadata": {},
   "outputs": [],
   "source": [
    "lower_bound, upper_bound = -1, 1e-5\n",
    "indices = np.where((pri_risk_all >= lower_bound) & (pri_risk_all <= upper_bound))\n",
    "pred_clip = pred_result_all[:, indices]\n",
    "mem_clip = train_keep[:, indices]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 58,
   "id": "4a112de3-c4da-449f-a52c-d9a75c6bbbbf",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.5788986190636578\n"
     ]
    }
   ],
   "source": [
    "pred_clip = pred_clip > 0\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "35c96a5b-843d-4cb6-b964-f13dadac3193",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 111,
   "id": "853c29c2-14d2-47f4-b19e-c29dedb8f3cc",
   "metadata": {},
   "outputs": [],
   "source": [
    "# X_axi = []\n",
    "# Y_axi = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 112,
   "id": "26be2f5a-29e3-45c7-b6f0-c5382828f63f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# for i in range(10000):\n",
    "#     pred_t = pred_result_all[:,i]\n",
    "#     pred_t = pred_t > 0\n",
    "#     mem_t = train_keep[:,i]\n",
    "#     risk_t = pri_risk_all[i]\n",
    "#     acc = metrics.accuracy_score(mem_t, pred_t)\n",
    "#     X_axi.append(risk_t)\n",
    "#     Y_axi.append(acc)\n",
    "\n",
    "# df=pd.DataFrame({'risk': X_axi, 'attack_acc': Y_axi })\n",
    "# df.to_csv('Purhase100_risk_att.csv', index=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "22abbdf1-7fad-4ad1-bf77-d019d74013ba",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 58,
   "id": "457be14e-4b30-488a-aab0-0ae0b345e27e",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = 3000"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "id": "5c6d797e-b228-4001-907c-1ada16a20277",
   "metadata": {},
   "outputs": [],
   "source": [
    "y = 5"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "id": "ef8d8914-9726-406f-950c-b167238065a3",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "outlier 0.8446666666666667\n",
      "MPLR 0.966\n",
      "base 0.7174333333333334\n"
     ]
    }
   ],
   "source": [
    "pred_clip = pred_result_all[:, risk_rank[:x]]\n",
    "mem_clip = train_keep[:y, risk_rank[:x]]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "pred_clip = pred_clip > 0\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"outlier\", accuracy)\n",
    "\n",
    "pred_clip = pred_result_all[:, pri_risk_rank[:x]]\n",
    "mem_clip = train_keep[:y, pri_risk_rank[:x]]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "pred_clip = pred_clip > 0\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"MPLR\", accuracy)\n",
    "\n",
    "\n",
    "pred_clip = pred_result_all\n",
    "mem_clip = train_keep[:y]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "pred_clip = pred_clip  > 0\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"base\", accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "id": "5fdb2095-a077-40a4-b954-c4947b015080",
   "metadata": {},
   "outputs": [
    {
     "ename": "ValueError",
     "evalue": "Found input variables with inconsistent numbers of samples: [300000, 15000]",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mValueError\u001b[0m                                Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[61], line 6\u001b[0m\n\u001b[1;32m      4\u001b[0m mem_clip \u001b[38;5;241m=\u001b[39m mem_clip\u001b[38;5;241m.\u001b[39mflatten()\n\u001b[1;32m      5\u001b[0m pred_clip \u001b[38;5;241m=\u001b[39m pred_clip \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m----> 6\u001b[0m accuracy \u001b[38;5;241m=\u001b[39m \u001b[43mmetrics\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43maccuracy_score\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmem_clip\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpred_clip\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m      7\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124moutlier\u001b[39m\u001b[38;5;124m\"\u001b[39m, accuracy)\n\u001b[1;32m      9\u001b[0m pred_clip \u001b[38;5;241m=\u001b[39m pred_result_all[:, pri_risk_rank[:x]]\n",
      "File \u001b[0;32m~/.conda/envs/opacus/lib/python3.8/site-packages/sklearn/utils/_param_validation.py:214\u001b[0m, in \u001b[0;36mvalidate_params.<locals>.decorator.<locals>.wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m    208\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m    209\u001b[0m     \u001b[38;5;28;01mwith\u001b[39;00m config_context(\n\u001b[1;32m    210\u001b[0m         skip_parameter_validation\u001b[38;5;241m=\u001b[39m(\n\u001b[1;32m    211\u001b[0m             prefer_skip_nested_validation \u001b[38;5;129;01mor\u001b[39;00m global_skip_validation\n\u001b[1;32m    212\u001b[0m         )\n\u001b[1;32m    213\u001b[0m     ):\n\u001b[0;32m--> 214\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    215\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m InvalidParameterError \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m    216\u001b[0m     \u001b[38;5;66;03m# When the function is just a wrapper around an estimator, we allow\u001b[39;00m\n\u001b[1;32m    217\u001b[0m     \u001b[38;5;66;03m# the function to delegate validation to the estimator, but we replace\u001b[39;00m\n\u001b[1;32m    218\u001b[0m     \u001b[38;5;66;03m# the name of the estimator by the name of the function in the error\u001b[39;00m\n\u001b[1;32m    219\u001b[0m     \u001b[38;5;66;03m# message to avoid confusion.\u001b[39;00m\n\u001b[1;32m    220\u001b[0m     msg \u001b[38;5;241m=\u001b[39m re\u001b[38;5;241m.\u001b[39msub(\n\u001b[1;32m    221\u001b[0m         \u001b[38;5;124mr\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mparameter of \u001b[39m\u001b[38;5;124m\\\u001b[39m\u001b[38;5;124mw+ must be\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m    222\u001b[0m        
 \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mparameter of \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfunc\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__qualname__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m must be\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m    223\u001b[0m         \u001b[38;5;28mstr\u001b[39m(e),\n\u001b[1;32m    224\u001b[0m     )\n",
      "File \u001b[0;32m~/.conda/envs/opacus/lib/python3.8/site-packages/sklearn/metrics/_classification.py:220\u001b[0m, in \u001b[0;36maccuracy_score\u001b[0;34m(y_true, y_pred, normalize, sample_weight)\u001b[0m\n\u001b[1;32m    154\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Accuracy classification score.\u001b[39;00m\n\u001b[1;32m    155\u001b[0m \n\u001b[1;32m    156\u001b[0m \u001b[38;5;124;03mIn multilabel classification, this function computes subset accuracy:\u001b[39;00m\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    216\u001b[0m \u001b[38;5;124;03m0.5\u001b[39;00m\n\u001b[1;32m    217\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m    219\u001b[0m \u001b[38;5;66;03m# Compute accuracy for each possible representation\u001b[39;00m\n\u001b[0;32m--> 220\u001b[0m y_type, y_true, y_pred \u001b[38;5;241m=\u001b[39m \u001b[43m_check_targets\u001b[49m\u001b[43m(\u001b[49m\u001b[43my_true\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43my_pred\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    221\u001b[0m check_consistent_length(y_true, y_pred, sample_weight)\n\u001b[1;32m    222\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m y_type\u001b[38;5;241m.\u001b[39mstartswith(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmultilabel\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n",
      "File \u001b[0;32m~/.conda/envs/opacus/lib/python3.8/site-packages/sklearn/metrics/_classification.py:84\u001b[0m, in \u001b[0;36m_check_targets\u001b[0;34m(y_true, y_pred)\u001b[0m\n\u001b[1;32m     57\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_check_targets\u001b[39m(y_true, y_pred):\n\u001b[1;32m     58\u001b[0m \u001b[38;5;250m    \u001b[39m\u001b[38;5;124;03m\"\"\"Check that y_true and y_pred belong to the same classification task.\u001b[39;00m\n\u001b[1;32m     59\u001b[0m \n\u001b[1;32m     60\u001b[0m \u001b[38;5;124;03m    This converts multiclass or binary types to a common shape, and raises a\u001b[39;00m\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m     82\u001b[0m \u001b[38;5;124;03m    y_pred : array or indicator matrix\u001b[39;00m\n\u001b[1;32m     83\u001b[0m \u001b[38;5;124;03m    \"\"\"\u001b[39;00m\n\u001b[0;32m---> 84\u001b[0m     \u001b[43mcheck_consistent_length\u001b[49m\u001b[43m(\u001b[49m\u001b[43my_true\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43my_pred\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m     85\u001b[0m     type_true \u001b[38;5;241m=\u001b[39m type_of_target(y_true, input_name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124my_true\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m     86\u001b[0m     type_pred \u001b[38;5;241m=\u001b[39m type_of_target(y_pred, input_name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124my_pred\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
      "File \u001b[0;32m~/.conda/envs/opacus/lib/python3.8/site-packages/sklearn/utils/validation.py:407\u001b[0m, in \u001b[0;36mcheck_consistent_length\u001b[0;34m(*arrays)\u001b[0m\n\u001b[1;32m    405\u001b[0m uniques \u001b[38;5;241m=\u001b[39m np\u001b[38;5;241m.\u001b[39munique(lengths)\n\u001b[1;32m    406\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(uniques) \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m1\u001b[39m:\n\u001b[0;32m--> 407\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[1;32m    408\u001b[0m         \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFound input variables with inconsistent numbers of samples: \u001b[39m\u001b[38;5;132;01m%r\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m    409\u001b[0m         \u001b[38;5;241m%\u001b[39m [\u001b[38;5;28mint\u001b[39m(l) \u001b[38;5;28;01mfor\u001b[39;00m l \u001b[38;5;129;01min\u001b[39;00m lengths]\n\u001b[1;32m    410\u001b[0m     )\n",
      "\u001b[0;31mValueError\u001b[0m: Found input variables with inconsistent numbers of samples: [300000, 15000]"
     ]
    }
   ],
   "source": [
    "pred_clip = pred_result_all[:, risk_rank[:x]]\n",
    "mem_clip = train_keep[:, risk_rank[:x]]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "pred_clip = pred_clip > 0\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"outlier\", accuracy)\n",
    "\n",
    "pred_clip = pred_result_all[:, pri_risk_rank[:x]]\n",
    "mem_clip = train_keep[:, pri_risk_rank[:x]]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "pred_clip = pred_clip > 0\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"MPLR\", accuracy)\n",
    "\n",
    "\n",
    "pred_clip = pred_result_all\n",
    "mem_clip = train_keep\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "pred_clip = pred_clip  > 0\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"base\", accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "219ba265-8071-4be4-90f1-045cc6eada4d",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "bfbd0281-56de-4e5d-9520-06d62f2aeabb",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "108c6a7a-67ab-454d-8c88-7cb49347bcaa",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "8ea3f420-9a04-420f-83b6-06ea1658f6c7",
   "metadata": {},
   "source": [
    "### 影子模型攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "7c5e0038-e2b6-4997-a64f-c9e80ed2c9c6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 在所有数据上执行一次攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "7d7fefb0-3e20-40e2-be99-4527c0f62306",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(30066, 600) (30066,) (29934, 600) (29934,)\n",
      " Error: \n",
      " Accuracy: 100.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 86.1%  \n",
      "\n",
      "(60000, 100) (60000,) (60000,)\n",
      "(29882, 600) (29882,) (30118, 600) (30118,)\n",
      " Error: \n",
      " Accuracy: 100.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 86.5%  \n",
      "\n",
      "(60000, 100) (60000,) (60000,)\n",
      "(29851, 600) (29851,) (30149, 600) (30149,)\n",
      " Error: \n",
      " Accuracy: 100.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 85.6%  \n",
      "\n",
      "(60000, 100) (60000,) (60000,)\n",
      "(30013, 600) (30013,) (29987, 600) (29987,)\n",
      " Error: \n",
      " Accuracy: 100.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 86.4%  \n",
      "\n",
      "test data: (60000, 100) (60000,) (60000,)\n",
      "(180000, 100) (180000,)\n",
      "Attack_NN(\n",
      "  (linear_relu_stack): Sequential(\n",
      "    (0): Linear(in_features=4, out_features=128, bias=True)\n",
      "    (1): ReLU()\n",
      "    (2): Linear(in_features=128, out_features=64, bias=True)\n",
      "    (3): ReLU()\n",
      "    (4): Linear(in_features=64, out_features=1, bias=True)\n",
      "  )\n",
      ")\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 0.692623  [  128/180000]\n",
      "loss: 0.641455  [12928/180000]\n",
      "loss: 0.663164  [25728/180000]\n",
      "loss: 0.610743  [38528/180000]\n",
      "loss: 0.610107  [51328/180000]\n",
      "loss: 0.615861  [64128/180000]\n",
      "loss: 0.634155  [76928/180000]\n",
      "loss: 0.564355  [89728/180000]\n",
      "loss: 0.549389  [102528/180000]\n",
      "loss: 0.610732  [115328/180000]\n",
      "loss: 0.604475  [128128/180000]\n",
      "loss: 0.586655  [140928/180000]\n",
      "loss: 0.578886  [153728/180000]\n",
      "loss: 0.620941  [166528/180000]\n",
      "loss: 0.612433  [179328/180000]\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 0.578746  [  128/180000]\n",
      "loss: 0.614502  [12928/180000]\n",
      "loss: 0.565905  [25728/180000]\n",
      "loss: 0.558803  [38528/180000]\n",
      "loss: 0.561902  [51328/180000]\n",
      "loss: 0.599277  [64128/180000]\n",
      "loss: 0.574592  [76928/180000]\n",
      "loss: 0.618751  [89728/180000]\n",
      "loss: 0.596112  [102528/180000]\n",
      "loss: 0.559629  [115328/180000]\n",
      "loss: 0.566105  [128128/180000]\n",
      "loss: 0.632847  [140928/180000]\n",
      "loss: 0.558611  [153728/180000]\n",
      "loss: 0.563993  [166528/180000]\n",
      "loss: 0.539588  [179328/180000]\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.622210  [  128/180000]\n",
      "loss: 0.606525  [12928/180000]\n",
      "loss: 0.558984  [25728/180000]\n",
      "loss: 0.554432  [38528/180000]\n",
      "loss: 0.558928  [51328/180000]\n",
      "loss: 0.544827  [64128/180000]\n",
      "loss: 0.575933  [76928/180000]\n",
      "loss: 0.591670  [89728/180000]\n",
      "loss: 0.580243  [102528/180000]\n",
      "loss: 0.595808  [115328/180000]\n",
      "loss: 0.626619  [128128/180000]\n",
      "loss: 0.593097  [140928/180000]\n",
      "loss: 0.571754  [153728/180000]\n",
      "loss: 0.600410  [166528/180000]\n",
      "loss: 0.521069  [179328/180000]\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.522617  [  128/180000]\n",
      "loss: 0.544999  [12928/180000]\n",
      "loss: 0.553973  [25728/180000]\n",
      "loss: 0.533681  [38528/180000]\n",
      "loss: 0.554026  [51328/180000]\n",
      "loss: 0.548840  [64128/180000]\n",
      "loss: 0.554657  [76928/180000]\n",
      "loss: 0.546583  [89728/180000]\n",
      "loss: 0.544033  [102528/180000]\n",
      "loss: 0.564479  [115328/180000]\n",
      "loss: 0.600806  [128128/180000]\n",
      "loss: 0.629682  [140928/180000]\n",
      "loss: 0.487469  [153728/180000]\n",
      "loss: 0.523396  [166528/180000]\n",
      "loss: 0.558931  [179328/180000]\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.550446  [  128/180000]\n",
      "loss: 0.620956  [12928/180000]\n",
      "loss: 0.535456  [25728/180000]\n",
      "loss: 0.559682  [38528/180000]\n",
      "loss: 0.517376  [51328/180000]\n",
      "loss: 0.556118  [64128/180000]\n",
      "loss: 0.536126  [76928/180000]\n",
      "loss: 0.575135  [89728/180000]\n",
      "loss: 0.487100  [102528/180000]\n",
      "loss: 0.582048  [115328/180000]\n",
      "loss: 0.521302  [128128/180000]\n",
      "loss: 0.518309  [140928/180000]\n",
      "loss: 0.530494  [153728/180000]\n",
      "loss: 0.526437  [166528/180000]\n",
      "loss: 0.480993  [179328/180000]\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.616130  [  128/180000]\n",
      "loss: 0.596066  [12928/180000]\n",
      "loss: 0.544719  [25728/180000]\n",
      "loss: 0.541789  [38528/180000]\n",
      "loss: 0.527265  [51328/180000]\n",
      "loss: 0.550016  [64128/180000]\n",
      "loss: 0.524908  [76928/180000]\n",
      "loss: 0.530300  [89728/180000]\n",
      "loss: 0.525742  [102528/180000]\n",
      "loss: 0.561029  [115328/180000]\n",
      "loss: 0.547361  [128128/180000]\n",
      "loss: 0.503266  [140928/180000]\n",
      "loss: 0.566453  [153728/180000]\n",
      "loss: 0.568191  [166528/180000]\n",
      "loss: 0.544035  [179328/180000]\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.561546  [  128/180000]\n",
      "loss: 0.525127  [12928/180000]\n",
      "loss: 0.543899  [25728/180000]\n",
      "loss: 0.532254  [38528/180000]\n",
      "loss: 0.560185  [51328/180000]\n",
      "loss: 0.532774  [64128/180000]\n",
      "loss: 0.540336  [76928/180000]\n",
      "loss: 0.553686  [89728/180000]\n",
      "loss: 0.578983  [102528/180000]\n",
      "loss: 0.516808  [115328/180000]\n",
      "loss: 0.508050  [128128/180000]\n",
      "loss: 0.536052  [140928/180000]\n",
      "loss: 0.559106  [153728/180000]\n",
      "loss: 0.544439  [166528/180000]\n",
      "loss: 0.492409  [179328/180000]\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.542892  [  128/180000]\n",
      "loss: 0.524999  [12928/180000]\n",
      "loss: 0.523815  [25728/180000]\n",
      "loss: 0.515787  [38528/180000]\n",
      "loss: 0.590268  [51328/180000]\n",
      "loss: 0.566837  [64128/180000]\n",
      "loss: 0.550055  [76928/180000]\n",
      "loss: 0.612848  [89728/180000]\n",
      "loss: 0.566093  [102528/180000]\n",
      "loss: 0.520307  [115328/180000]\n",
      "loss: 0.566832  [128128/180000]\n",
      "loss: 0.566050  [140928/180000]\n",
      "loss: 0.574764  [153728/180000]\n",
      "loss: 0.560686  [166528/180000]\n",
      "loss: 0.541332  [179328/180000]\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.584635  [  128/180000]\n",
      "loss: 0.541929  [12928/180000]\n",
      "loss: 0.553226  [25728/180000]\n",
      "loss: 0.564442  [38528/180000]\n",
      "loss: 0.474141  [51328/180000]\n",
      "loss: 0.569147  [64128/180000]\n",
      "loss: 0.573255  [76928/180000]\n",
      "loss: 0.544961  [89728/180000]\n",
      "loss: 0.542279  [102528/180000]\n",
      "loss: 0.480677  [115328/180000]\n",
      "loss: 0.567030  [128128/180000]\n",
      "loss: 0.477378  [140928/180000]\n",
      "loss: 0.547815  [153728/180000]\n",
      "loss: 0.524348  [166528/180000]\n",
      "loss: 0.519355  [179328/180000]\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.487304  [  128/180000]\n",
      "loss: 0.522046  [12928/180000]\n",
      "loss: 0.593198  [25728/180000]\n",
      "loss: 0.540652  [38528/180000]\n",
      "loss: 0.564964  [51328/180000]\n",
      "loss: 0.501482  [64128/180000]\n",
      "loss: 0.559975  [76928/180000]\n",
      "loss: 0.568758  [89728/180000]\n",
      "loss: 0.520589  [102528/180000]\n",
      "loss: 0.509876  [115328/180000]\n",
      "loss: 0.538413  [128128/180000]\n",
      "loss: 0.494712  [140928/180000]\n",
      "loss: 0.502640  [153728/180000]\n",
      "loss: 0.503747  [166528/180000]\n",
      "loss: 0.593417  [179328/180000]\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.552494  [  128/180000]\n",
      "loss: 0.513687  [12928/180000]\n",
      "loss: 0.531254  [25728/180000]\n",
      "loss: 0.519472  [38528/180000]\n",
      "loss: 0.494981  [51328/180000]\n",
      "loss: 0.517169  [64128/180000]\n",
      "loss: 0.587247  [76928/180000]\n",
      "loss: 0.586624  [89728/180000]\n",
      "loss: 0.528377  [102528/180000]\n",
      "loss: 0.477335  [115328/180000]\n",
      "loss: 0.490820  [128128/180000]\n",
      "loss: 0.559239  [140928/180000]\n",
      "loss: 0.570719  [153728/180000]\n",
      "loss: 0.529863  [166528/180000]\n",
      "loss: 0.552364  [179328/180000]\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.560808  [  128/180000]\n",
      "loss: 0.501445  [12928/180000]\n",
      "loss: 0.517122  [25728/180000]\n",
      "loss: 0.512775  [38528/180000]\n",
      "loss: 0.487193  [51328/180000]\n",
      "loss: 0.556258  [64128/180000]\n",
      "loss: 0.521771  [76928/180000]\n",
      "loss: 0.541183  [89728/180000]\n",
      "loss: 0.550022  [102528/180000]\n",
      "loss: 0.549906  [115328/180000]\n",
      "loss: 0.567833  [128128/180000]\n",
      "loss: 0.552919  [140928/180000]\n",
      "loss: 0.544506  [153728/180000]\n",
      "loss: 0.506570  [166528/180000]\n",
      "loss: 0.528084  [179328/180000]\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.536706  [  128/180000]\n",
      "loss: 0.487592  [12928/180000]\n",
      "loss: 0.553427  [25728/180000]\n",
      "loss: 0.515835  [38528/180000]\n",
      "loss: 0.495912  [51328/180000]\n",
      "loss: 0.526653  [64128/180000]\n",
      "loss: 0.564346  [76928/180000]\n",
      "loss: 0.707666  [89728/180000]\n",
      "loss: 0.544431  [102528/180000]\n",
      "loss: 0.540065  [115328/180000]\n",
      "loss: 0.460180  [128128/180000]\n",
      "loss: 0.602529  [140928/180000]\n",
      "loss: 0.540501  [153728/180000]\n",
      "loss: 0.517432  [166528/180000]\n",
      "loss: 0.523167  [179328/180000]\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.499966  [  128/180000]\n",
      "loss: 0.479136  [12928/180000]\n",
      "loss: 0.523990  [25728/180000]\n",
      "loss: 0.519355  [38528/180000]\n",
      "loss: 0.512284  [51328/180000]\n",
      "loss: 0.561949  [64128/180000]\n",
      "loss: 0.535226  [76928/180000]\n",
      "loss: 0.552043  [89728/180000]\n",
      "loss: 0.545476  [102528/180000]\n",
      "loss: 0.512303  [115328/180000]\n",
      "loss: 0.564626  [128128/180000]\n",
      "loss: 0.540532  [140928/180000]\n",
      "loss: 0.481389  [153728/180000]\n",
      "loss: 0.521851  [166528/180000]\n",
      "loss: 0.499740  [179328/180000]\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.596647  [  128/180000]\n",
      "loss: 0.514600  [12928/180000]\n",
      "loss: 0.473269  [25728/180000]\n",
      "loss: 0.605277  [38528/180000]\n",
      "loss: 0.497332  [51328/180000]\n",
      "loss: 0.563386  [64128/180000]\n",
      "loss: 0.507826  [76928/180000]\n",
      "loss: 0.535510  [89728/180000]\n",
      "loss: 0.508460  [102528/180000]\n",
      "loss: 0.538798  [115328/180000]\n",
      "loss: 0.562867  [128128/180000]\n",
      "loss: 0.594781  [140928/180000]\n",
      "loss: 0.594556  [153728/180000]\n",
      "loss: 0.477176  [166528/180000]\n",
      "loss: 0.520935  [179328/180000]\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.540585  [  128/180000]\n",
      "loss: 0.531839  [12928/180000]\n",
      "loss: 0.541954  [25728/180000]\n",
      "loss: 0.476239  [38528/180000]\n",
      "loss: 0.523005  [51328/180000]\n",
      "loss: 0.521631  [64128/180000]\n",
      "loss: 0.566948  [76928/180000]\n",
      "loss: 0.539182  [89728/180000]\n",
      "loss: 0.506975  [102528/180000]\n",
      "loss: 0.484399  [115328/180000]\n",
      "loss: 0.480293  [128128/180000]\n",
      "loss: 0.516208  [140928/180000]\n",
      "loss: 0.527746  [153728/180000]\n",
      "loss: 0.516212  [166528/180000]\n",
      "loss: 0.477870  [179328/180000]\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.552287  [  128/180000]\n",
      "loss: 0.537038  [12928/180000]\n",
      "loss: 0.591379  [25728/180000]\n",
      "loss: 0.537577  [38528/180000]\n",
      "loss: 0.524171  [51328/180000]\n",
      "loss: 0.518103  [64128/180000]\n",
      "loss: 0.568689  [76928/180000]\n",
      "loss: 0.570441  [89728/180000]\n",
      "loss: 0.491357  [102528/180000]\n",
      "loss: 0.540675  [115328/180000]\n",
      "loss: 0.556578  [128128/180000]\n",
      "loss: 0.533460  [140928/180000]\n",
      "loss: 0.531690  [153728/180000]\n",
      "loss: 0.594577  [166528/180000]\n",
      "loss: 0.501516  [179328/180000]\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.537393  [  128/180000]\n",
      "loss: 0.507466  [12928/180000]\n",
      "loss: 0.475665  [25728/180000]\n",
      "loss: 0.544711  [38528/180000]\n",
      "loss: 0.552121  [51328/180000]\n",
      "loss: 0.506216  [64128/180000]\n",
      "loss: 0.510702  [76928/180000]\n",
      "loss: 0.511956  [89728/180000]\n",
      "loss: 0.535956  [102528/180000]\n",
      "loss: 0.552954  [115328/180000]\n",
      "loss: 0.561477  [128128/180000]\n",
      "loss: 0.488772  [140928/180000]\n",
      "loss: 0.417857  [153728/180000]\n",
      "loss: 0.513196  [166528/180000]\n",
      "loss: 0.558671  [179328/180000]\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.574761  [  128/180000]\n",
      "loss: 0.501373  [12928/180000]\n",
      "loss: 0.507383  [25728/180000]\n",
      "loss: 0.545940  [38528/180000]\n",
      "loss: 0.539092  [51328/180000]\n",
      "loss: 0.487624  [64128/180000]\n",
      "loss: 0.508187  [76928/180000]\n",
      "loss: 0.538543  [89728/180000]\n",
      "loss: 0.545359  [102528/180000]\n",
      "loss: 0.511575  [115328/180000]\n",
      "loss: 0.555072  [128128/180000]\n",
      "loss: 0.484102  [140928/180000]\n",
      "loss: 0.540663  [153728/180000]\n",
      "loss: 0.513360  [166528/180000]\n",
      "loss: 0.546189  [179328/180000]\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.548920  [  128/180000]\n",
      "loss: 0.570446  [12928/180000]\n",
      "loss: 0.534469  [25728/180000]\n",
      "loss: 0.530420  [38528/180000]\n",
      "loss: 0.548103  [51328/180000]\n",
      "loss: 0.510016  [64128/180000]\n",
      "loss: 0.539663  [76928/180000]\n",
      "loss: 0.544701  [89728/180000]\n",
      "loss: 0.484479  [102528/180000]\n",
      "loss: 0.573846  [115328/180000]\n",
      "loss: 0.515998  [128128/180000]\n",
      "loss: 0.486006  [140928/180000]\n",
      "loss: 0.556239  [153728/180000]\n",
      "loss: 0.586054  [166528/180000]\n",
      "loss: 0.552918  [179328/180000]\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.565496  [  128/180000]\n",
      "loss: 0.548840  [12928/180000]\n",
      "loss: 0.546947  [25728/180000]\n",
      "loss: 0.496703  [38528/180000]\n",
      "loss: 0.567410  [51328/180000]\n",
      "loss: 0.562468  [64128/180000]\n",
      "loss: 0.489352  [76928/180000]\n",
      "loss: 0.538158  [89728/180000]\n",
      "loss: 0.535559  [102528/180000]\n",
      "loss: 0.493225  [115328/180000]\n",
      "loss: 0.541463  [128128/180000]\n",
      "loss: 0.575631  [140928/180000]\n",
      "loss: 0.540750  [153728/180000]\n",
      "loss: 0.571923  [166528/180000]\n",
      "loss: 0.548650  [179328/180000]\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.471309  [  128/180000]\n",
      "loss: 0.545850  [12928/180000]\n",
      "loss: 0.482871  [25728/180000]\n",
      "loss: 0.461157  [38528/180000]\n",
      "loss: 0.565367  [51328/180000]\n",
      "loss: 0.560556  [64128/180000]\n",
      "loss: 0.527719  [76928/180000]\n",
      "loss: 0.510931  [89728/180000]\n",
      "loss: 0.548772  [102528/180000]\n",
      "loss: 0.505765  [115328/180000]\n",
      "loss: 0.580225  [128128/180000]\n",
      "loss: 0.544538  [140928/180000]\n",
      "loss: 0.524467  [153728/180000]\n",
      "loss: 0.565974  [166528/180000]\n",
      "loss: 0.527689  [179328/180000]\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.512688  [  128/180000]\n",
      "loss: 0.509316  [12928/180000]\n",
      "loss: 0.560445  [25728/180000]\n",
      "loss: 0.559508  [38528/180000]\n",
      "loss: 0.507064  [51328/180000]\n",
      "loss: 0.576302  [64128/180000]\n",
      "loss: 0.466088  [76928/180000]\n",
      "loss: 0.516627  [89728/180000]\n",
      "loss: 0.542211  [102528/180000]\n",
      "loss: 0.565811  [115328/180000]\n",
      "loss: 0.485650  [128128/180000]\n",
      "loss: 0.529928  [140928/180000]\n",
      "loss: 0.495385  [153728/180000]\n",
      "loss: 0.558261  [166528/180000]\n",
      "loss: 0.571788  [179328/180000]\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.540379  [  128/180000]\n",
      "loss: 0.584315  [12928/180000]\n",
      "loss: 0.551396  [25728/180000]\n",
      "loss: 0.537777  [38528/180000]\n",
      "loss: 0.541443  [51328/180000]\n",
      "loss: 0.477658  [64128/180000]\n",
      "loss: 0.516917  [76928/180000]\n",
      "loss: 0.555005  [89728/180000]\n",
      "loss: 0.494970  [102528/180000]\n",
      "loss: 0.524136  [115328/180000]\n",
      "loss: 0.505758  [128128/180000]\n",
      "loss: 0.529973  [140928/180000]\n",
      "loss: 0.525867  [153728/180000]\n",
      "loss: 0.512389  [166528/180000]\n",
      "loss: 0.524315  [179328/180000]\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.541810  [  128/180000]\n",
      "loss: 0.536926  [12928/180000]\n",
      "loss: 0.568793  [25728/180000]\n",
      "loss: 0.523941  [38528/180000]\n",
      "loss: 0.528918  [51328/180000]\n",
      "loss: 0.510556  [64128/180000]\n",
      "loss: 0.551576  [76928/180000]\n",
      "loss: 0.578226  [89728/180000]\n",
      "loss: 0.462756  [102528/180000]\n",
      "loss: 0.515573  [115328/180000]\n",
      "loss: 0.496540  [128128/180000]\n",
      "loss: 0.550943  [140928/180000]\n",
      "loss: 0.564488  [153728/180000]\n",
      "loss: 0.568826  [166528/180000]\n",
      "loss: 0.525245  [179328/180000]\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.549111  [  128/180000]\n",
      "loss: 0.499260  [12928/180000]\n",
      "loss: 0.497271  [25728/180000]\n",
      "loss: 0.529339  [38528/180000]\n",
      "loss: 0.567294  [51328/180000]\n",
      "loss: 0.471104  [64128/180000]\n",
      "loss: 0.548761  [76928/180000]\n",
      "loss: 0.556497  [89728/180000]\n",
      "loss: 0.493555  [102528/180000]\n",
      "loss: 0.508432  [115328/180000]\n",
      "loss: 0.523652  [128128/180000]\n",
      "loss: 0.531900  [140928/180000]\n",
      "loss: 0.581661  [153728/180000]\n",
      "loss: 0.558595  [166528/180000]\n",
      "loss: 0.527087  [179328/180000]\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.548118  [  128/180000]\n",
      "loss: 0.513609  [12928/180000]\n",
      "loss: 0.532605  [25728/180000]\n",
      "loss: 0.513084  [38528/180000]\n",
      "loss: 0.498469  [51328/180000]\n",
      "loss: 0.539226  [64128/180000]\n",
      "loss: 0.521709  [76928/180000]\n",
      "loss: 0.529149  [89728/180000]\n",
      "loss: 0.599448  [102528/180000]\n",
      "loss: 0.472922  [115328/180000]\n",
      "loss: 0.506062  [128128/180000]\n",
      "loss: 0.564714  [140928/180000]\n",
      "loss: 0.571167  [153728/180000]\n",
      "loss: 0.533400  [166528/180000]\n",
      "loss: 0.537258  [179328/180000]\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.568245  [  128/180000]\n",
      "loss: 0.531341  [12928/180000]\n",
      "loss: 0.525689  [25728/180000]\n",
      "loss: 0.557753  [38528/180000]\n",
      "loss: 0.560230  [51328/180000]\n",
      "loss: 0.513462  [64128/180000]\n",
      "loss: 0.575180  [76928/180000]\n",
      "loss: 0.494120  [89728/180000]\n",
      "loss: 0.525223  [102528/180000]\n",
      "loss: 0.537341  [115328/180000]\n",
      "loss: 0.525796  [128128/180000]\n",
      "loss: 0.563548  [140928/180000]\n",
      "loss: 0.524022  [153728/180000]\n",
      "loss: 0.551939  [166528/180000]\n",
      "loss: 0.558930  [179328/180000]\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.563599  [  128/180000]\n",
      "loss: 0.528888  [12928/180000]\n",
      "loss: 0.544394  [25728/180000]\n",
      "loss: 0.496663  [38528/180000]\n",
      "loss: 0.474650  [51328/180000]\n",
      "loss: 0.530526  [64128/180000]\n",
      "loss: 0.507996  [76928/180000]\n",
      "loss: 0.534989  [89728/180000]\n",
      "loss: 0.523322  [102528/180000]\n",
      "loss: 0.535147  [115328/180000]\n",
      "loss: 0.544726  [128128/180000]\n",
      "loss: 0.562991  [140928/180000]\n",
      "loss: 0.512732  [153728/180000]\n",
      "loss: 0.466525  [166528/180000]\n",
      "loss: 0.523821  [179328/180000]\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.516570  [  128/180000]\n",
      "loss: 0.503628  [12928/180000]\n",
      "loss: 0.549923  [25728/180000]\n",
      "loss: 0.558495  [38528/180000]\n",
      "loss: 0.554109  [51328/180000]\n",
      "loss: 0.498239  [64128/180000]\n",
      "loss: 0.544112  [76928/180000]\n",
      "loss: 0.539852  [89728/180000]\n",
      "loss: 0.496937  [102528/180000]\n",
      "loss: 0.498915  [115328/180000]\n",
      "loss: 0.503893  [128128/180000]\n",
      "loss: 0.474242  [140928/180000]\n",
      "loss: 0.517127  [153728/180000]\n",
      "loss: 0.546470  [166528/180000]\n",
      "loss: 0.489764  [179328/180000]\n",
      "Done!\n",
      "Train data:\n",
      "AUC value is: 0.7110409620698652\n",
      "Accuracy is: 0.7129277777777778\n",
      "Test data:\n",
      "AUC value is: 0.7267497331341166\n",
      "Accuracy is: 0.67925\n"
     ]
    }
   ],
   "source": [
    "attack_model = shadow_attack(sha_models=sha_models, tar_model=tar_model, model_num=num_shadowsets, weight_dir=weight_dir, data_name=DATA_NAME, model=MODEL, model_transform=model_transform, \n",
    "                  model_epochs=EPOCHS, batch_size=BATCH_SIZE, learning_rate=attack_lr, attack_epochs=30, attack_transform=attack_transform, \n",
    "                  device=device, prop_keep=0.5, top_k=3, attack_class=attack_class)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "id": "ab8c2490-c230-4516-a19a-df84a3779011",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Attack_NN(\n",
       "  (linear_relu_stack): Sequential(\n",
       "    (0): Linear(in_features=4, out_features=128, bias=True)\n",
       "    (1): ReLU()\n",
       "    (2): Linear(in_features=128, out_features=64, bias=True)\n",
       "    (3): ReLU()\n",
       "    (4): Linear(in_features=64, out_features=1, bias=True)\n",
       "  )\n",
       ")"
      ]
     },
     "execution_count": 30,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "attack_model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "id": "0ab6a67d-2a0a-4e46-9417-cb9b3ba07c10",
   "metadata": {},
   "outputs": [],
   "source": [
    "for tar_model in range(50,100):\n",
    "    targetX = conf_data_all[tar_model].astype(np.float32)\n",
    "    pred_cor = (targetX.argmax(1) == Y_data).astype(int)\n",
    "    targetY = train_keep[tar_model]\n",
    "    top_k = 3\n",
    "    if top_k:\n",
    "        # 仅使用概率向量的前3个值\n",
    "        targetX, _ = get_top_k_conf(top_k, targetX, targetX)\n",
    "\n",
    "    targetX = np.concatenate((targetX, pred_cor.reshape(pred_cor.shape[0],1)), 1)\n",
    "    targetX = targetX.astype(np.float32)\n",
    "    \n",
    "    shadow_attack_data = CustomDataset(targetX, targetY, attack_transform)\n",
    "    shadow_attack_dataloader = DataLoader(shadow_attack_data, batch_size=batch_size, shuffle=False)\n",
    "    attack_test_scores, attack_test_mem = get_attack_pred(shadow_attack_dataloader, attack_model, device)\n",
    "    attack_test_scores, attack_test_mem = attack_test_scores.detach().cpu().numpy(), attack_test_mem.detach().cpu().numpy()\n",
    "    attack_test_scores = attack_test_scores.reshape(1, attack_test_scores.shape[0])\n",
    "    attack_test_mem = attack_test_mem.reshape(1, attack_test_mem.shape[0])\n",
    "    if tar_model == 50:\n",
    "        attack_test_scores_all = attack_test_scores\n",
    "        attack_test_mem_all = attack_test_mem\n",
    "    else:\n",
    "        attack_test_scores_all = np.concatenate((attack_test_scores_all, attack_test_scores), axis=0)\n",
    "        attack_test_mem_all = np.concatenate((attack_test_mem_all, attack_test_mem), axis=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "8ba8d2b7-7d6c-4e17-95bd-f854e8af5af9",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = 600"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "id": "c8cb250e-ad72-40b3-8967-46b8538be791",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "outlier 0.8889333333333334\n",
      "MPLR 0.9530666666666666\n",
      "base 0.6945076666666666\n"
     ]
    }
   ],
   "source": [
    "pred_result_all = attack_test_scores_all > 0.5\n",
    "pred_clip = pred_result_all[:, risk_rank[:x]]\n",
    "mem_clip = attack_test_mem_all[:, risk_rank[:x]]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"outlier\", accuracy)\n",
    "\n",
    "pred_clip = pred_result_all[:, pri_risk_rank[:x]]\n",
    "mem_clip = attack_test_mem_all[:, pri_risk_rank[:x]]\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"MPLR\", accuracy)\n",
    "\n",
    "\n",
    "pred_clip = pred_result_all\n",
    "mem_clip = attack_test_mem_all\n",
    "pred_clip = pred_clip.flatten()\n",
    "mem_clip = mem_clip.flatten()\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"base\", accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "eebaa471-2271-4310-b936-1030edb6f096",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "ecd8acc1-55cc-4178-91a9-4f6ca9876a52",
   "metadata": {},
   "source": [
    "### 扰动攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "b077751a-c0ee-4e39-9f70-41b791d348e6",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = 500"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "313b7df7-259e-4e77-a624-1c60f01cb434",
   "metadata": {},
   "outputs": [],
   "source": [
    "nums = 50\n",
    "# sigma_list = [0.15]\n",
    "sigma_list = [0.05]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "id": "c8b0d85f-0c0d-4d51-8d24-c5f35859a9fc",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Test Error: \n",
      " Accuracy: 93.2%, Avg loss: 0.406874 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.453592 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.454689 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.457104 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.454018 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.450990 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.452414 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.457157 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.453735 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.455468 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.457537 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.455034 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.455654 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.450500 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.452169 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.454339 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.454676 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.452228 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.448385 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.6%, Avg loss: 0.457985 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.453155 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.452006 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.457502 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.6%, Avg loss: 0.458055 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.457234 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.450280 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.451042 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.453509 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.457540 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.450035 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.450414 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.456378 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.450349 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.456637 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.453982 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.453243 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.450219 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.454022 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.449246 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.451251 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.455964 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.457397 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.449833 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.454941 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.450915 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.452626 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.450568 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.454413 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.8%, Avg loss: 0.449510 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.456088 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 92.7%, Avg loss: 0.456661 \n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Run the label-only (noise-robustness) attack against target model #0 and\n",
    "# collect per-sample membership predictions in `pred_result`.\n",
    "tar_model = 0  # index of the target-model checkpoint to attack\n",
    "# Build the target model architecture matching the configured `model` name\n",
    "if model in ['NN', 'NN_4layer']:\n",
    "    Target_Model = globals()['create_{}_model'.format(model)](X_data.shape[1], Y_data.max()+1)\n",
    "elif model == 'CNN':\n",
    "    Target_Model = globals()['create_{}_model'.format(model)](Y_data.max()+1, data_name)\n",
    "else:\n",
    "    Target_Model = globals()['create_{}_model'.format(model)](Y_data.max()+1)\n",
    "# Load the pretrained weights for this target model\n",
    "weight_path = os.path.join(weight_dir, weight_part + \"{}.pth\".format(tar_model))\n",
    "# print(Reference_Model)\n",
    "Target_Model.load_state_dict(torch.load(weight_path))\n",
    "Target_Model.to(device)\n",
    "loss_fn = nn.CrossEntropyLoss()\n",
    "pred_result, _ = Label_attack(all_dataloader, Target_Model, loss_fn, device, sigma_list, nums)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "id": "6cda21c0-84f1-44af-a485-8be4c8ec3291",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Number of highest-risk samples to keep when comparing attack accuracy below.\n",
    "x = 6000"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "id": "f06a3e24-5a57-472a-b6f0-78961403f471",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "outlier 0.7471666666666666\n",
      "MPLR 0.9566666666666667\n",
      "base 0.64985\n"
     ]
    }
   ],
   "source": [
    "# Compare attack accuracy on the top-x samples selected by each risk ranking.\n",
    "# 1) Outlier-based risk ranking\n",
    "pred_clip = pred_result[risk_rank[:x]]\n",
    "mem_clip = train_keep[tar_model][risk_rank[:x]]\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"outlier\", accuracy)\n",
    "\n",
    "# 2) MPLR (likelihood-ratio based) risk ranking\n",
    "pred_clip = pred_result[pri_risk_rank[:x]]\n",
    "mem_clip = train_keep[tar_model][pri_risk_rank[:x]]\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"MPLR\", accuracy)\n",
    "\n",
    "\n",
    "# 3) Baseline: accuracy over ALL samples, no risk-based selection\n",
    "pred_clip = pred_result\n",
    "mem_clip = train_keep[tar_model]\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(\"base\", accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "40ceb18e-621c-4c72-8ef7-94498c7b65c5",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d6dc7132-382e-40d3-af2c-5b905d0c1eab",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "821fc3a4-ee86-46e7-98f6-6c1bb89db051",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "6e9a82aa-dbff-443e-aaf7-5f9c6626ada8",
   "metadata": {},
   "source": [
    "### 绘制影子模型数量的影响 (Plot the effect of the number of shadow models)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "id": "6af00f94-64f2-4a42-83fc-5602ae9d4e4d",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.609540  [  128/29858]\n",
      "loss: 1.834163  [12928/29858]\n",
      "loss: 1.282135  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 46.8%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.285144  [  128/29858]\n",
      "loss: 0.940382  [12928/29858]\n",
      "loss: 0.874659  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 73.5%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.916051  [  128/29858]\n",
      "loss: 0.718577  [12928/29858]\n",
      "loss: 0.650142  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 80.8%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.726705  [  128/29858]\n",
      "loss: 0.525300  [12928/29858]\n",
      "loss: 0.528093  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 85.0%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.547586  [  128/29858]\n",
      "loss: 0.451227  [12928/29858]\n",
      "loss: 0.462003  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 87.7%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.453495  [  128/29858]\n",
      "loss: 0.379909  [12928/29858]\n",
      "loss: 0.381398  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 90.1%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.374821  [  128/29858]\n",
      "loss: 0.297144  [12928/29858]\n",
      "loss: 0.297763  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 92.5%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.322217  [  128/29858]\n",
      "loss: 0.233746  [12928/29858]\n",
      "loss: 0.271451  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 94.3%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.359016  [  128/29858]\n",
      "loss: 0.177882  [12928/29858]\n",
      "loss: 0.220091  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 94.4%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.185966  [  128/29858]\n",
      "loss: 0.164887  [12928/29858]\n",
      "loss: 0.197744  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 95.3%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.184746  [  128/29858]\n",
      "loss: 0.194610  [12928/29858]\n",
      "loss: 0.164776  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 96.2%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.152226  [  128/29858]\n",
      "loss: 0.143808  [12928/29858]\n",
      "loss: 0.162254  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 96.8%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.088455  [  128/29858]\n",
      "loss: 0.094936  [12928/29858]\n",
      "loss: 0.128478  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 97.3%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.136157  [  128/29858]\n",
      "loss: 0.081418  [12928/29858]\n",
      "loss: 0.177955  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 98.1%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.133236  [  128/29858]\n",
      "loss: 0.116534  [12928/29858]\n",
      "loss: 0.110581  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 98.4%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.091430  [  128/29858]\n",
      "loss: 0.061757  [12928/29858]\n",
      "loss: 0.066513  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.084485  [  128/29858]\n",
      "loss: 0.052159  [12928/29858]\n",
      "loss: 0.065222  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.056626  [  128/29858]\n",
      "loss: 0.044247  [12928/29858]\n",
      "loss: 0.054276  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.042682  [  128/29858]\n",
      "loss: 0.074157  [12928/29858]\n",
      "loss: 0.052469  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.037360  [  128/29858]\n",
      "loss: 0.045035  [12928/29858]\n",
      "loss: 0.057266  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.035110  [  128/29858]\n",
      "loss: 0.048478  [12928/29858]\n",
      "loss: 0.091432  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.041292  [  128/29858]\n",
      "loss: 0.070053  [12928/29858]\n",
      "loss: 0.045111  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.091546  [  128/29858]\n",
      "loss: 0.069119  [12928/29858]\n",
      "loss: 0.038320  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.020252  [  128/29858]\n",
      "loss: 0.112399  [12928/29858]\n",
      "loss: 0.021227  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.020902  [  128/29858]\n",
      "loss: 0.009763  [12928/29858]\n",
      "loss: 0.022990  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.018063  [  128/29858]\n",
      "loss: 0.015547  [12928/29858]\n",
      "loss: 0.016549  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.044742  [  128/29858]\n",
      "loss: 0.009238  [12928/29858]\n",
      "loss: 0.017250  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.006113  [  128/29858]\n",
      "loss: 0.029373  [12928/29858]\n",
      "loss: 0.033555  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.008858  [  128/29858]\n",
      "loss: 0.005947  [12928/29858]\n",
      "loss: 0.016635  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.010666  [  128/29858]\n",
      "loss: 0.016780  [12928/29858]\n",
      "loss: 0.007454  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.005842  [  128/29858]\n",
      "loss: 0.007317  [12928/29858]\n",
      "loss: 0.099960  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.183690  [  128/29858]\n",
      "loss: 0.085559  [12928/29858]\n",
      "loss: 0.058224  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 97.4%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.083731  [  128/29858]\n",
      "loss: 0.010727  [12928/29858]\n",
      "loss: 0.013127  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.040868  [  128/29858]\n",
      "loss: 0.021607  [12928/29858]\n",
      "loss: 0.010391  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.026774  [  128/29858]\n",
      "loss: 0.007788  [12928/29858]\n",
      "loss: 0.005974  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.003451  [  128/29858]\n",
      "loss: 0.002996  [12928/29858]\n",
      "loss: 0.005901  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.005601  [  128/29858]\n",
      "loss: 0.008399  [12928/29858]\n",
      "loss: 0.011538  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.004592  [  128/29858]\n",
      "loss: 0.164945  [12928/29858]\n",
      "loss: 0.192688  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.150240  [  128/29858]\n",
      "loss: 0.061797  [12928/29858]\n",
      "loss: 0.018264  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.047151  [  128/29858]\n",
      "loss: 0.070870  [12928/29858]\n",
      "loss: 0.053540  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.039894  [  128/29858]\n",
      "loss: 0.049766  [12928/29858]\n",
      "loss: 0.025719  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.017243  [  128/29858]\n",
      "loss: 0.011454  [12928/29858]\n",
      "loss: 0.013417  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.003861  [  128/29858]\n",
      "loss: 0.002990  [12928/29858]\n",
      "loss: 0.003417  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.001908  [  128/29858]\n",
      "loss: 0.001695  [12928/29858]\n",
      "loss: 0.005765  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.001668  [  128/29858]\n",
      "loss: 0.001109  [12928/29858]\n",
      "loss: 0.001743  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.001294  [  128/29858]\n",
      "loss: 0.000919  [12928/29858]\n",
      "loss: 0.001493  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.001118  [  128/29858]\n",
      "loss: 0.000810  [12928/29858]\n",
      "loss: 0.001311  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.001006  [  128/29858]\n",
      "loss: 0.000729  [12928/29858]\n",
      "loss: 0.001166  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.000916  [  128/29858]\n",
      "loss: 0.000663  [12928/29858]\n",
      "loss: 0.001047  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.000839  [  128/29858]\n",
      "loss: 0.000606  [12928/29858]\n",
      "loss: 0.000946  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.000772  [  128/29858]\n",
      "loss: 0.000557  [12928/29858]\n",
      "loss: 0.000859  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000711  [  128/29858]\n",
      "loss: 0.000512  [12928/29858]\n",
      "loss: 0.000783  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.000656  [  128/29858]\n",
      "loss: 0.000473  [12928/29858]\n",
      "loss: 0.000715  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.000605  [  128/29858]\n",
      "loss: 0.000436  [12928/29858]\n",
      "loss: 0.000654  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.000559  [  128/29858]\n",
      "loss: 0.000403  [12928/29858]\n",
      "loss: 0.000599  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.000515  [  128/29858]\n",
      "loss: 0.000372  [12928/29858]\n",
      "loss: 0.000548  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.000475  [  128/29858]\n",
      "loss: 0.000344  [12928/29858]\n",
      "loss: 0.000502  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000438  [  128/29858]\n",
      "loss: 0.000317  [12928/29858]\n",
      "loss: 0.000460  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000403  [  128/29858]\n",
      "loss: 0.000292  [12928/29858]\n",
      "loss: 0.000421  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000370  [  128/29858]\n",
      "loss: 0.000269  [12928/29858]\n",
      "loss: 0.000385  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000340  [  128/29858]\n",
      "loss: 0.000248  [12928/29858]\n",
      "loss: 0.000351  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000311  [  128/29858]\n",
      "loss: 0.000227  [12928/29858]\n",
      "loss: 0.000320  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000284  [  128/29858]\n",
      "loss: 0.000208  [12928/29858]\n",
      "loss: 0.000292  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000259  [  128/29858]\n",
      "loss: 0.000191  [12928/29858]\n",
      "loss: 0.000265  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000237  [  128/29858]\n",
      "loss: 0.000174  [12928/29858]\n",
      "loss: 0.000241  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000215  [  128/29858]\n",
      "loss: 0.000159  [12928/29858]\n",
      "loss: 0.000219  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000196  [  128/29858]\n",
      "loss: 0.000144  [12928/29858]\n",
      "loss: 0.000198  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000177  [  128/29858]\n",
      "loss: 0.000131  [12928/29858]\n",
      "loss: 0.000179  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000161  [  128/29858]\n",
      "loss: 0.000119  [12928/29858]\n",
      "loss: 0.000162  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000145  [  128/29858]\n",
      "loss: 0.000108  [12928/29858]\n",
      "loss: 0.000146  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000131  [  128/29858]\n",
      "loss: 0.000097  [12928/29858]\n",
      "loss: 0.000131  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000118  [  128/29858]\n",
      "loss: 0.000088  [12928/29858]\n",
      "loss: 0.000118  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000106  [  128/29858]\n",
      "loss: 0.000079  [12928/29858]\n",
      "loss: 0.000106  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000096  [  128/29858]\n",
      "loss: 0.000071  [12928/29858]\n",
      "loss: 0.000095  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000086  [  128/29858]\n",
      "loss: 0.000064  [12928/29858]\n",
      "loss: 0.000085  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000077  [  128/29858]\n",
      "loss: 0.000057  [12928/29858]\n",
      "loss: 0.000076  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000069  [  128/29858]\n",
      "loss: 0.000051  [12928/29858]\n",
      "loss: 0.000068  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000062  [  128/29858]\n",
      "loss: 0.000046  [12928/29858]\n",
      "loss: 0.000060  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000055  [  128/29858]\n",
      "loss: 0.000041  [12928/29858]\n",
      "loss: 0.000054  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000049  [  128/29858]\n",
      "loss: 0.000037  [12928/29858]\n",
      "loss: 0.000048  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000044  [  128/29858]\n",
      "loss: 0.000033  [12928/29858]\n",
      "loss: 0.000043  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000039  [  128/29858]\n",
      "loss: 0.000029  [12928/29858]\n",
      "loss: 0.000038  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000035  [  128/29858]\n",
      "loss: 0.000026  [12928/29858]\n",
      "loss: 0.000034  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000031  [  128/29858]\n",
      "loss: 0.000023  [12928/29858]\n",
      "loss: 0.000030  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000028  [  128/29858]\n",
      "loss: 0.000020  [12928/29858]\n",
      "loss: 0.000027  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000025  [  128/29858]\n",
      "loss: 0.000018  [12928/29858]\n",
      "loss: 0.000024  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000022  [  128/29858]\n",
      "loss: 0.000016  [12928/29858]\n",
      "loss: 0.000021  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000020  [  128/29858]\n",
      "loss: 0.000014  [12928/29858]\n",
      "loss: 0.000018  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000017  [  128/29858]\n",
      "loss: 0.000013  [12928/29858]\n",
      "loss: 0.000016  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000015  [  128/29858]\n",
      "loss: 0.000011  [12928/29858]\n",
      "loss: 0.000014  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000014  [  128/29858]\n",
      "loss: 0.000010  [12928/29858]\n",
      "loss: 0.000013  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000012  [  128/29858]\n",
      "loss: 0.000009  [12928/29858]\n",
      "loss: 0.000011  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000011  [  128/29858]\n",
      "loss: 0.000008  [12928/29858]\n",
      "loss: 0.000010  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000009  [  128/29858]\n",
      "loss: 0.000007  [12928/29858]\n",
      "loss: 0.000009  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000008  [  128/29858]\n",
      "loss: 0.000006  [12928/29858]\n",
      "loss: 0.000008  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000007  [  128/29858]\n",
      "loss: 0.000005  [12928/29858]\n",
      "loss: 0.000007  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000007  [  128/29858]\n",
      "loss: 0.000005  [12928/29858]\n",
      "loss: 0.000006  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000006  [  128/29858]\n",
      "loss: 0.000004  [12928/29858]\n",
      "loss: 0.000005  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/29858]\n",
      "loss: 0.000004  [12928/29858]\n",
      "loss: 0.000005  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/29858]\n",
      "loss: 0.000003  [12928/29858]\n",
      "loss: 0.000004  [25728/29858]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n"
     ]
    }
   ],
   "source": [
    "# Train one fresh target model on a limited Purchase100 split and save its weights.\n",
    "(x_train, y_train), (x_test, y_test), train_keep_tar, _ = load_Purchase100_limited(0, 50, prop_keep, 5)\n",
    "training_data = CustomDataset(x_train, y_train, model_transform)\n",
    "test_data = CustomDataset(x_test, y_test, model_transform)\n",
    "train_dataloader = DataLoader(training_data, batch_size=batch_size)\n",
    "test_dataloader = DataLoader(test_data, batch_size=batch_size)\n",
    "# Build the target model architecture; branching mirrors the earlier target-model cell.\n",
    "if model in ['NN', 'NN_4layer']:\n",
    "    TargetModel = globals()['create_{}_model'.format(model)](x_train.shape[1], y_train.max()+1)\n",
    "elif model == 'CNN':\n",
    "    TargetModel = globals()['create_{}_model'.format(model)](y_train.max()+1, data_name)\n",
    "else:\n",
    "    # Fallback for any other architecture name; without this branch, a model name\n",
    "    # outside ['NN', 'NN_4layer', 'CNN'] would leave TargetModel unbound (NameError below).\n",
    "    TargetModel = globals()['create_{}_model'.format(model)](y_train.max()+1)\n",
    "# print(TargetModel)\n",
    "TargetModel.to(device)\n",
    "loss_fn = nn.CrossEntropyLoss()\n",
    "optimizer = torch.optim.Adam(TargetModel.parameters(), lr=LEARNING_RATE)\n",
    "for t in range(epochs):\n",
    "    print(f\"Epoch {t+1}\\n-------------------------------\")\n",
    "    train(train_dataloader, TargetModel, loss_fn, optimizer, device)\n",
    "print(\"Done!\")\n",
    "# Save checkpoint; index 0 marks this as the (single) target model, not a shadow model.\n",
    "weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_targetmodel{}.pth\".format(data_name, model, epochs, 0))\n",
    "#weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_model{}.pth\".format(data_name, model, epochs, i))\n",
    "torch.save(TargetModel.state_dict(), weight_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "id": "af5fe4ae-7c9d-4eb2-8048-71ba4f6fef6a",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " Error: \n",
      " Accuracy: 93.4%  \n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Score the newly trained target model over the full dataset.\n",
    "conf_data, label_data = get_model_pred(all_dataloader, TargetModel, device)\n",
    "conf_data = conf_data.detach().cpu().numpy()\n",
    "label_data = label_data.detach().cpu().numpy()\n",
    "conf_data = conf_data.astype(np.float64)  # widen to float64 before score computation\n",
    "score_tar = cal_score(conf_data.copy(), label_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 63,
   "id": "c2f250c1-9527-4a88-bbdc-456b289157c5",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(60000,)"
      ]
     },
     "execution_count": 63,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "score_tar.shape  # sanity check: expect one score per sample in the full dataset"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 64,
   "id": "4ca3e241-c717-44dc-aefb-769ebfa3bb54",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sweep configuration: shadow-model counts to evaluate, and how many of the\n",
    "# top-risk samples (x) to score for each count.\n",
    "num_shadowsets_list = [2,4,6,8,10,20,30,40,50]\n",
    "x = 600\n",
    "# Previously recorded accuracies (%) for each count in num_shadowsets_list:\n",
    "#[86.166, 91.5833, 91.93333, 93.35, 93.2, 94.2333, 94.7666, 94.8, 94.8]\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "id": "9ed97dce-7f06-4b72-a5a4-d5d6edfa98d0",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      "AUC value is: 0.7755291130767068\n",
      "Accuracy is: 0.6758\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      "AUC value is: 0.7995475534187413\n",
      "Accuracy is: 0.6794333333333333\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      "AUC value is: 0.8090673578157812\n",
      "Accuracy is: 0.6929833333333333\n",
      " Error: \n",
      " Accuracy: 93.4%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.4%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.4%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.4%  \n",
      "\n",
      "AUC value is: 0.8249299254300182\n",
      "Accuracy is: 0.7083666666666667\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      "AUC value is: 0.8197921280983012\n",
      "Accuracy is: 0.7117\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      "AUC value is: 0.8404607267223263\n",
      "Accuracy is: 0.7282333333333333\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      "AUC value is: 0.8444470893679\n",
      "Accuracy is: 0.7303333333333333\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      "AUC value is: 0.8481745984362254\n",
      "Accuracy is: 0.73435\n",
      " Error: \n",
      " Accuracy: 92.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.1%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.2%  \n",
      "\n"
     ]
    },
    {
     "ename": "FileNotFoundError",
     "evalue": "[Errno 2] No such file or directory: '../weights_for_exp/Purchase100_limited/Purchase100_limited_NN_4layer_epoch100_shadownum50_model7.pth'",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mFileNotFoundError\u001b[0m                         Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[65], line 9\u001b[0m\n\u001b[1;32m      7\u001b[0m weight_part \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{}\u001b[39;00m\u001b[38;5;124m_\u001b[39m\u001b[38;5;132;01m{}\u001b[39;00m\u001b[38;5;124m_epoch\u001b[39m\u001b[38;5;132;01m{}\u001b[39;00m\u001b[38;5;124m_shadownum\u001b[39m\u001b[38;5;132;01m{}\u001b[39;00m\u001b[38;5;124m_model\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mformat(data_name, model, epochs, num)\n\u001b[1;32m      8\u001b[0m m, n, train_keep_tmp \u001b[38;5;241m=\u001b[39m load_Purchase100_limited_keep(num, prop_keep, seed)\n\u001b[0;32m----> 9\u001b[0m conf_datas, label_datas, scores \u001b[38;5;241m=\u001b[39m \u001b[43mload_score_data_all\u001b[49m\u001b[43m(\u001b[49m\u001b[43mX_data\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mY_data\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mweight_dir\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnum\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdata_name\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mweight_part\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel_transform\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbatch_size\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m     10\u001b[0m loss_fn \u001b[38;5;241m=\u001b[39m nn\u001b[38;5;241m.\u001b[39mCrossEntropyLoss(reduction\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mnone\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[1;32m     11\u001b[0m loss_datas, label_data \u001b[38;5;241m=\u001b[39m load_loss_data_all(X_data, Y_data, loss_fn, weight_dir, num, data_name, model, weight_part, model_transform, batch_size, device)\n",
      "File \u001b[0;32m~/hecen/final_work/code/exp_code/../frame/LIRAAttack.py:96\u001b[0m, in \u001b[0;36mload_score_data_all\u001b[0;34m(x_data, y_data, weight_dir, model_num, data_name, model, weight_part, transform, batch_size, device)\u001b[0m\n\u001b[1;32m     94\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mload_score_data_all\u001b[39m(x_data, y_data, weight_dir, model_num, data_name, model, weight_part, transform, batch_size, device):\n\u001b[1;32m     95\u001b[0m     \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(model_num):\n\u001b[0;32m---> 96\u001b[0m         conf_data, label_data \u001b[38;5;241m=\u001b[39m \u001b[43mload_conf_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43mx_data\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mx_data\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43my_data\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43my_data\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mweight_dir\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_dir\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel_index\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mi\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdata_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdata_name\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\n\u001b[1;32m     97\u001b[0m \u001b[43m                                                        \u001b[49m\u001b[43mweight_part\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mweight_part\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtransform\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtransform\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mbatch_size\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbatch_size\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m     98\u001b[0m         conf_data \u001b[38;5;241m=\u001b[39m conf_data\u001b[38;5;241m.\u001b[39mastype(np\u001b[38;5;241m.\u001b[39mfloat64)\n\u001b[1;32m     99\u001b[0m         score \u001b[38;5;241m=\u001b[39m cal_score(conf_data\u001b[38;5;241m.\u001b[39mcopy(), label_data)\n",
      "File \u001b[0;32m~/hecen/final_work/code/exp_code/../frame/LIRAAttack.py:35\u001b[0m, in \u001b[0;36mload_conf_data\u001b[0;34m(x_data, y_data, weight_dir, model_index, data_name, model, weight_part, transform, batch_size, device)\u001b[0m\n\u001b[1;32m     33\u001b[0m weight_path \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mjoin(weight_dir, weight_part \u001b[38;5;241m+\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{}\u001b[39;00m\u001b[38;5;124m.pth\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mformat(model_index))\n\u001b[1;32m     34\u001b[0m \u001b[38;5;66;03m# print(Reference_Model)\u001b[39;00m\n\u001b[0;32m---> 35\u001b[0m Reference_Model\u001b[38;5;241m.\u001b[39mload_state_dict(\u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mload\u001b[49m\u001b[43m(\u001b[49m\u001b[43mweight_path\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m     36\u001b[0m Reference_Model\u001b[38;5;241m.\u001b[39mto(device)\n\u001b[1;32m     37\u001b[0m \u001b[38;5;66;03m# 数据准备\u001b[39;00m\n",
      "File \u001b[0;32m~/.conda/envs/opacus/lib/python3.8/site-packages/torch/serialization.py:998\u001b[0m, in \u001b[0;36mload\u001b[0;34m(f, map_location, pickle_module, weights_only, mmap, **pickle_load_args)\u001b[0m\n\u001b[1;32m    995\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mencoding\u001b[39m\u001b[38;5;124m'\u001b[39m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;129;01min\u001b[39;00m pickle_load_args\u001b[38;5;241m.\u001b[39mkeys():\n\u001b[1;32m    996\u001b[0m     pickle_load_args[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mencoding\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mutf-8\u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[0;32m--> 998\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[43m_open_file_like\u001b[49m\u001b[43m(\u001b[49m\u001b[43mf\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mrb\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m \u001b[38;5;28;01mas\u001b[39;00m opened_file:\n\u001b[1;32m    999\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m _is_zipfile(opened_file):\n\u001b[1;32m   1000\u001b[0m         \u001b[38;5;66;03m# The zipfile reader is going to advance the current file position.\u001b[39;00m\n\u001b[1;32m   1001\u001b[0m         \u001b[38;5;66;03m# If we want to actually tail call to torch.jit.load, we need to\u001b[39;00m\n\u001b[1;32m   1002\u001b[0m         \u001b[38;5;66;03m# reset back to the original position.\u001b[39;00m\n\u001b[1;32m   1003\u001b[0m         orig_position \u001b[38;5;241m=\u001b[39m opened_file\u001b[38;5;241m.\u001b[39mtell()\n",
      "File \u001b[0;32m~/.conda/envs/opacus/lib/python3.8/site-packages/torch/serialization.py:445\u001b[0m, in \u001b[0;36m_open_file_like\u001b[0;34m(name_or_buffer, mode)\u001b[0m\n\u001b[1;32m    443\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_open_file_like\u001b[39m(name_or_buffer, mode):\n\u001b[1;32m    444\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m _is_path(name_or_buffer):\n\u001b[0;32m--> 445\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_open_file\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname_or_buffer\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    446\u001b[0m     \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m    447\u001b[0m         \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mw\u001b[39m\u001b[38;5;124m'\u001b[39m \u001b[38;5;129;01min\u001b[39;00m mode:\n",
      "File \u001b[0;32m~/.conda/envs/opacus/lib/python3.8/site-packages/torch/serialization.py:426\u001b[0m, in \u001b[0;36m_open_file.__init__\u001b[0;34m(self, name, mode)\u001b[0m\n\u001b[1;32m    425\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__init__\u001b[39m(\u001b[38;5;28mself\u001b[39m, name, mode):\n\u001b[0;32m--> 426\u001b[0m     \u001b[38;5;28msuper\u001b[39m()\u001b[38;5;241m.\u001b[39m\u001b[38;5;21m__init__\u001b[39m(\u001b[38;5;28;43mopen\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[43m)\u001b[49m)\n",
      "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: '../weights_for_exp/Purchase100_limited/Purchase100_limited_NN_4layer_epoch100_shadownum50_model7.pth'"
     ]
    }
   ],
   "source": [
    "# For each number of reference models, prepare:\n",
    "#   train_keep    -- per-model membership masks of the shadow sets\n",
    "#   score_all     -- per-model scores for every sample\n",
    "#   pri_risk_rank -- sample indices ranked by privacy-risk score\n",
    "result = []\n",
    "for num in num_shadowsets_list:\n",
    "    # File-name stem of the saved shadow-model weights for this sweep value.\n",
    "    weight_part = \"{}_{}_epoch{}_shadownum{}_model\".format(data_name, model, epochs, num)\n",
    "    m, n, train_keep_tmp = load_Purchase100_limited_keep(num, prop_keep, seed)\n",
    "    conf_datas, label_datas, scores = load_score_data_all(X_data, Y_data, weight_dir, num, data_name, model, weight_part, model_transform, batch_size, device)\n",
    "    # Unreduced (per-sample) losses are required by the risk score below.\n",
    "    loss_fn = nn.CrossEntropyLoss(reduction='none')\n",
    "    loss_datas, label_data = load_loss_data_all(X_data, Y_data, loss_fn, weight_dir, num, data_name, model, weight_part, model_transform, batch_size, device)\n",
    "    pri_risk_tmp = get_risk_score(loss_datas, train_keep_tmp)\n",
    "    # Rank sample indices from highest to lowest risk.\n",
    "    pri_risk_rank_tmp = np.argsort(pri_risk_tmp)\n",
    "    pri_risk_rank_tmp = np.flip(pri_risk_rank_tmp)\n",
    "    \n",
    "    # Run the LiRA attack against the target model, report ROC on all\n",
    "    # samples, then measure accuracy on only the top-x highest-risk samples.\n",
    "    pred_result = LIRA_attack(train_keep_tmp, scores, score_tar, train_keep_tar)\n",
    "    evaluate_ROC(pred_result, train_keep_tar, threshold=0)\n",
    "    pred_clip = pred_result[pri_risk_rank_tmp[:x]]\n",
    "    mem_clip = train_keep_tar[pri_risk_rank_tmp[:x]]\n",
    "    # Positive LiRA score => predicted member.\n",
    "    pred_clip = pred_clip > 0\n",
    "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    result.append(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "id": "fe9851b9-300d-4f56-8d01-99b8702a9f33",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.85,\n",
       " 0.9483333333333334,\n",
       " 0.9333333333333333,\n",
       " 0.9533333333333334,\n",
       " 0.945,\n",
       " 0.97,\n",
       " 0.9833333333333333,\n",
       " 0.98]"
      ]
     },
     "execution_count": 66,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# NOTE(review): the sweep above aborted with FileNotFoundError while loading\n",
    "# shadownum50 model7 weights, so `result` holds fewer entries than\n",
    "# num_shadowsets_list -- re-run after restoring the missing weight files.\n",
    "result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4c00c2d9-75e3-4209-b4c0-7c7dcd3d0a1c",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "f987d23c-061b-4169-bd8f-74f84d6224ae",
   "metadata": {},
   "source": [
    "### 绘制数据集大小的影响"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "id": "9f0ae045-14bd-4159-948c-8881f74cd181",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Target-model training-set sizes to sweep over.\n",
    "data_size = [6000, 10000, 14000, 18000, 25000, 30000]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "id": "d64311d6-c09d-47b3-8076-f11eb10134aa",
   "metadata": {},
   "outputs": [],
   "source": [
    "# One-off training script, kept commented out for provenance: it trained\n",
    "# one target model per entry of data_size and saved the weights that the\n",
    "# evaluation loop below loads back from weight_dir.\n",
    "# # Train one target model per training-set size\n",
    "# for size in data_size:\n",
    "#     dataframe = pd.read_csv('../datasets/purchase100.txt', sep=',', encoding='utf-8', header=None)\n",
    "#     X = dataframe.iloc[:, range(600)].values\n",
    "#     Y = np.array([i for i in dataframe.loc[:, 600]])\n",
    "    \n",
    "#     x_train = X[:size]\n",
    "#     y_train = Y[:size]\n",
    "    \n",
    "#     training_data = CustomDataset(x_train, y_train, model_transform)\n",
    "#     train_dataloader = DataLoader(training_data, batch_size=batch_size)\n",
    "#     if model in ['NN', 'NN_4layer']:\n",
    "#         TargetModel = globals()['create_{}_model'.format(model)](x_train.shape[1], y_train.max()+1)\n",
    "#     elif model == 'CNN':\n",
    "#         TargetModel = globals()['create_{}_model'.format(model)](y_train.max()+1, data_name)\n",
    "#     # print(TargetModel)\n",
    "#     TargetModel.to(device)\n",
    "#     loss_fn = nn.CrossEntropyLoss()\n",
    "#     optimizer = torch.optim.Adam(TargetModel.parameters(), lr=LEARNING_RATE)\n",
    "#     for t in range(epochs):\n",
    "#         print(f\"Epoch {t+1}\\n-------------------------------\")\n",
    "#         train(train_dataloader, TargetModel, loss_fn, optimizer, device)\n",
    "#     print(\"Done!\")\n",
    "#     weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_targetmodelsize{}.pth\".format(data_name, model, epochs, size))\n",
    "#     #weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_model{}.pth\".format(data_name, model, epochs, i))\n",
    "#     torch.save(TargetModel.state_dict(), weight_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "id": "f6357aa9-eb98-43e4-853c-ac8ae49d6f54",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Accumulators for the size sweep: shadow-model attack accuracy vs.\n",
    "# risk-guided LiRA accuracy, one entry per data_size value.\n",
    "shadow_result = []\n",
    "LIRA_result = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "id": "87d308e9-be9d-40fb-be9e-e84efc339052",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " Error: \n",
      " Accuracy: 85.7%  \n",
      "\n",
      "AUC value is: 0.8515604583333334\n",
      "Accuracy is: 0.5210833333333333\n",
      "AUC value is: 0.7385035833333334\n",
      "Accuracy is: 0.55225\n",
      " Error: \n",
      " Accuracy: 88.4%  \n",
      "\n",
      "AUC value is: 0.804947925\n",
      "Accuracy is: 0.5987\n",
      "AUC value is: 0.77031222\n",
      "Accuracy is: 0.5855\n",
      " Error: \n",
      " Accuracy: 90.4%  \n",
      "\n",
      "AUC value is: 0.7728200153061224\n",
      "Accuracy is: 0.7411428571428571\n",
      "AUC value is: 0.8028828316326531\n",
      "Accuracy is: 0.6258214285714285\n",
      " Error: \n",
      " Accuracy: 91.4%  \n",
      "\n",
      "AUC value is: 0.7546785679012346\n",
      "Accuracy is: 0.7876388888888889\n",
      "AUC value is: 0.8347663024691356\n",
      "Accuracy is: 0.6760277777777778\n",
      " Error: \n",
      " Accuracy: 93.0%  \n",
      "\n",
      "AUC value is: 0.7285570391999999\n",
      "Accuracy is: 0.7237\n",
      "AUC value is: 0.8676546096000002\n",
      "Accuracy is: 0.75474\n",
      " Error: \n",
      " Accuracy: 93.5%  \n",
      "\n",
      "AUC value is: 0.7139630133333335\n",
      "Accuracy is: 0.6931333333333334\n",
      "AUC value is: 0.8561532711111111\n",
      "Accuracy is: 0.7477333333333334\n"
     ]
    }
   ],
   "source": [
    "# Evaluate both attacks against each target model trained above,\n",
    "# varying the target's training-set size.\n",
    "# Prepare the evaluation dataset per size.\n",
    "for size in data_size:\n",
    "    # Prepare data: the first `size` samples are labeled members, the\n",
    "    # next `size` non-members (mem_label below).\n",
    "    x = int(size*0.01)  # evaluate on the top 1% highest-risk samples\n",
    "    x_test = X_data[:size*2]\n",
    "    y_test = Y_data[:size*2]\n",
    "    mem_label = np.concatenate((np.ones(size), np.zeros(size)), 0)\n",
    "    # Load the target model (Purchase100: 600 input features, 100 classes).\n",
    "    TargetModel = globals()['create_{}_model'.format(model)](600, 100)\n",
    "    weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_targetmodelsize{}.pth\".format(data_name, model, epochs, size))\n",
    "    TargetModel.load_state_dict(torch.load(weight_path))\n",
    "    TargetModel.to(device)\n",
    "    \n",
    "    test_data = CustomDataset(x_test, y_test, model_transform)\n",
    "    test_dataloader = DataLoader(test_data, batch_size=batch_size)\n",
    "    \n",
    "    # Target-model confidences and per-sample scores for this size.\n",
    "    conf_data, label_data = get_model_pred(test_dataloader, TargetModel, device)\n",
    "    conf_data = conf_data.detach().cpu().numpy()\n",
    "    label_data = label_data.detach().cpu().numpy()\n",
    "    conf_data = conf_data.astype(np.float64)\n",
    "    score_tar = cal_score(conf_data.copy(), label_data)\n",
    "\n",
    "    # Shadow-model attack: feed the top-3 confidences into the trained\n",
    "    # attack model and record its accuracy.\n",
    "    targetX = conf_data\n",
    "    targetY = mem_label\n",
    "    targetX = targetX.astype(np.float32)\n",
    "    targetX, _ = get_top_k_conf(3, targetX, targetX)\n",
    "    shadow_attack_data = CustomDataset(targetX, targetY, attack_transform)\n",
    "    shadow_attack_dataloader = DataLoader(shadow_attack_data, batch_size=batch_size, shuffle=False)\n",
    "    attack_test_scores, attack_test_mem = get_attack_pred(shadow_attack_dataloader, attack_model, device)\n",
    "    attack_test_scores, attack_test_mem = attack_test_scores.detach().cpu().numpy(), attack_test_mem.detach().cpu().numpy()\n",
    "    accuracy = evaluate_ROC(attack_test_scores, attack_test_mem)\n",
    "    shadow_result.append(accuracy)\n",
    "\n",
    "    # Risk-guided LiRA attack on the top-x highest-risk samples.\n",
    "    # NOTE(review): the next line recomputes the identical score_tar from\n",
    "    # above -- one of the two computations is redundant.\n",
    "    score_tar = cal_score(conf_data.copy(), label_data)\n",
    "    pri_risk_t = pri_risk_all[:size*2]\n",
    "    pri_risk_rank_t = np.argsort(pri_risk_t)\n",
    "    pri_risk_rank_t = np.flip(pri_risk_rank_t)\n",
    "\n",
    "    pred_result = LIRA_attack(train_keep[:,:size*2], score_all[:,:size*2], score_tar, mem_label)\n",
    "    evaluate_ROC(pred_result, mem_label, threshold=0)\n",
    "    pred_clip = pred_result[pri_risk_rank_t[:x]]\n",
    "    mem_clip = mem_label[pri_risk_rank_t[:x]]\n",
    "    # Positive LiRA score => predicted member.\n",
    "    pred_clip = pred_clip > 0\n",
    "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    LIRA_result.append(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "69620749-f8c1-4e2c-a55d-8d4a09245662",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 50,
   "id": "d79ab40d-09fd-425c-a009-89ed56b8fb71",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.5210833333333333,\n",
       " 0.5987,\n",
       " 0.7411428571428571,\n",
       " 0.7876388888888889,\n",
       " 0.7237,\n",
       " 0.6931333333333334]"
      ]
     },
     "execution_count": 50,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "shadow_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "id": "5e342a8b-8230-48f9-b637-ab02454ea579",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.9833333333333333,\n",
       " 0.95,\n",
       " 0.9785714285714285,\n",
       " 0.9666666666666667,\n",
       " 0.98,\n",
       " 0.9833333333333333]"
      ]
     },
     "execution_count": 51,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "LIRA_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4668c031-3dad-4c03-be50-c2312564628f",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0fe80512-665e-4fcf-97de-3bf65c65475f",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d8664598-2020-45a0-ac47-54749cf832f3",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "73917e2c-3e1f-494e-be85-35d81ebcaf71",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "d663c248-17f7-4940-ac1e-6ae9d3ba5d8d",
   "metadata": {},
   "source": [
    "### 绘制泛化误差的影响（Plot the effect of generalization error）"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "55ae827e-0953-46e1-bfdd-9c615abff316",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = 2000\n",
    "shadow_result = []\n",
    "LIRA_result = []\n",
    "gene_distance = []\n",
    "base_result = []\n",
    "risk_base_result = []\n",
    "test_acc_list = []\n",
    "train_acc_list = []\n",
    "# l2_norm_list = [1e-4, 5e-4, 1e-3, 2e-3, 3e-3, 4e-3, 5e-3, 6e-3, 7e-3, 8e-3, 9e-3, 1e-2]\n",
    "l2_norm_list = [0, 5e-4, 1e-3, 5e-3, 1e-2]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "a119d58f-0052-4d91-9108-02dba5ffcd9a",
   "metadata": {},
   "outputs": [],
   "source": [
    "dataframe = pd.read_csv('../datasets/purchase100.txt', sep=',', encoding='utf-8', header=None)\n",
    "X_tmp = dataframe.iloc[:, range(600)].values\n",
    "Y_tmp = np.array([i for i in dataframe.loc[:, 600]])\n",
    "x_test_data = X_tmp[90000:110000]\n",
    "y_test_data = Y_tmp[90000:110000]\n",
    "\n",
    "test_data = CustomDataset(x_test_data, y_test_data, model_transform)\n",
    "test_dataloader = DataLoader(test_data, batch_size=batch_size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "8c20ff28-cef7-4498-b8d8-966e36e175c8",
   "metadata": {},
   "outputs": [],
   "source": [
    "class NN_4layer_dropout(nn.Module):\n",
    "    def __init__(self, input_feature, num_classes):\n",
    "        super().__init__()\n",
    "        self.fc1 = nn.Linear(input_feature, 1024)\n",
    "        self.fc2 = nn.Linear(1024, 512)\n",
    "        self.fc3 = nn.Linear(512, 256)\n",
    "        self.fc4 = nn.Linear(256, num_classes)\n",
    "        self.Tanh = nn.Tanh()\n",
    "        self.dropout = nn.Dropout(p=0.5)  # 添加Dropout层，p为丢弃概率\n",
    "\n",
    "    def forward(self, x):\n",
    "        x = self.Tanh(self.fc1(x))\n",
    "        x = self.dropout(x)  # 在第一层全连接层后添加Dropout层\n",
    "        x = self.Tanh(self.fc2(x))\n",
    "        x = self.dropout(x)  # 在第二层全连接层后添加Dropout层\n",
    "        x = self.Tanh(self.fc3(x))\n",
    "        x = self.dropout(x)  # 在第三层全连接层后添加Dropout层\n",
    "        logits = self.fc4(x)\n",
    "        # pred_probab = nn.LogSoftmax(dim=1)(logits)\n",
    "        return logits\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "18d2a643-10fe-45dc-9c8e-7e901cf4e547",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.615520  [  128/30013]\n",
      "loss: 4.029383  [12928/30013]\n",
      "loss: 2.896262  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 12.8%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 2.644739  [  128/30013]\n",
      "loss: 2.189019  [12928/30013]\n",
      "loss: 1.816627  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 40.4%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 1.879628  [  128/30013]\n",
      "loss: 1.586553  [12928/30013]\n",
      "loss: 1.515882  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.3%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 1.590261  [  128/30013]\n",
      "loss: 1.384720  [12928/30013]\n",
      "loss: 1.218332  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 56.9%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 1.295956  [  128/30013]\n",
      "loss: 1.253682  [12928/30013]\n",
      "loss: 1.091400  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.8%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 1.235368  [  128/30013]\n",
      "loss: 1.034571  [12928/30013]\n",
      "loss: 1.051405  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 63.5%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.150720  [  128/30013]\n",
      "loss: 1.105669  [12928/30013]\n",
      "loss: 1.054242  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.2%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.047033  [  128/30013]\n",
      "loss: 1.042712  [12928/30013]\n",
      "loss: 0.920061  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.2%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 1.143555  [  128/30013]\n",
      "loss: 0.987423  [12928/30013]\n",
      "loss: 0.911370  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.4%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 1.150037  [  128/30013]\n",
      "loss: 0.873434  [12928/30013]\n",
      "loss: 0.840676  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.8%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.014096  [  128/30013]\n",
      "loss: 0.927667  [12928/30013]\n",
      "loss: 0.828973  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.7%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.931443  [  128/30013]\n",
      "loss: 0.924842  [12928/30013]\n",
      "loss: 0.748435  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.7%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.881171  [  128/30013]\n",
      "loss: 0.871034  [12928/30013]\n",
      "loss: 0.800882  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.0%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.869477  [  128/30013]\n",
      "loss: 0.905058  [12928/30013]\n",
      "loss: 0.663718  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.7%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.953841  [  128/30013]\n",
      "loss: 0.772552  [12928/30013]\n",
      "loss: 0.700301  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.5%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.770767  [  128/30013]\n",
      "loss: 0.745365  [12928/30013]\n",
      "loss: 0.680350  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.3%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.786481  [  128/30013]\n",
      "loss: 0.712782  [12928/30013]\n",
      "loss: 0.611621  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.3%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.728126  [  128/30013]\n",
      "loss: 0.687655  [12928/30013]\n",
      "loss: 0.573819  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.0%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.765263  [  128/30013]\n",
      "loss: 0.683253  [12928/30013]\n",
      "loss: 0.696698  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.9%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.692766  [  128/30013]\n",
      "loss: 0.682361  [12928/30013]\n",
      "loss: 0.629957  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.7%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.746021  [  128/30013]\n",
      "loss: 0.576055  [12928/30013]\n",
      "loss: 0.607145  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.9%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.796364  [  128/30013]\n",
      "loss: 0.666349  [12928/30013]\n",
      "loss: 0.537472  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.6%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.610015  [  128/30013]\n",
      "loss: 0.618489  [12928/30013]\n",
      "loss: 0.538163  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.3%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.725861  [  128/30013]\n",
      "loss: 0.690209  [12928/30013]\n",
      "loss: 0.582856  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.4%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.660549  [  128/30013]\n",
      "loss: 0.709034  [12928/30013]\n",
      "loss: 0.557009  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.7%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.605201  [  128/30013]\n",
      "loss: 0.656795  [12928/30013]\n",
      "loss: 0.627485  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.3%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.661719  [  128/30013]\n",
      "loss: 0.448093  [12928/30013]\n",
      "loss: 0.528646  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.6%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.719687  [  128/30013]\n",
      "loss: 0.571077  [12928/30013]\n",
      "loss: 0.428930  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.7%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.833538  [  128/30013]\n",
      "loss: 0.639335  [12928/30013]\n",
      "loss: 0.455227  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.7%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.671735  [  128/30013]\n",
      "loss: 0.500937  [12928/30013]\n",
      "loss: 0.519259  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.3%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.603684  [  128/30013]\n",
      "loss: 0.506211  [12928/30013]\n",
      "loss: 0.440329  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.6%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.559436  [  128/30013]\n",
      "loss: 0.544864  [12928/30013]\n",
      "loss: 0.437449  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.0%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.549594  [  128/30013]\n",
      "loss: 0.525815  [12928/30013]\n",
      "loss: 0.409651  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.2%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.545063  [  128/30013]\n",
      "loss: 0.667883  [12928/30013]\n",
      "loss: 0.470953  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.4%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.492487  [  128/30013]\n",
      "loss: 0.566140  [12928/30013]\n",
      "loss: 0.377171  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.9%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.584132  [  128/30013]\n",
      "loss: 0.449230  [12928/30013]\n",
      "loss: 0.417149  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.4%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.658430  [  128/30013]\n",
      "loss: 0.426151  [12928/30013]\n",
      "loss: 0.391498  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.1%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.575981  [  128/30013]\n",
      "loss: 0.628216  [12928/30013]\n",
      "loss: 0.399275  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.4%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.486128  [  128/30013]\n",
      "loss: 0.559640  [12928/30013]\n",
      "loss: 0.445060  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.4%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.517476  [  128/30013]\n",
      "loss: 0.592420  [12928/30013]\n",
      "loss: 0.424610  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.9%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.509328  [  128/30013]\n",
      "loss: 0.430952  [12928/30013]\n",
      "loss: 0.344476  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.2%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.397493  [  128/30013]\n",
      "loss: 0.500966  [12928/30013]\n",
      "loss: 0.384216  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.1%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.487380  [  128/30013]\n",
      "loss: 0.415272  [12928/30013]\n",
      "loss: 0.416650  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.5%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.483283  [  128/30013]\n",
      "loss: 0.484864  [12928/30013]\n",
      "loss: 0.376453  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.6%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.564391  [  128/30013]\n",
      "loss: 0.546233  [12928/30013]\n",
      "loss: 0.366841  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.3%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.481140  [  128/30013]\n",
      "loss: 0.452686  [12928/30013]\n",
      "loss: 0.372559  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.6%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.414443  [  128/30013]\n",
      "loss: 0.374551  [12928/30013]\n",
      "loss: 0.343180  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.538108  [  128/30013]\n",
      "loss: 0.462801  [12928/30013]\n",
      "loss: 0.384567  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.9%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.514955  [  128/30013]\n",
      "loss: 0.476686  [12928/30013]\n",
      "loss: 0.374500  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.2%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.519062  [  128/30013]\n",
      "loss: 0.489484  [12928/30013]\n",
      "loss: 0.327758  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.5%\n",
      "Test Error: \n",
      " Accuracy: 86.2%, Avg loss: 0.379379 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 97.2%, Avg loss: 0.139401 \n",
      "\n",
      "train: 0.9716123013360877 test: 0.862\n",
      " Error: \n",
      " Accuracy: 91.4%  \n",
      "\n",
      "AUC value is: 0.5278108068889181\n",
      "Accuracy is: 0.49983333333333335\n",
      "AUC value is: 0.5311926964128507\n",
      "Accuracy is: 0.491\n",
      "Test Error: \n",
      " Accuracy: 91.4%, Avg loss: 0.264357 \n",
      "\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.626681  [  128/30013]\n",
      "loss: 4.026300  [12928/30013]\n",
      "loss: 2.809692  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 13.9%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 2.523779  [  128/30013]\n",
      "loss: 2.111935  [12928/30013]\n",
      "loss: 1.766751  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 41.6%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 1.850596  [  128/30013]\n",
      "loss: 1.639399  [12928/30013]\n",
      "loss: 1.477080  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.6%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 1.565612  [  128/30013]\n",
      "loss: 1.425628  [12928/30013]\n",
      "loss: 1.347870  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 56.1%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 1.390539  [  128/30013]\n",
      "loss: 1.244777  [12928/30013]\n",
      "loss: 1.194785  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.9%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 1.350902  [  128/30013]\n",
      "loss: 1.268238  [12928/30013]\n",
      "loss: 1.070449  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.7%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.193728  [  128/30013]\n",
      "loss: 1.145854  [12928/30013]\n",
      "loss: 1.071443  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.0%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.179465  [  128/30013]\n",
      "loss: 0.931869  [12928/30013]\n",
      "loss: 0.905886  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 66.5%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 1.035469  [  128/30013]\n",
      "loss: 1.011194  [12928/30013]\n",
      "loss: 0.862448  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.4%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 1.000962  [  128/30013]\n",
      "loss: 0.959224  [12928/30013]\n",
      "loss: 0.862325  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.6%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.024783  [  128/30013]\n",
      "loss: 0.859510  [12928/30013]\n",
      "loss: 0.829451  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.4%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.916493  [  128/30013]\n",
      "loss: 0.849476  [12928/30013]\n",
      "loss: 0.801938  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.4%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.871614  [  128/30013]\n",
      "loss: 0.872633  [12928/30013]\n",
      "loss: 0.749092  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.5%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.924775  [  128/30013]\n",
      "loss: 0.838128  [12928/30013]\n",
      "loss: 0.762619  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.8%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.821830  [  128/30013]\n",
      "loss: 0.839544  [12928/30013]\n",
      "loss: 0.610670  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.0%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.877718  [  128/30013]\n",
      "loss: 0.801884  [12928/30013]\n",
      "loss: 0.640049  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.5%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.800766  [  128/30013]\n",
      "loss: 0.741033  [12928/30013]\n",
      "loss: 0.712043  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.8%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.640768  [  128/30013]\n",
      "loss: 0.679129  [12928/30013]\n",
      "loss: 0.708067  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.9%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.652001  [  128/30013]\n",
      "loss: 0.776215  [12928/30013]\n",
      "loss: 0.552918  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.0%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.722470  [  128/30013]\n",
      "loss: 0.689006  [12928/30013]\n",
      "loss: 0.625504  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.9%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.702680  [  128/30013]\n",
      "loss: 0.770175  [12928/30013]\n",
      "loss: 0.604559  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.8%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.657581  [  128/30013]\n",
      "loss: 0.714577  [12928/30013]\n",
      "loss: 0.576340  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.2%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.804573  [  128/30013]\n",
      "loss: 0.771556  [12928/30013]\n",
      "loss: 0.591648  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.1%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.651425  [  128/30013]\n",
      "loss: 0.641813  [12928/30013]\n",
      "loss: 0.582577  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.2%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.736587  [  128/30013]\n",
      "loss: 0.611633  [12928/30013]\n",
      "loss: 0.569531  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.2%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.683218  [  128/30013]\n",
      "loss: 0.677590  [12928/30013]\n",
      "loss: 0.487993  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.0%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.611995  [  128/30013]\n",
      "loss: 0.599112  [12928/30013]\n",
      "loss: 0.520140  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.2%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.595191  [  128/30013]\n",
      "loss: 0.603179  [12928/30013]\n",
      "loss: 0.541847  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.6%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.750815  [  128/30013]\n",
      "loss: 0.567280  [12928/30013]\n",
      "loss: 0.473221  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.2%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.644506  [  128/30013]\n",
      "loss: 0.547142  [12928/30013]\n",
      "loss: 0.627630  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.3%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.663951  [  128/30013]\n",
      "loss: 0.618297  [12928/30013]\n",
      "loss: 0.447612  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.2%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.672498  [  128/30013]\n",
      "loss: 0.525578  [12928/30013]\n",
      "loss: 0.443108  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.4%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.676336  [  128/30013]\n",
      "loss: 0.489515  [12928/30013]\n",
      "loss: 0.503680  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.8%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.592168  [  128/30013]\n",
      "loss: 0.565224  [12928/30013]\n",
      "loss: 0.505548  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.1%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.540070  [  128/30013]\n",
      "loss: 0.580772  [12928/30013]\n",
      "loss: 0.590403  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.3%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.580039  [  128/30013]\n",
      "loss: 0.532251  [12928/30013]\n",
      "loss: 0.539375  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.3%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.559147  [  128/30013]\n",
      "loss: 0.519853  [12928/30013]\n",
      "loss: 0.480630  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.2%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.607596  [  128/30013]\n",
      "loss: 0.562716  [12928/30013]\n",
      "loss: 0.415267  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.0%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.612799  [  128/30013]\n",
      "loss: 0.597652  [12928/30013]\n",
      "loss: 0.425335  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.2%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.589378  [  128/30013]\n",
      "loss: 0.623428  [12928/30013]\n",
      "loss: 0.455509  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.2%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.595317  [  128/30013]\n",
      "loss: 0.536307  [12928/30013]\n",
      "loss: 0.398171  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.1%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.560315  [  128/30013]\n",
      "loss: 0.577815  [12928/30013]\n",
      "loss: 0.454251  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.7%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.611788  [  128/30013]\n",
      "loss: 0.505896  [12928/30013]\n",
      "loss: 0.428256  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.7%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.537811  [  128/30013]\n",
      "loss: 0.517004  [12928/30013]\n",
      "loss: 0.369858  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.2%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.461233  [  128/30013]\n",
      "loss: 0.509487  [12928/30013]\n",
      "loss: 0.469308  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.9%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.526719  [  128/30013]\n",
      "loss: 0.450512  [12928/30013]\n",
      "loss: 0.432598  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.3%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.601262  [  128/30013]\n",
      "loss: 0.463310  [12928/30013]\n",
      "loss: 0.462980  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.6%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.506772  [  128/30013]\n",
      "loss: 0.477489  [12928/30013]\n",
      "loss: 0.487863  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.6%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.528888  [  128/30013]\n",
      "loss: 0.643716  [12928/30013]\n",
      "loss: 0.404904  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.8%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.619480  [  128/30013]\n",
      "loss: 0.530560  [12928/30013]\n",
      "loss: 0.429933  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.7%\n",
      "Test Error: \n",
      " Accuracy: 85.5%, Avg loss: 0.406148 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 95.0%, Avg loss: 0.221648 \n",
      "\n",
      "train: 0.9504881218138806 test: 0.85525\n",
      " Error: \n",
      " Accuracy: 90.1%  \n",
      "\n",
      "AUC value is: 0.5104645264094501\n",
      "Accuracy is: 0.49978333333333336\n",
      "AUC value is: 0.5162513724960911\n",
      "Accuracy is: 0.49075\n",
      "Test Error: \n",
      " Accuracy: 90.1%, Avg loss: 0.318414 \n",
      "\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.600365  [  128/30013]\n",
      "loss: 4.025090  [12928/30013]\n",
      "loss: 2.896778  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 12.2%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 2.646130  [  128/30013]\n",
      "loss: 2.129632  [12928/30013]\n",
      "loss: 1.827472  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 40.8%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 1.842540  [  128/30013]\n",
      "loss: 1.540026  [12928/30013]\n",
      "loss: 1.510671  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.6%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 1.548324  [  128/30013]\n",
      "loss: 1.455202  [12928/30013]\n",
      "loss: 1.415171  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 56.0%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 1.503998  [  128/30013]\n",
      "loss: 1.355775  [12928/30013]\n",
      "loss: 1.135381  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.0%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 1.350536  [  128/30013]\n",
      "loss: 1.259227  [12928/30013]\n",
      "loss: 1.051929  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.4%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.314269  [  128/30013]\n",
      "loss: 1.143339  [12928/30013]\n",
      "loss: 0.977282  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 63.8%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.174293  [  128/30013]\n",
      "loss: 1.079715  [12928/30013]\n",
      "loss: 1.057010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.7%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 1.104608  [  128/30013]\n",
      "loss: 1.002696  [12928/30013]\n",
      "loss: 1.018977  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.0%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 1.069829  [  128/30013]\n",
      "loss: 0.949207  [12928/30013]\n",
      "loss: 0.836233  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.4%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.100926  [  128/30013]\n",
      "loss: 0.911918  [12928/30013]\n",
      "loss: 0.855578  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.6%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 1.011966  [  128/30013]\n",
      "loss: 0.869874  [12928/30013]\n",
      "loss: 0.748507  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.4%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 1.050266  [  128/30013]\n",
      "loss: 0.792830  [12928/30013]\n",
      "loss: 0.767446  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.5%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.943103  [  128/30013]\n",
      "loss: 0.811471  [12928/30013]\n",
      "loss: 0.848362  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.3%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.884889  [  128/30013]\n",
      "loss: 0.796566  [12928/30013]\n",
      "loss: 0.764133  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.0%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.914669  [  128/30013]\n",
      "loss: 0.806371  [12928/30013]\n",
      "loss: 0.778952  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.2%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.910708  [  128/30013]\n",
      "loss: 0.826834  [12928/30013]\n",
      "loss: 0.692857  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.8%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.871175  [  128/30013]\n",
      "loss: 0.865126  [12928/30013]\n",
      "loss: 0.716508  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.5%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.854282  [  128/30013]\n",
      "loss: 0.786734  [12928/30013]\n",
      "loss: 0.679614  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.0%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.819149  [  128/30013]\n",
      "loss: 0.762886  [12928/30013]\n",
      "loss: 0.778298  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.6%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.803147  [  128/30013]\n",
      "loss: 0.679174  [12928/30013]\n",
      "loss: 0.676423  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.1%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.784547  [  128/30013]\n",
      "loss: 0.691300  [12928/30013]\n",
      "loss: 0.624198  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.5%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.818647  [  128/30013]\n",
      "loss: 0.690381  [12928/30013]\n",
      "loss: 0.623666  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.8%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.684389  [  128/30013]\n",
      "loss: 0.691628  [12928/30013]\n",
      "loss: 0.702354  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.7%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.719832  [  128/30013]\n",
      "loss: 0.792289  [12928/30013]\n",
      "loss: 0.634649  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.5%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.860588  [  128/30013]\n",
      "loss: 0.736141  [12928/30013]\n",
      "loss: 0.563297  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.0%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.742339  [  128/30013]\n",
      "loss: 0.700659  [12928/30013]\n",
      "loss: 0.626353  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.2%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.749386  [  128/30013]\n",
      "loss: 0.612292  [12928/30013]\n",
      "loss: 0.617662  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.5%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.712607  [  128/30013]\n",
      "loss: 0.714370  [12928/30013]\n",
      "loss: 0.591533  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.7%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.655611  [  128/30013]\n",
      "loss: 0.685434  [12928/30013]\n",
      "loss: 0.571068  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.9%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.716320  [  128/30013]\n",
      "loss: 0.628741  [12928/30013]\n",
      "loss: 0.569565  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.3%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.663554  [  128/30013]\n",
      "loss: 0.735246  [12928/30013]\n",
      "loss: 0.540380  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.5%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.676105  [  128/30013]\n",
      "loss: 0.650473  [12928/30013]\n",
      "loss: 0.575480  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.7%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.662984  [  128/30013]\n",
      "loss: 0.553617  [12928/30013]\n",
      "loss: 0.554488  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.1%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.698669  [  128/30013]\n",
      "loss: 0.616247  [12928/30013]\n",
      "loss: 0.575471  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.9%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.709556  [  128/30013]\n",
      "loss: 0.596250  [12928/30013]\n",
      "loss: 0.536105  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.2%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.709145  [  128/30013]\n",
      "loss: 0.670861  [12928/30013]\n",
      "loss: 0.568941  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.7%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.721331  [  128/30013]\n",
      "loss: 0.641685  [12928/30013]\n",
      "loss: 0.527178  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.5%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.590667  [  128/30013]\n",
      "loss: 0.554180  [12928/30013]\n",
      "loss: 0.499208  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.7%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.496813  [  128/30013]\n",
      "loss: 0.634346  [12928/30013]\n",
      "loss: 0.503180  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.8%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.588682  [  128/30013]\n",
      "loss: 0.566204  [12928/30013]\n",
      "loss: 0.491720  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.7%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.694752  [  128/30013]\n",
      "loss: 0.539176  [12928/30013]\n",
      "loss: 0.500259  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.8%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.569618  [  128/30013]\n",
      "loss: 0.588120  [12928/30013]\n",
      "loss: 0.440769  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.2%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.678409  [  128/30013]\n",
      "loss: 0.617698  [12928/30013]\n",
      "loss: 0.540679  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.2%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.610108  [  128/30013]\n",
      "loss: 0.546383  [12928/30013]\n",
      "loss: 0.456041  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.4%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.625311  [  128/30013]\n",
      "loss: 0.622709  [12928/30013]\n",
      "loss: 0.549207  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.6%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.628571  [  128/30013]\n",
      "loss: 0.542355  [12928/30013]\n",
      "loss: 0.491192  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.539220  [  128/30013]\n",
      "loss: 0.537613  [12928/30013]\n",
      "loss: 0.420182  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.8%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.651741  [  128/30013]\n",
      "loss: 0.567825  [12928/30013]\n",
      "loss: 0.468894  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.1%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.736474  [  128/30013]\n",
      "loss: 0.561445  [12928/30013]\n",
      "loss: 0.531068  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.0%\n",
      "Test Error: \n",
      " Accuracy: 85.2%, Avg loss: 0.444809 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 93.3%, Avg loss: 0.292013 \n",
      "\n",
      "train: 0.9326291940159265 test: 0.852\n",
      " Error: \n",
      " Accuracy: 89.0%  \n",
      "\n",
      "AUC value is: 0.5047138014407028\n",
      "Accuracy is: 0.49978333333333336\n",
      "AUC value is: 0.508817931100256\n",
      "Accuracy is: 0.49065\n",
      "Test Error: \n",
      " Accuracy: 89.0%, Avg loss: 0.371230 \n",
      "\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.636424  [  128/30013]\n",
      "loss: 4.033350  [12928/30013]\n",
      "loss: 2.952557  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 11.5%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 2.694256  [  128/30013]\n",
      "loss: 2.225832  [12928/30013]\n",
      "loss: 1.926853  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 39.1%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 1.917063  [  128/30013]\n",
      "loss: 1.704982  [12928/30013]\n",
      "loss: 1.579432  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 50.8%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 1.617297  [  128/30013]\n",
      "loss: 1.463523  [12928/30013]\n",
      "loss: 1.364879  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 55.2%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 1.543891  [  128/30013]\n",
      "loss: 1.385372  [12928/30013]\n",
      "loss: 1.401300  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 57.2%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 1.434452  [  128/30013]\n",
      "loss: 1.278677  [12928/30013]\n",
      "loss: 1.275673  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 58.5%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.427657  [  128/30013]\n",
      "loss: 1.214476  [12928/30013]\n",
      "loss: 1.221854  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.6%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.391470  [  128/30013]\n",
      "loss: 1.208985  [12928/30013]\n",
      "loss: 1.149844  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.8%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 1.327110  [  128/30013]\n",
      "loss: 1.186638  [12928/30013]\n",
      "loss: 1.073648  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.5%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 1.252490  [  128/30013]\n",
      "loss: 1.063032  [12928/30013]\n",
      "loss: 1.137359  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.1%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.304759  [  128/30013]\n",
      "loss: 1.160948  [12928/30013]\n",
      "loss: 1.081625  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 63.0%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 1.290439  [  128/30013]\n",
      "loss: 1.137814  [12928/30013]\n",
      "loss: 1.035744  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 63.4%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 1.309662  [  128/30013]\n",
      "loss: 1.160718  [12928/30013]\n",
      "loss: 1.072906  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 64.7%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 1.123108  [  128/30013]\n",
      "loss: 1.123947  [12928/30013]\n",
      "loss: 1.015293  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.3%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 1.146143  [  128/30013]\n",
      "loss: 1.038970  [12928/30013]\n",
      "loss: 1.037089  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 66.0%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 1.117322  [  128/30013]\n",
      "loss: 1.003156  [12928/30013]\n",
      "loss: 0.980883  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 66.7%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 1.115837  [  128/30013]\n",
      "loss: 1.013608  [12928/30013]\n",
      "loss: 1.077625  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 66.8%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 1.155848  [  128/30013]\n",
      "loss: 1.038568  [12928/30013]\n",
      "loss: 1.059667  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.4%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 1.109843  [  128/30013]\n",
      "loss: 1.070355  [12928/30013]\n",
      "loss: 0.976388  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.0%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 1.113474  [  128/30013]\n",
      "loss: 1.030837  [12928/30013]\n",
      "loss: 0.919820  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.9%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 1.078059  [  128/30013]\n",
      "loss: 0.960936  [12928/30013]\n",
      "loss: 0.954310  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.4%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 1.097857  [  128/30013]\n",
      "loss: 1.000636  [12928/30013]\n",
      "loss: 0.946417  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.6%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 1.029069  [  128/30013]\n",
      "loss: 1.036142  [12928/30013]\n",
      "loss: 0.971633  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.7%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 1.114582  [  128/30013]\n",
      "loss: 0.939037  [12928/30013]\n",
      "loss: 0.935905  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.7%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 1.076549  [  128/30013]\n",
      "loss: 0.963530  [12928/30013]\n",
      "loss: 0.917679  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.8%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 1.046878  [  128/30013]\n",
      "loss: 0.908810  [12928/30013]\n",
      "loss: 0.916566  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.2%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 1.067928  [  128/30013]\n",
      "loss: 0.865499  [12928/30013]\n",
      "loss: 0.958723  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.2%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 1.070727  [  128/30013]\n",
      "loss: 0.984241  [12928/30013]\n",
      "loss: 0.857376  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.6%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 1.067176  [  128/30013]\n",
      "loss: 0.942375  [12928/30013]\n",
      "loss: 0.916830  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.5%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.994030  [  128/30013]\n",
      "loss: 0.896092  [12928/30013]\n",
      "loss: 0.977562  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.9%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 1.003110  [  128/30013]\n",
      "loss: 0.926905  [12928/30013]\n",
      "loss: 0.931965  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.1%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.996803  [  128/30013]\n",
      "loss: 0.923906  [12928/30013]\n",
      "loss: 0.886567  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.8%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 1.070159  [  128/30013]\n",
      "loss: 1.000526  [12928/30013]\n",
      "loss: 0.865468  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.2%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 1.031522  [  128/30013]\n",
      "loss: 0.936015  [12928/30013]\n",
      "loss: 0.897359  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.2%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.966631  [  128/30013]\n",
      "loss: 0.938990  [12928/30013]\n",
      "loss: 0.948231  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.2%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 1.008394  [  128/30013]\n",
      "loss: 0.904618  [12928/30013]\n",
      "loss: 0.990366  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.3%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 1.030674  [  128/30013]\n",
      "loss: 0.948337  [12928/30013]\n",
      "loss: 0.960577  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.988186  [  128/30013]\n",
      "loss: 0.870819  [12928/30013]\n",
      "loss: 0.932202  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.1%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 1.077082  [  128/30013]\n",
      "loss: 1.051192  [12928/30013]\n",
      "loss: 0.978330  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.6%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 1.015605  [  128/30013]\n",
      "loss: 0.952798  [12928/30013]\n",
      "loss: 0.916444  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.3%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 1.034719  [  128/30013]\n",
      "loss: 0.922491  [12928/30013]\n",
      "loss: 0.932539  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.8%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 1.000647  [  128/30013]\n",
      "loss: 0.883845  [12928/30013]\n",
      "loss: 1.018450  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.4%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 1.010864  [  128/30013]\n",
      "loss: 0.935119  [12928/30013]\n",
      "loss: 0.905748  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.8%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.994953  [  128/30013]\n",
      "loss: 0.872127  [12928/30013]\n",
      "loss: 0.897920  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.9%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.965854  [  128/30013]\n",
      "loss: 0.938920  [12928/30013]\n",
      "loss: 0.947690  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 1.066699  [  128/30013]\n",
      "loss: 0.907987  [12928/30013]\n",
      "loss: 0.860728  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.2%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 1.041987  [  128/30013]\n",
      "loss: 0.834605  [12928/30013]\n",
      "loss: 0.842150  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 1.024694  [  128/30013]\n",
      "loss: 0.897776  [12928/30013]\n",
      "loss: 0.904659  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.970685  [  128/30013]\n",
      "loss: 0.885438  [12928/30013]\n",
      "loss: 0.869023  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.4%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 1.058749  [  128/30013]\n",
      "loss: 0.886212  [12928/30013]\n",
      "loss: 0.925048  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.3%\n",
      "Test Error: \n",
      " Accuracy: 75.7%, Avg loss: 0.811490 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 79.9%, Avg loss: 0.733263 \n",
      "\n",
      "train: 0.7992869756438876 test: 0.7568\n",
      " Error: \n",
      " Accuracy: 77.6%  \n",
      "\n",
      "AUC value is: 0.4998313183016586\n",
      "Accuracy is: 0.49978333333333336\n",
      "AUC value is: 0.4942197894701605\n",
      "Accuracy is: 0.4905\n",
      "Test Error: \n",
      " Accuracy: 77.6%, Avg loss: 0.773639 \n",
      "\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.610397  [  128/30013]\n",
      "loss: 4.194408  [12928/30013]\n",
      "loss: 3.086491  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 9.8%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 2.970113  [  128/30013]\n",
      "loss: 2.399531  [12928/30013]\n",
      "loss: 2.145017  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 33.7%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 2.174191  [  128/30013]\n",
      "loss: 1.939084  [12928/30013]\n",
      "loss: 1.914948  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.8%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 1.864460  [  128/30013]\n",
      "loss: 1.725444  [12928/30013]\n",
      "loss: 1.661601  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 49.1%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 1.753866  [  128/30013]\n",
      "loss: 1.571379  [12928/30013]\n",
      "loss: 1.454154  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 54.4%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 1.590845  [  128/30013]\n",
      "loss: 1.476483  [12928/30013]\n",
      "loss: 1.438809  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 56.8%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.549868  [  128/30013]\n",
      "loss: 1.377185  [12928/30013]\n",
      "loss: 1.397029  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 57.7%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.452627  [  128/30013]\n",
      "loss: 1.313823  [12928/30013]\n",
      "loss: 1.353176  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 58.6%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 1.457536  [  128/30013]\n",
      "loss: 1.329557  [12928/30013]\n",
      "loss: 1.354743  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.5%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 1.463095  [  128/30013]\n",
      "loss: 1.290683  [12928/30013]\n",
      "loss: 1.294927  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.8%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.460503  [  128/30013]\n",
      "loss: 1.257050  [12928/30013]\n",
      "loss: 1.269855  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.1%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 1.450913  [  128/30013]\n",
      "loss: 1.255942  [12928/30013]\n",
      "loss: 1.246297  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.9%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 1.367465  [  128/30013]\n",
      "loss: 1.290252  [12928/30013]\n",
      "loss: 1.227917  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.8%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 1.342579  [  128/30013]\n",
      "loss: 1.246462  [12928/30013]\n",
      "loss: 1.255244  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.6%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 1.372787  [  128/30013]\n",
      "loss: 1.253356  [12928/30013]\n",
      "loss: 1.238165  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.8%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 1.390071  [  128/30013]\n",
      "loss: 1.220970  [12928/30013]\n",
      "loss: 1.230694  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.6%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 1.395454  [  128/30013]\n",
      "loss: 1.206432  [12928/30013]\n",
      "loss: 1.268427  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.2%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 1.373177  [  128/30013]\n",
      "loss: 1.245015  [12928/30013]\n",
      "loss: 1.263344  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.6%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 1.358632  [  128/30013]\n",
      "loss: 1.271326  [12928/30013]\n",
      "loss: 1.217774  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.4%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 1.373912  [  128/30013]\n",
      "loss: 1.222352  [12928/30013]\n",
      "loss: 1.156695  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.8%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 1.332367  [  128/30013]\n",
      "loss: 1.292860  [12928/30013]\n",
      "loss: 1.230715  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.7%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 1.316380  [  128/30013]\n",
      "loss: 1.215894  [12928/30013]\n",
      "loss: 1.270759  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.8%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 1.368206  [  128/30013]\n",
      "loss: 1.171352  [12928/30013]\n",
      "loss: 1.231138  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.0%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 1.337731  [  128/30013]\n",
      "loss: 1.227820  [12928/30013]\n",
      "loss: 1.226476  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.7%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 1.349350  [  128/30013]\n",
      "loss: 1.192824  [12928/30013]\n",
      "loss: 1.290541  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.1%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 1.370114  [  128/30013]\n",
      "loss: 1.187422  [12928/30013]\n",
      "loss: 1.247576  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.1%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 1.414676  [  128/30013]\n",
      "loss: 1.241263  [12928/30013]\n",
      "loss: 1.238384  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.4%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 1.333863  [  128/30013]\n",
      "loss: 1.221077  [12928/30013]\n",
      "loss: 1.198494  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.9%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 1.334219  [  128/30013]\n",
      "loss: 1.206743  [12928/30013]\n",
      "loss: 1.126855  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.0%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 1.386092  [  128/30013]\n",
      "loss: 1.210850  [12928/30013]\n",
      "loss: 1.249204  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.3%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 1.325635  [  128/30013]\n",
      "loss: 1.200870  [12928/30013]\n",
      "loss: 1.207380  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.3%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 1.388685  [  128/30013]\n",
      "loss: 1.181496  [12928/30013]\n",
      "loss: 1.190954  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.3%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 1.339767  [  128/30013]\n",
      "loss: 1.211193  [12928/30013]\n",
      "loss: 1.199947  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.4%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 1.333515  [  128/30013]\n",
      "loss: 1.214076  [12928/30013]\n",
      "loss: 1.260024  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.3%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 1.299796  [  128/30013]\n",
      "loss: 1.175821  [12928/30013]\n",
      "loss: 1.275438  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.5%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 1.357211  [  128/30013]\n",
      "loss: 1.249107  [12928/30013]\n",
      "loss: 1.175884  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.3%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 1.295209  [  128/30013]\n",
      "loss: 1.197450  [12928/30013]\n",
      "loss: 1.210349  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.1%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 1.311972  [  128/30013]\n",
      "loss: 1.196221  [12928/30013]\n",
      "loss: 1.183016  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.3%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 1.323226  [  128/30013]\n",
      "loss: 1.205644  [12928/30013]\n",
      "loss: 1.180212  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.5%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 1.362728  [  128/30013]\n",
      "loss: 1.181855  [12928/30013]\n",
      "loss: 1.226123  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.1%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 1.328221  [  128/30013]\n",
      "loss: 1.223342  [12928/30013]\n",
      "loss: 1.159177  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.4%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 1.329392  [  128/30013]\n",
      "loss: 1.177753  [12928/30013]\n",
      "loss: 1.149042  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.4%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 1.367940  [  128/30013]\n",
      "loss: 1.216851  [12928/30013]\n",
      "loss: 1.218241  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.3%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 1.310505  [  128/30013]\n",
      "loss: 1.255800  [12928/30013]\n",
      "loss: 1.251457  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.3%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 1.334828  [  128/30013]\n",
      "loss: 1.178497  [12928/30013]\n",
      "loss: 1.183632  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.4%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 1.294661  [  128/30013]\n",
      "loss: 1.180102  [12928/30013]\n",
      "loss: 1.207755  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.6%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 1.307868  [  128/30013]\n",
      "loss: 1.207303  [12928/30013]\n",
      "loss: 1.173058  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.7%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 1.314494  [  128/30013]\n",
      "loss: 1.202052  [12928/30013]\n",
      "loss: 1.189825  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.1%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 1.311520  [  128/30013]\n",
      "loss: 1.167786  [12928/30013]\n",
      "loss: 1.152734  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.5%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 1.365180  [  128/30013]\n",
      "loss: 1.209720  [12928/30013]\n",
      "loss: 1.207947  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.4%\n",
      "Test Error: \n",
      " Accuracy: 66.0%, Avg loss: 1.146595 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 69.6%, Avg loss: 1.097310 \n",
      "\n",
      "train: 0.6957984873221604 test: 0.6596\n",
      " Error: \n",
      " Accuracy: 67.9%  \n",
      "\n",
      "AUC value is: 0.49999998499999715\n",
      "Accuracy is: 0.49978333333333336\n",
      "AUC value is: 0.4911711866754784\n",
      "Accuracy is: 0.4905\n",
      "Test Error: \n",
      " Accuracy: 67.9%, Avg loss: 1.125110 \n",
      "\n"
     ]
    }
   ],
   "source": [
    "(x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp = load_Purchase100_limited(0, 100, prop_keep=0.5, seed=0)\n",
    "training_data = CustomDataset(x_train, y_train, model_transform)\n",
    "train_dataloader = DataLoader(training_data, batch_size=batch_size)\n",
    "\n",
    "\n",
    "for l2_norm in l2_norm_list:\n",
    "    \n",
    "    TargetModel = NN_4layer_dropout(x_train.shape[1], y_train.max()+1)\n",
    "    # print(TargetModel)\n",
    "    TargetModel.to(device)\n",
    "    loss_fn = nn.CrossEntropyLoss()\n",
    "    optimizer = torch.optim.Adam(TargetModel.parameters(), lr=2e-4, weight_decay=l2_norm)\n",
    "    # optimizer = torch.optim.Adam(TargetModel.parameters(), lr=2e-4)\n",
    "    for t in range(50):\n",
    "        print(f\"Epoch {t+1}\\n-------------------------------\")\n",
    "        train(train_dataloader, TargetModel, loss_fn, optimizer, device)\n",
    "    \n",
    "    test_acc = evaluate(test_dataloader, TargetModel, loss_fn, device)\n",
    "    train_acc = evaluate(train_dataloader, TargetModel, loss_fn, device)\n",
    "    test_acc_list.append(test_acc)\n",
    "    train_acc_list.append(train_acc)\n",
    "    print(\"train:\",train_acc,\"test:\",test_acc)\n",
    "    distance = train_acc - test_acc\n",
    "    gene_distance.append(distance)\n",
    "    \n",
    "    \n",
    "    x_test = X_data\n",
    "    y_test = Y_data\n",
    "    mem_label = train_keep[0]\n",
    "    # 加载目标模型\n",
    "    \n",
    "    all_data = CustomDataset(x_test, y_test, model_transform)\n",
    "    all_dataloader = DataLoader(all_data, batch_size=batch_size)\n",
    "    \n",
    "    conf_data, label_data = get_model_pred(all_dataloader, TargetModel, device)\n",
    "    conf_data = conf_data.detach().cpu().numpy()\n",
    "    label_data = label_data.detach().cpu().numpy()\n",
    "    conf_data = conf_data.astype(np.float64)\n",
    "    score_tar = cal_score(conf_data.copy(), label_data)\n",
    "    \n",
    "    # 执行影子模型攻击\n",
    "    targetX = conf_data\n",
    "    targetY = mem_label\n",
    "    targetX = targetX.astype(np.float32)\n",
    "    targetX, _ = get_top_k_conf(3, targetX, targetX)\n",
    "    shadow_attack_data = CustomDataset(targetX, targetY, attack_transform)\n",
    "    shadow_attack_dataloader = DataLoader(shadow_attack_data, batch_size=batch_size, shuffle=False)\n",
    "    attack_test_scores, attack_test_mem = get_attack_pred(shadow_attack_dataloader, attack_model, device)\n",
    "    attack_test_scores, attack_test_mem = attack_test_scores.detach().cpu().numpy(), attack_test_mem.detach().cpu().numpy()\n",
    "    accuracy = evaluate_ROC(attack_test_scores, attack_test_mem)\n",
    "    shadow_result.append(accuracy)\n",
    "    \n",
    "    # 执行风险评估攻击\n",
    "    score_tar = cal_score(conf_data.copy(), label_data)\n",
    "    pri_risk_t = pri_risk_all\n",
    "    pri_risk_rank_t = np.argsort(pri_risk_t)\n",
    "    pri_risk_rank_t = np.flip(pri_risk_rank_t)\n",
    "    \n",
    "    pred_result = LIRA_attack(train_keep, score_all, score_tar, mem_label)\n",
    "    evaluate_ROC(pred_result, mem_label, threshold=0)\n",
    "    pred_clip = pred_result[pri_risk_rank_t[:x]]\n",
    "    mem_clip = mem_label[pri_risk_rank_t[:x]]\n",
    "    pred_clip = pred_clip > 0\n",
    "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    LIRA_result.append(accuracy)\n",
    "    \n",
    "    \n",
    "    # 执行基线攻击\n",
    "    loss_fn = nn.CrossEntropyLoss()\n",
    "    pred_result = base_attack(all_dataloader, TargetModel, loss_fn, device)\n",
    "    accuracy = metrics.accuracy_score(train_keep[tar_model], pred_result)\n",
    "    base_result.append(accuracy)\n",
    "    \n",
    "    \n",
    "    \n",
    "    pred_clip = pred_result[pri_risk_rank[:x]]\n",
    "    mem_clip = train_keep[tar_model][pri_risk_rank[:x]]\n",
    "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    risk_base_result.append(accuracy)\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "328b7c87-f8df-4274-93d7-a11557ae127e",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "4b56524a-b91d-4511-8e47-0ce860805fcd",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.862, 0.85525, 0.852, 0.7568, 0.6596]"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "test_acc_list"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "abbfeaf5-e1b5-46f4-9e4c-a9ec2892b26f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.9716123013360877,\n",
       " 0.9504881218138806,\n",
       " 0.9326291940159265,\n",
       " 0.7992869756438876,\n",
       " 0.6957984873221604]"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_acc_list"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "3e2676d8-4698-44fb-a0d1-15b6df4560fa",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.10961230133608768,\n",
       " 0.0952381218138807,\n",
       " 0.0806291940159265,\n",
       " 0.04248697564388759,\n",
       " 0.036198487322160466]"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "gene_distance"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "24fe6e95-2b05-4bc9-a827-9a026c9d3ea5",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.49983333333333335,\n",
       " 0.49978333333333336,\n",
       " 0.49978333333333336,\n",
       " 0.49978333333333336,\n",
       " 0.49978333333333336]"
      ]
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "shadow_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "ec2fbd3d-4764-4784-ad1f-9ff8c935e7c2",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.526, 0.5185, 0.5155, 0.511, 0.511]"
      ]
     },
     "execution_count": 27,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "LIRA_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "9194ce21-fc7d-4aa6-9b0e-1a35cbed0607",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.5580666666666667, 0.5493, 0.5428, 0.5237666666666667, 0.517]"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "base_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "af87f50b-3564-420d-bb08-29a4885c972e",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.751, 0.6925, 0.6475, 0.547, 0.5175]"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "risk_base_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "5d6b829d-21b6-437f-9abe-5a6bfa367160",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "1e9bd7a3-8594-434b-a3f3-555d4dd49e23",
   "metadata": {},
   "source": [
    "### 绘制训练轮次的影响"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "id": "cc91b000-a191-4ab8-b8cc-d49de530135c",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = 600\n",
    "shadow_result = []\n",
    "LIRA_result = []\n",
    "gene_distance = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "id": "d496b36f-0e3e-406e-8216-13d8576238e9",
   "metadata": {},
   "outputs": [],
   "source": [
    "dataframe = pd.read_csv('../datasets/purchase100.txt', sep=',', encoding='utf-8', header=None)\n",
    "X_tmp = dataframe.iloc[:, range(600)].values\n",
    "Y_tmp = np.array([i for i in dataframe.loc[:, 600]])\n",
    "\n",
    "x_test_data = X_tmp[90000:110000]\n",
    "y_test_data = Y_tmp[90000:110000]\n",
    "test_data = CustomDataset(x_test_data, y_test_data, model_transform)\n",
    "test_dataloader = DataLoader(test_data, batch_size=batch_size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "id": "c4187ef7-c01b-45e0-b3ca-bf08db7d8e4b",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.603002  [  128/30013]\n",
      "loss: 1.796787  [12928/30013]\n",
      "loss: 1.205468  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.3%\n",
      "Test Error: \n",
      " Accuracy: 67.5%, Avg loss: 1.138990 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 71.0%, Avg loss: 1.075472 \n",
      "\n",
      " Error: \n",
      " Accuracy: 69.2%  \n",
      "\n",
      "AUC value is: 0.5\n",
      "Accuracy is: 0.49978333333333336\n",
      "AUC value is: 0.49187268791831584\n",
      "Accuracy is: 0.4904833333333333\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.197462  [  128/30013]\n",
      "loss: 0.922365  [12928/30013]\n",
      "loss: 0.690830  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.6%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.789946  [  128/30013]\n",
      "loss: 0.645867  [12928/30013]\n",
      "loss: 0.491940  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.3%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.613003  [  128/30013]\n",
      "loss: 0.488294  [12928/30013]\n",
      "loss: 0.359735  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.1%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.452823  [  128/30013]\n",
      "loss: 0.388609  [12928/30013]\n",
      "loss: 0.310947  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.6%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.362796  [  128/30013]\n",
      "loss: 0.323643  [12928/30013]\n",
      "loss: 0.241711  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.7%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.315609  [  128/30013]\n",
      "loss: 0.284416  [12928/30013]\n",
      "loss: 0.214238  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.1%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.288243  [  128/30013]\n",
      "loss: 0.283670  [12928/30013]\n",
      "loss: 0.186572  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.9%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.224774  [  128/30013]\n",
      "loss: 0.268019  [12928/30013]\n",
      "loss: 0.163795  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.1%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.166801  [  128/30013]\n",
      "loss: 0.176664  [12928/30013]\n",
      "loss: 0.211484  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.0%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.143939  [  128/30013]\n",
      "loss: 0.122112  [12928/30013]\n",
      "loss: 0.099837  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.8%\n",
      "Test Error: \n",
      " Accuracy: 85.0%, Avg loss: 0.417224 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 97.3%, Avg loss: 0.118921 \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.7%  \n",
      "\n",
      "AUC value is: 0.5131579435818805\n",
      "Accuracy is: 0.50005\n",
      "AUC value is: 0.5422853529402496\n",
      "Accuracy is: 0.49241666666666667\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.094375  [  128/30013]\n",
      "loss: 0.112280  [12928/30013]\n",
      "loss: 0.096828  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.1%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.074953  [  128/30013]\n",
      "loss: 0.106509  [12928/30013]\n",
      "loss: 0.143785  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.096708  [  128/30013]\n",
      "loss: 0.085182  [12928/30013]\n",
      "loss: 0.042867  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.091986  [  128/30013]\n",
      "loss: 0.082605  [12928/30013]\n",
      "loss: 0.054505  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.054995  [  128/30013]\n",
      "loss: 0.082010  [12928/30013]\n",
      "loss: 0.045649  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.078635  [  128/30013]\n",
      "loss: 0.070579  [12928/30013]\n",
      "loss: 0.058944  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.054162  [  128/30013]\n",
      "loss: 0.104554  [12928/30013]\n",
      "loss: 0.033725  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.042942  [  128/30013]\n",
      "loss: 0.037276  [12928/30013]\n",
      "loss: 0.022186  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.040145  [  128/30013]\n",
      "loss: 0.044437  [12928/30013]\n",
      "loss: 0.021620  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.026568  [  128/30013]\n",
      "loss: 0.052642  [12928/30013]\n",
      "loss: 0.019182  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Test Error: \n",
      " Accuracy: 85.4%, Avg loss: 0.445127 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 99.1%, Avg loss: 0.040760 \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.9%  \n",
      "\n",
      "AUC value is: 0.5562985483493941\n",
      "Accuracy is: 0.5076\n",
      "AUC value is: 0.5910443232072118\n",
      "Accuracy is: 0.5021\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.048079  [  128/30013]\n",
      "loss: 0.044033  [12928/30013]\n",
      "loss: 0.034367  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.026590  [  128/30013]\n",
      "loss: 0.030533  [12928/30013]\n",
      "loss: 0.074600  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.044520  [  128/30013]\n",
      "loss: 0.024995  [12928/30013]\n",
      "loss: 0.018689  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.048791  [  128/30013]\n",
      "loss: 0.024151  [12928/30013]\n",
      "loss: 0.025995  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.039210  [  128/30013]\n",
      "loss: 0.058489  [12928/30013]\n",
      "loss: 0.019032  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.040988  [  128/30013]\n",
      "loss: 0.053645  [12928/30013]\n",
      "loss: 0.013430  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.032259  [  128/30013]\n",
      "loss: 0.085496  [12928/30013]\n",
      "loss: 0.014098  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.030432  [  128/30013]\n",
      "loss: 0.025487  [12928/30013]\n",
      "loss: 0.013656  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.019340  [  128/30013]\n",
      "loss: 0.085886  [12928/30013]\n",
      "loss: 0.011989  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.100187  [  128/30013]\n",
      "loss: 0.012565  [12928/30013]\n",
      "loss: 0.022254  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Test Error: \n",
      " Accuracy: 85.5%, Avg loss: 0.460373 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 99.5%, Avg loss: 0.021772 \n",
      "\n",
      " Error: \n",
      " Accuracy: 92.2%  \n",
      "\n",
      "AUC value is: 0.5918923589220096\n",
      "Accuracy is: 0.5239166666666667\n",
      "AUC value is: 0.6223466424184251\n",
      "Accuracy is: 0.51425\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.010372  [  128/30013]\n",
      "loss: 0.007639  [12928/30013]\n",
      "loss: 0.017740  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.007965  [  128/30013]\n",
      "loss: 0.017528  [12928/30013]\n",
      "loss: 0.006821  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.009264  [  128/30013]\n",
      "loss: 0.025965  [12928/30013]\n",
      "loss: 0.004207  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.011020  [  128/30013]\n",
      "loss: 0.004765  [12928/30013]\n",
      "loss: 0.068273  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.016859  [  128/30013]\n",
      "loss: 0.006658  [12928/30013]\n",
      "loss: 0.016381  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.015579  [  128/30013]\n",
      "loss: 0.022807  [12928/30013]\n",
      "loss: 0.046748  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.053256  [  128/30013]\n",
      "loss: 0.059276  [12928/30013]\n",
      "loss: 0.013261  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.022677  [  128/30013]\n",
      "loss: 0.011571  [12928/30013]\n",
      "loss: 0.013195  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.016559  [  128/30013]\n",
      "loss: 0.006899  [12928/30013]\n",
      "loss: 0.002332  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.005222  [  128/30013]\n",
      "loss: 0.003464  [12928/30013]\n",
      "loss: 0.007483  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Test Error: \n",
      " Accuracy: 84.8%, Avg loss: 0.542404 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 99.5%, Avg loss: 0.018200 \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.9%  \n",
      "\n",
      "AUC value is: 0.6130667512314233\n",
      "Accuracy is: 0.5486\n",
      "AUC value is: 0.6462411313497236\n",
      "Accuracy is: 0.53045\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.008886  [  128/30013]\n",
      "loss: 0.019553  [12928/30013]\n",
      "loss: 0.002772  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.005325  [  128/30013]\n",
      "loss: 0.006745  [12928/30013]\n",
      "loss: 0.002019  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.079924  [  128/30013]\n",
      "loss: 0.041721  [12928/30013]\n",
      "loss: 0.013676  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.034620  [  128/30013]\n",
      "loss: 0.088289  [12928/30013]\n",
      "loss: 0.066740  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.008972  [  128/30013]\n",
      "loss: 0.007237  [12928/30013]\n",
      "loss: 0.006242  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.032670  [  128/30013]\n",
      "loss: 0.022481  [12928/30013]\n",
      "loss: 0.006564  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.018016  [  128/30013]\n",
      "loss: 0.016588  [12928/30013]\n",
      "loss: 0.002508  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.002057  [  128/30013]\n",
      "loss: 0.002692  [12928/30013]\n",
      "loss: 0.001767  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.006158  [  128/30013]\n",
      "loss: 0.007469  [12928/30013]\n",
      "loss: 0.025116  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.019341  [  128/30013]\n",
      "loss: 0.018537  [12928/30013]\n",
      "loss: 0.017831  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Test Error: \n",
      " Accuracy: 83.7%, Avg loss: 0.603126 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 99.1%, Avg loss: 0.030622 \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.3%  \n",
      "\n",
      "AUC value is: 0.6091559616081751\n",
      "Accuracy is: 0.5525666666666667\n",
      "AUC value is: 0.6438589631246275\n",
      "Accuracy is: 0.5347166666666666\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.018246  [  128/30013]\n",
      "loss: 0.019710  [12928/30013]\n",
      "loss: 0.008366  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.103968  [  128/30013]\n",
      "loss: 0.012676  [12928/30013]\n",
      "loss: 0.020401  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.006132  [  128/30013]\n",
      "loss: 0.003860  [12928/30013]\n",
      "loss: 0.003114  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.005463  [  128/30013]\n",
      "loss: 0.015817  [12928/30013]\n",
      "loss: 0.001381  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.004378  [  128/30013]\n",
      "loss: 0.001081  [12928/30013]\n",
      "loss: 0.002033  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.001086  [  128/30013]\n",
      "loss: 0.000817  [12928/30013]\n",
      "loss: 0.000716  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000785  [  128/30013]\n",
      "loss: 0.000673  [12928/30013]\n",
      "loss: 0.000580  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000686  [  128/30013]\n",
      "loss: 0.000582  [12928/30013]\n",
      "loss: 0.000503  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000601  [  128/30013]\n",
      "loss: 0.000515  [12928/30013]\n",
      "loss: 0.000448  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000535  [  128/30013]\n",
      "loss: 0.000463  [12928/30013]\n",
      "loss: 0.000405  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Test Error: \n",
      " Accuracy: 87.3%, Avg loss: 0.463792 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 100.0%, Avg loss: 0.000445 \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.5%  \n",
      "\n",
      "AUC value is: 0.6773119638508021\n",
      "Accuracy is: 0.6077333333333333\n",
      "AUC value is: 0.7260584185598586\n",
      "Accuracy is: 0.5772333333333334\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000482  [  128/30013]\n",
      "loss: 0.000420  [12928/30013]\n",
      "loss: 0.000369  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000437  [  128/30013]\n",
      "loss: 0.000384  [12928/30013]\n",
      "loss: 0.000339  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000399  [  128/30013]\n",
      "loss: 0.000353  [12928/30013]\n",
      "loss: 0.000311  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000365  [  128/30013]\n",
      "loss: 0.000325  [12928/30013]\n",
      "loss: 0.000287  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000336  [  128/30013]\n",
      "loss: 0.000300  [12928/30013]\n",
      "loss: 0.000265  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000309  [  128/30013]\n",
      "loss: 0.000277  [12928/30013]\n",
      "loss: 0.000245  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000286  [  128/30013]\n",
      "loss: 0.000256  [12928/30013]\n",
      "loss: 0.000227  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000264  [  128/30013]\n",
      "loss: 0.000237  [12928/30013]\n",
      "loss: 0.000210  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000244  [  128/30013]\n",
      "loss: 0.000220  [12928/30013]\n",
      "loss: 0.000194  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000226  [  128/30013]\n",
      "loss: 0.000204  [12928/30013]\n",
      "loss: 0.000179  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Test Error: \n",
      " Accuracy: 87.7%, Avg loss: 0.474038 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 100.0%, Avg loss: 0.000194 \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.5%  \n",
      "\n",
      "AUC value is: 0.6877229630279786\n",
      "Accuracy is: 0.6384666666666666\n",
      "AUC value is: 0.7525448524223112\n",
      "Accuracy is: 0.6008333333333333\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000209  [  128/30013]\n",
      "loss: 0.000189  [12928/30013]\n",
      "loss: 0.000166  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000193  [  128/30013]\n",
      "loss: 0.000175  [12928/30013]\n",
      "loss: 0.000153  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000179  [  128/30013]\n",
      "loss: 0.000162  [12928/30013]\n",
      "loss: 0.000141  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000165  [  128/30013]\n",
      "loss: 0.000150  [12928/30013]\n",
      "loss: 0.000130  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000153  [  128/30013]\n",
      "loss: 0.000138  [12928/30013]\n",
      "loss: 0.000120  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000141  [  128/30013]\n",
      "loss: 0.000127  [12928/30013]\n",
      "loss: 0.000110  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000130  [  128/30013]\n",
      "loss: 0.000117  [12928/30013]\n",
      "loss: 0.000101  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000119  [  128/30013]\n",
      "loss: 0.000108  [12928/30013]\n",
      "loss: 0.000092  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000110  [  128/30013]\n",
      "loss: 0.000099  [12928/30013]\n",
      "loss: 0.000085  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000101  [  128/30013]\n",
      "loss: 0.000091  [12928/30013]\n",
      "loss: 0.000077  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Test Error: \n",
      " Accuracy: 87.8%, Avg loss: 0.491931 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 100.0%, Avg loss: 0.000084 \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.6%  \n",
      "\n",
      "AUC value is: 0.6936781941462387\n",
      "Accuracy is: 0.68535\n",
      "AUC value is: 0.7796289575081042\n",
      "Accuracy is: 0.6289333333333333\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000092  [  128/30013]\n",
      "loss: 0.000083  [12928/30013]\n",
      "loss: 0.000071  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000084  [  128/30013]\n",
      "loss: 0.000076  [12928/30013]\n",
      "loss: 0.000064  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000077  [  128/30013]\n",
      "loss: 0.000070  [12928/30013]\n",
      "loss: 0.000059  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000070  [  128/30013]\n",
      "loss: 0.000064  [12928/30013]\n",
      "loss: 0.000053  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000064  [  128/30013]\n",
      "loss: 0.000058  [12928/30013]\n",
      "loss: 0.000049  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000058  [  128/30013]\n",
      "loss: 0.000053  [12928/30013]\n",
      "loss: 0.000044  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000053  [  128/30013]\n",
      "loss: 0.000048  [12928/30013]\n",
      "loss: 0.000040  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000048  [  128/30013]\n",
      "loss: 0.000044  [12928/30013]\n",
      "loss: 0.000036  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000043  [  128/30013]\n",
      "loss: 0.000039  [12928/30013]\n",
      "loss: 0.000033  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000039  [  128/30013]\n",
      "loss: 0.000036  [12928/30013]\n",
      "loss: 0.000029  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Test Error: \n",
      " Accuracy: 87.7%, Avg loss: 0.518704 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 100.0%, Avg loss: 0.000032 \n",
      "\n",
      " Error: \n",
      " Accuracy: 93.7%  \n",
      "\n",
      "AUC value is: 0.6994913224600372\n",
      "Accuracy is: 0.73555\n",
      "AUC value is: 0.8090692374807791\n",
      "Accuracy is: 0.6667\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000035  [  128/30013]\n",
      "loss: 0.000032  [12928/30013]\n",
      "loss: 0.000026  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000032  [  128/30013]\n",
      "loss: 0.000029  [12928/30013]\n",
      "loss: 0.000024  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000028  [  128/30013]\n",
      "loss: 0.000026  [12928/30013]\n",
      "loss: 0.000021  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000026  [  128/30013]\n",
      "loss: 0.000024  [12928/30013]\n",
      "loss: 0.000019  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000023  [  128/30013]\n",
      "loss: 0.000021  [12928/30013]\n",
      "loss: 0.000017  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000021  [  128/30013]\n",
      "loss: 0.000019  [12928/30013]\n",
      "loss: 0.000015  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000018  [  128/30013]\n",
      "loss: 0.000017  [12928/30013]\n",
      "loss: 0.000014  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000016  [  128/30013]\n",
      "loss: 0.000015  [12928/30013]\n",
      "loss: 0.000012  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000015  [  128/30013]\n",
      "loss: 0.000014  [12928/30013]\n",
      "loss: 0.000011  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n"
     ]
    }
   ],
   "source": [
    "# Train the target model on Purchase100 and, every 10 epochs, record\n",
    "# (1) the train/test generalization gap, (2) shadow-model attack accuracy,\n",
    "# (3) LiRA attack accuracy on the highest-risk samples.\n",
    "# Results are appended to gene_distance / shadow_result / LIRA_result,\n",
    "# which are defined in earlier cells (not visible here).\n",
    "(x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp = load_Purchase100_limited(0, 100, prop_keep=0.5, seed=0)\n",
    "training_data = CustomDataset(x_train, y_train, model_transform)\n",
    "train_dataloader = DataLoader(training_data, batch_size=batch_size)\n",
    "# Build the target model; `model`, `data_name`, `batch_size`, `device` come from earlier cells.\n",
    "if model in ['NN', 'NN_4layer']:\n",
    "    TargetModel = globals()['create_{}_model'.format(model)](x_train.shape[1], y_train.max()+1)\n",
    "elif model == 'CNN':\n",
    "    TargetModel = globals()['create_{}_model'.format(model)](y_train.max()+1, data_name)\n",
    "# print(TargetModel)\n",
    "TargetModel.to(device)\n",
    "loss_fn = nn.CrossEntropyLoss()\n",
    "optimizer = torch.optim.Adam(TargetModel.parameters(), lr=5e-4)   #, weight_decay=1e-4\n",
    "for t in range(100):\n",
    "    print(f\"Epoch {t+1}\\n-------------------------------\")\n",
    "    train(train_dataloader, TargetModel, loss_fn, optimizer, device)\n",
    "\n",
    "    # Checkpoint every 10 epochs (after epochs 1, 11, 21, ...): evaluate and attack.\n",
    "    if t%10 == 0:\n",
    "        test_acc = evaluate(test_dataloader, TargetModel, loss_fn, device)\n",
    "        train_acc = evaluate(train_dataloader, TargetModel, loss_fn, device)\n",
    "        distance = train_acc - test_acc\n",
    "        gene_distance.append(distance)\n",
    "        \n",
    "        # Ground-truth membership labels.\n",
    "        # NOTE(review): this reads train_keep (from an earlier cell), not the\n",
    "        # train_keep_exp returned by load_Purchase100_limited above -- confirm intended.\n",
    "        mem_label = train_keep[0]\n",
    "        # Load the target model's predictions\n",
    "        \n",
    "        \n",
    "        conf_data, label_data = get_model_pred(all_dataloader, TargetModel, device)\n",
    "        conf_data = conf_data.detach().cpu().numpy()\n",
    "        label_data = label_data.detach().cpu().numpy()\n",
    "        conf_data = conf_data.astype(np.float64)\n",
    "        score_tar = cal_score(conf_data.copy(), label_data)\n",
    "    \n",
    "        # Run the shadow-model attack: feature vector = top-3 confidences +\n",
    "        # a correct-prediction indicator bit, scored by the pre-trained attack_model.\n",
    "        targetX = conf_data\n",
    "        pred_cor = (targetX.argmax(1) == Y_data).astype(int)\n",
    "        targetY = mem_label\n",
    "        targetX, _ = get_top_k_conf(3, targetX, targetX)\n",
    "        targetX = np.concatenate((targetX, pred_cor.reshape(pred_cor.shape[0],1)), 1)\n",
    "        targetX = targetX.astype(np.float32)\n",
    "        shadow_attack_data = CustomDataset(targetX, targetY, attack_transform)\n",
    "        shadow_attack_dataloader = DataLoader(shadow_attack_data, batch_size=batch_size, shuffle=False)\n",
    "        attack_test_scores, attack_test_mem = get_attack_pred(shadow_attack_dataloader, attack_model, device)\n",
    "        attack_test_scores, attack_test_mem = attack_test_scores.detach().cpu().numpy(), attack_test_mem.detach().cpu().numpy()\n",
    "        accuracy = evaluate_ROC(attack_test_scores, attack_test_mem)\n",
    "        shadow_result.append(accuracy)\n",
    "    \n",
    "        # Run the risk-assessment (LiRA) attack, restricted to the top-x\n",
    "        # highest-risk samples (x and pri_risk_all come from earlier cells).\n",
    "        # NOTE(review): score_tar is recomputed here with identical arguments\n",
    "        # to the call above -- redundant, presumably harmless.\n",
    "        score_tar = cal_score(conf_data.copy(), label_data)\n",
    "        pri_risk_t = pri_risk_all\n",
    "        pri_risk_rank_t = np.argsort(pri_risk_t)\n",
    "        pri_risk_rank_t = np.flip(pri_risk_rank_t)\n",
    "    \n",
    "        pred_result = LIRA_attack(train_keep, score_all, score_tar, mem_label)\n",
    "        evaluate_ROC(pred_result, mem_label, threshold=0)\n",
    "        pred_clip = pred_result[pri_risk_rank_t[:x]]\n",
    "        mem_clip = mem_label[pri_risk_rank_t[:x]]\n",
    "        pred_clip = pred_clip > 0\n",
    "        accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "        LIRA_result.append(accuracy)\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4f72cdcc-f4a8-4786-890e-7911bca756ae",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "id": "139fa587-0fb3-4343-84bc-5eed8688811a",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.49978333333333336,\n",
       " 0.50005,\n",
       " 0.5076,\n",
       " 0.5239166666666667,\n",
       " 0.5486,\n",
       " 0.5525666666666667,\n",
       " 0.6077333333333333,\n",
       " 0.6384666666666666,\n",
       " 0.68535,\n",
       " 0.73555]"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Shadow-model attack accuracy at each 10-epoch checkpoint (filled by the training cell above)\n",
    "shadow_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "id": "6576ed13-123f-4755-b860-a9b0edbb5303",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.46,\n",
       " 0.6083333333333333,\n",
       " 0.8183333333333334,\n",
       " 0.9166666666666666,\n",
       " 0.9433333333333334,\n",
       " 0.8933333333333333,\n",
       " 0.99,\n",
       " 0.9883333333333333,\n",
       " 0.9866666666666667,\n",
       " 0.9883333333333333]"
      ]
     },
     "execution_count": 38,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# LiRA attack accuracy on the top-x highest-risk samples at each 10-epoch checkpoint\n",
    "LIRA_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "id": "2c5fe301-80c4-4059-9700-59371efd32bb",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.034575742178389324,\n",
       " 0.1232948405690868,\n",
       " 0.1372539416252957,\n",
       " 0.1398353546796388,\n",
       " 0.14720207909905714,\n",
       " 0.15367066604471402,\n",
       " 0.12670000000000003,\n",
       " 0.12319999999999998,\n",
       " 0.12190000000000001,\n",
       " 0.12304999999999999]"
      ]
     },
     "execution_count": 39,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Train-minus-test accuracy gap (generalization distance) at each 10-epoch checkpoint\n",
    "gene_distance"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4b64ea47-4fae-4ac9-b3bf-7c0100dd32c5",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "id": "4e0c8834-f472-43db-908b-fffdd80642f5",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.606454  [  128/30013]\n",
      "loss: 1.801891  [12928/30013]\n",
      "loss: 1.215063  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.3%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.212727  [  128/30013]\n",
      "loss: 0.913283  [12928/30013]\n",
      "loss: 0.682189  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.5%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.807240  [  128/30013]\n",
      "loss: 0.632205  [12928/30013]\n",
      "loss: 0.469458  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.6%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.652796  [  128/30013]\n",
      "loss: 0.462268  [12928/30013]\n",
      "loss: 0.346768  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.5%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.468201  [  128/30013]\n",
      "loss: 0.368615  [12928/30013]\n",
      "loss: 0.301122  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.1%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.335343  [  128/30013]\n",
      "loss: 0.324734  [12928/30013]\n",
      "loss: 0.249268  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.4%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.303785  [  128/30013]\n",
      "loss: 0.259111  [12928/30013]\n",
      "loss: 0.220006  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.6%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.219399  [  128/30013]\n",
      "loss: 0.217707  [12928/30013]\n",
      "loss: 0.166725  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.6%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.219922  [  128/30013]\n",
      "loss: 0.217258  [12928/30013]\n",
      "loss: 0.153256  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.4%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.178763  [  128/30013]\n",
      "loss: 0.191040  [12928/30013]\n",
      "loss: 0.155750  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.4%\n",
      "Done!\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.604876  [  128/30013]\n",
      "loss: 1.809958  [12928/30013]\n",
      "loss: 1.212152  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.3%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.178735  [  128/30013]\n",
      "loss: 0.918881  [12928/30013]\n",
      "loss: 0.648251  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.1%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.787078  [  128/30013]\n",
      "loss: 0.668121  [12928/30013]\n",
      "loss: 0.459828  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.0%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.615132  [  128/30013]\n",
      "loss: 0.496637  [12928/30013]\n",
      "loss: 0.360194  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.3%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.461480  [  128/30013]\n",
      "loss: 0.375140  [12928/30013]\n",
      "loss: 0.301011  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.8%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.331837  [  128/30013]\n",
      "loss: 0.328463  [12928/30013]\n",
      "loss: 0.278335  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.4%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.253781  [  128/30013]\n",
      "loss: 0.320584  [12928/30013]\n",
      "loss: 0.198802  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.5%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.281307  [  128/30013]\n",
      "loss: 0.302514  [12928/30013]\n",
      "loss: 0.163517  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.2%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.195631  [  128/30013]\n",
      "loss: 0.190637  [12928/30013]\n",
      "loss: 0.166222  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.8%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.149549  [  128/30013]\n",
      "loss: 0.229028  [12928/30013]\n",
      "loss: 0.150883  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.1%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.143515  [  128/30013]\n",
      "loss: 0.133223  [12928/30013]\n",
      "loss: 0.105884  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.1%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.117922  [  128/30013]\n",
      "loss: 0.163224  [12928/30013]\n",
      "loss: 0.158279  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.8%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.087547  [  128/30013]\n",
      "loss: 0.133981  [12928/30013]\n",
      "loss: 0.118352  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.4%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.065068  [  128/30013]\n",
      "loss: 0.115737  [12928/30013]\n",
      "loss: 0.109263  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.0%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.071013  [  128/30013]\n",
      "loss: 0.139909  [12928/30013]\n",
      "loss: 0.069848  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.0%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.069647  [  128/30013]\n",
      "loss: 0.066500  [12928/30013]\n",
      "loss: 0.076942  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.5%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.087225  [  128/30013]\n",
      "loss: 0.119198  [12928/30013]\n",
      "loss: 0.039933  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.092795  [  128/30013]\n",
      "loss: 0.051813  [12928/30013]\n",
      "loss: 0.025273  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.047301  [  128/30013]\n",
      "loss: 0.065598  [12928/30013]\n",
      "loss: 0.031315  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.040259  [  128/30013]\n",
      "loss: 0.033863  [12928/30013]\n",
      "loss: 0.037614  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Done!\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.617499  [  128/30013]\n",
      "loss: 1.821188  [12928/30013]\n",
      "loss: 1.189927  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 46.8%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.213854  [  128/30013]\n",
      "loss: 0.924795  [12928/30013]\n",
      "loss: 0.679626  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.9%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.858835  [  128/30013]\n",
      "loss: 0.634354  [12928/30013]\n",
      "loss: 0.498984  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.6%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.668576  [  128/30013]\n",
      "loss: 0.487010  [12928/30013]\n",
      "loss: 0.359658  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.3%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.510490  [  128/30013]\n",
      "loss: 0.386099  [12928/30013]\n",
      "loss: 0.312101  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.9%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.377486  [  128/30013]\n",
      "loss: 0.317841  [12928/30013]\n",
      "loss: 0.282250  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.9%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.318049  [  128/30013]\n",
      "loss: 0.286267  [12928/30013]\n",
      "loss: 0.279320  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.9%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.241931  [  128/30013]\n",
      "loss: 0.270809  [12928/30013]\n",
      "loss: 0.149503  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.3%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.250392  [  128/30013]\n",
      "loss: 0.238011  [12928/30013]\n",
      "loss: 0.165486  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.3%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.187996  [  128/30013]\n",
      "loss: 0.160328  [12928/30013]\n",
      "loss: 0.285063  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.7%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.113392  [  128/30013]\n",
      "loss: 0.145180  [12928/30013]\n",
      "loss: 0.236591  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.3%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.104479  [  128/30013]\n",
      "loss: 0.133102  [12928/30013]\n",
      "loss: 0.138539  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.5%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.098650  [  128/30013]\n",
      "loss: 0.116419  [12928/30013]\n",
      "loss: 0.074928  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.3%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.128660  [  128/30013]\n",
      "loss: 0.085617  [12928/30013]\n",
      "loss: 0.041312  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.073635  [  128/30013]\n",
      "loss: 0.077671  [12928/30013]\n",
      "loss: 0.037344  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.073163  [  128/30013]\n",
      "loss: 0.097645  [12928/30013]\n",
      "loss: 0.039096  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.051811  [  128/30013]\n",
      "loss: 0.067638  [12928/30013]\n",
      "loss: 0.053965  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.038767  [  128/30013]\n",
      "loss: 0.084134  [12928/30013]\n",
      "loss: 0.064485  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.044251  [  128/30013]\n",
      "loss: 0.126006  [12928/30013]\n",
      "loss: 0.045923  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.056306  [  128/30013]\n",
      "loss: 0.040244  [12928/30013]\n",
      "loss: 0.071620  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.029085  [  128/30013]\n",
      "loss: 0.108528  [12928/30013]\n",
      "loss: 0.022484  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.043266  [  128/30013]\n",
      "loss: 0.024926  [12928/30013]\n",
      "loss: 0.023118  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.071795  [  128/30013]\n",
      "loss: 0.021881  [12928/30013]\n",
      "loss: 0.072942  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.052587  [  128/30013]\n",
      "loss: 0.021361  [12928/30013]\n",
      "loss: 0.019363  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.070128  [  128/30013]\n",
      "loss: 0.018830  [12928/30013]\n",
      "loss: 0.027584  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.027729  [  128/30013]\n",
      "loss: 0.086427  [12928/30013]\n",
      "loss: 0.050245  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.117130  [  128/30013]\n",
      "loss: 0.026803  [12928/30013]\n",
      "loss: 0.013047  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.035609  [  128/30013]\n",
      "loss: 0.024208  [12928/30013]\n",
      "loss: 0.008304  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.046570  [  128/30013]\n",
      "loss: 0.035347  [12928/30013]\n",
      "loss: 0.009249  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.027256  [  128/30013]\n",
      "loss: 0.044493  [12928/30013]\n",
      "loss: 0.014433  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Done!\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.601987  [  128/30013]\n",
      "loss: 1.794536  [12928/30013]\n",
      "loss: 1.210485  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.5%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.151291  [  128/30013]\n",
      "loss: 0.899640  [12928/30013]\n",
      "loss: 0.676930  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.6%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.754215  [  128/30013]\n",
      "loss: 0.627715  [12928/30013]\n",
      "loss: 0.492697  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.3%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.604679  [  128/30013]\n",
      "loss: 0.493652  [12928/30013]\n",
      "loss: 0.364145  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.4%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.446077  [  128/30013]\n",
      "loss: 0.380697  [12928/30013]\n",
      "loss: 0.319304  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.9%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.343294  [  128/30013]\n",
      "loss: 0.315387  [12928/30013]\n",
      "loss: 0.260704  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.6%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.268193  [  128/30013]\n",
      "loss: 0.296449  [12928/30013]\n",
      "loss: 0.279903  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.2%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.308780  [  128/30013]\n",
      "loss: 0.295634  [12928/30013]\n",
      "loss: 0.184672  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.8%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.237350  [  128/30013]\n",
      "loss: 0.312427  [12928/30013]\n",
      "loss: 0.164029  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.4%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.142839  [  128/30013]\n",
      "loss: 0.175634  [12928/30013]\n",
      "loss: 0.172087  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.9%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.127894  [  128/30013]\n",
      "loss: 0.105273  [12928/30013]\n",
      "loss: 0.157675  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.7%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.141638  [  128/30013]\n",
      "loss: 0.155251  [12928/30013]\n",
      "loss: 0.118012  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.0%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.081228  [  128/30013]\n",
      "loss: 0.099047  [12928/30013]\n",
      "loss: 0.070898  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.8%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.089828  [  128/30013]\n",
      "loss: 0.061650  [12928/30013]\n",
      "loss: 0.134656  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.1%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.126615  [  128/30013]\n",
      "loss: 0.076585  [12928/30013]\n",
      "loss: 0.087518  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.2%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.077894  [  128/30013]\n",
      "loss: 0.098850  [12928/30013]\n",
      "loss: 0.075722  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.2%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.042732  [  128/30013]\n",
      "loss: 0.137825  [12928/30013]\n",
      "loss: 0.054002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.4%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.036797  [  128/30013]\n",
      "loss: 0.162621  [12928/30013]\n",
      "loss: 0.061949  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.105482  [  128/30013]\n",
      "loss: 0.069892  [12928/30013]\n",
      "loss: 0.085797  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.030951  [  128/30013]\n",
      "loss: 0.087593  [12928/30013]\n",
      "loss: 0.016761  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.038450  [  128/30013]\n",
      "loss: 0.079717  [12928/30013]\n",
      "loss: 0.021036  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.024773  [  128/30013]\n",
      "loss: 0.045385  [12928/30013]\n",
      "loss: 0.018789  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.015521  [  128/30013]\n",
      "loss: 0.027361  [12928/30013]\n",
      "loss: 0.018855  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.017490  [  128/30013]\n",
      "loss: 0.026035  [12928/30013]\n",
      "loss: 0.013002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.019863  [  128/30013]\n",
      "loss: 0.083404  [12928/30013]\n",
      "loss: 0.012068  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.012044  [  128/30013]\n",
      "loss: 0.010713  [12928/30013]\n",
      "loss: 0.007047  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.010652  [  128/30013]\n",
      "loss: 0.008339  [12928/30013]\n",
      "loss: 0.004015  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.005336  [  128/30013]\n",
      "loss: 0.005024  [12928/30013]\n",
      "loss: 0.003007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.006527  [  128/30013]\n",
      "loss: 0.024577  [12928/30013]\n",
      "loss: 0.009640  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.019673  [  128/30013]\n",
      "loss: 0.073590  [12928/30013]\n",
      "loss: 0.041774  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.5%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.055896  [  128/30013]\n",
      "loss: 0.030927  [12928/30013]\n",
      "loss: 0.024719  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.022518  [  128/30013]\n",
      "loss: 0.014470  [12928/30013]\n",
      "loss: 0.042437  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.018901  [  128/30013]\n",
      "loss: 0.038388  [12928/30013]\n",
      "loss: 0.018406  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.008206  [  128/30013]\n",
      "loss: 0.011458  [12928/30013]\n",
      "loss: 0.015882  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.017374  [  128/30013]\n",
      "loss: 0.013833  [12928/30013]\n",
      "loss: 0.009378  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.015167  [  128/30013]\n",
      "loss: 0.022330  [12928/30013]\n",
      "loss: 0.002897  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.025915  [  128/30013]\n",
      "loss: 0.006702  [12928/30013]\n",
      "loss: 0.002614  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.006485  [  128/30013]\n",
      "loss: 0.036521  [12928/30013]\n",
      "loss: 0.007910  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.012212  [  128/30013]\n",
      "loss: 0.019345  [12928/30013]\n",
      "loss: 0.043468  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.047718  [  128/30013]\n",
      "loss: 0.006027  [12928/30013]\n",
      "loss: 0.095169  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Done!\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.606045  [  128/30013]\n",
      "loss: 1.800897  [12928/30013]\n",
      "loss: 1.140261  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.4%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.167359  [  128/30013]\n",
      "loss: 0.931532  [12928/30013]\n",
      "loss: 0.674730  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.6%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.759957  [  128/30013]\n",
      "loss: 0.658707  [12928/30013]\n",
      "loss: 0.472276  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.0%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.693657  [  128/30013]\n",
      "loss: 0.512465  [12928/30013]\n",
      "loss: 0.333168  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.3%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.477788  [  128/30013]\n",
      "loss: 0.395736  [12928/30013]\n",
      "loss: 0.302751  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.7%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.333116  [  128/30013]\n",
      "loss: 0.349829  [12928/30013]\n",
      "loss: 0.282076  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.0%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.283040  [  128/30013]\n",
      "loss: 0.363471  [12928/30013]\n",
      "loss: 0.199892  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.1%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.267077  [  128/30013]\n",
      "loss: 0.280607  [12928/30013]\n",
      "loss: 0.149094  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.4%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.197745  [  128/30013]\n",
      "loss: 0.274864  [12928/30013]\n",
      "loss: 0.145773  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.6%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.205841  [  128/30013]\n",
      "loss: 0.195411  [12928/30013]\n",
      "loss: 0.164575  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.8%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.151070  [  128/30013]\n",
      "loss: 0.135895  [12928/30013]\n",
      "loss: 0.214488  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.5%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.139522  [  128/30013]\n",
      "loss: 0.110128  [12928/30013]\n",
      "loss: 0.080093  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.3%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.150915  [  128/30013]\n",
      "loss: 0.089317  [12928/30013]\n",
      "loss: 0.066638  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.7%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.077966  [  128/30013]\n",
      "loss: 0.116864  [12928/30013]\n",
      "loss: 0.113001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.1%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.100447  [  128/30013]\n",
      "loss: 0.095468  [12928/30013]\n",
      "loss: 0.093051  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.079071  [  128/30013]\n",
      "loss: 0.105016  [12928/30013]\n",
      "loss: 0.045365  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.5%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.097455  [  128/30013]\n",
      "loss: 0.061063  [12928/30013]\n",
      "loss: 0.035250  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.081605  [  128/30013]\n",
      "loss: 0.093750  [12928/30013]\n",
      "loss: 0.064160  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.097842  [  128/30013]\n",
      "loss: 0.038944  [12928/30013]\n",
      "loss: 0.044324  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.036129  [  128/30013]\n",
      "loss: 0.033020  [12928/30013]\n",
      "loss: 0.024551  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.038788  [  128/30013]\n",
      "loss: 0.030013  [12928/30013]\n",
      "loss: 0.015828  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.054356  [  128/30013]\n",
      "loss: 0.020010  [12928/30013]\n",
      "loss: 0.016907  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.014753  [  128/30013]\n",
      "loss: 0.021995  [12928/30013]\n",
      "loss: 0.014337  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.013661  [  128/30013]\n",
      "loss: 0.011620  [12928/30013]\n",
      "loss: 0.031653  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.010484  [  128/30013]\n",
      "loss: 0.013893  [12928/30013]\n",
      "loss: 0.007015  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.009073  [  128/30013]\n",
      "loss: 0.007111  [12928/30013]\n",
      "loss: 0.006320  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.007506  [  128/30013]\n",
      "loss: 0.073814  [12928/30013]\n",
      "loss: 0.022645  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.021094  [  128/30013]\n",
      "loss: 0.092628  [12928/30013]\n",
      "loss: 0.185248  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.049503  [  128/30013]\n",
      "loss: 0.026793  [12928/30013]\n",
      "loss: 0.027620  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.043657  [  128/30013]\n",
      "loss: 0.053425  [12928/30013]\n",
      "loss: 0.017212  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.019373  [  128/30013]\n",
      "loss: 0.015028  [12928/30013]\n",
      "loss: 0.011977  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.012364  [  128/30013]\n",
      "loss: 0.013886  [12928/30013]\n",
      "loss: 0.057781  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.018907  [  128/30013]\n",
      "loss: 0.014813  [12928/30013]\n",
      "loss: 0.020919  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.013273  [  128/30013]\n",
      "loss: 0.014086  [12928/30013]\n",
      "loss: 0.021316  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.008443  [  128/30013]\n",
      "loss: 0.019435  [12928/30013]\n",
      "loss: 0.011538  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.023127  [  128/30013]\n",
      "loss: 0.012683  [12928/30013]\n",
      "loss: 0.007771  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.028606  [  128/30013]\n",
      "loss: 0.011870  [12928/30013]\n",
      "loss: 0.013346  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.014609  [  128/30013]\n",
      "loss: 0.035661  [12928/30013]\n",
      "loss: 0.026296  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.012845  [  128/30013]\n",
      "loss: 0.018041  [12928/30013]\n",
      "loss: 0.047439  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.071719  [  128/30013]\n",
      "loss: 0.009341  [12928/30013]\n",
      "loss: 0.004376  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.008408  [  128/30013]\n",
      "loss: 0.029964  [12928/30013]\n",
      "loss: 0.011231  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.006212  [  128/30013]\n",
      "loss: 0.014768  [12928/30013]\n",
      "loss: 0.011986  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.022444  [  128/30013]\n",
      "loss: 0.003115  [12928/30013]\n",
      "loss: 0.001091  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.003154  [  128/30013]\n",
      "loss: 0.002386  [12928/30013]\n",
      "loss: 0.003539  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.002550  [  128/30013]\n",
      "loss: 0.002616  [12928/30013]\n",
      "loss: 0.007422  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.039396  [  128/30013]\n",
      "loss: 0.040156  [12928/30013]\n",
      "loss: 0.007944  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.5%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.034507  [  128/30013]\n",
      "loss: 0.012503  [12928/30013]\n",
      "loss: 0.024465  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.012563  [  128/30013]\n",
      "loss: 0.005863  [12928/30013]\n",
      "loss: 0.003775  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.002935  [  128/30013]\n",
      "loss: 0.003941  [12928/30013]\n",
      "loss: 0.001727  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.001370  [  128/30013]\n",
      "loss: 0.002200  [12928/30013]\n",
      "loss: 0.001237  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.608316  [  128/30013]\n",
      "loss: 1.810182  [12928/30013]\n",
      "loss: 1.224698  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 46.7%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.171829  [  128/30013]\n",
      "loss: 0.915673  [12928/30013]\n",
      "loss: 0.694983  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.4%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.771917  [  128/30013]\n",
      "loss: 0.633630  [12928/30013]\n",
      "loss: 0.508189  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.0%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.612786  [  128/30013]\n",
      "loss: 0.509227  [12928/30013]\n",
      "loss: 0.353340  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.5%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.473996  [  128/30013]\n",
      "loss: 0.387398  [12928/30013]\n",
      "loss: 0.285902  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.1%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.331836  [  128/30013]\n",
      "loss: 0.309281  [12928/30013]\n",
      "loss: 0.273471  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.3%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.293101  [  128/30013]\n",
      "loss: 0.282186  [12928/30013]\n",
      "loss: 0.201772  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.4%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.290733  [  128/30013]\n",
      "loss: 0.290639  [12928/30013]\n",
      "loss: 0.182156  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.4%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.301046  [  128/30013]\n",
      "loss: 0.264000  [12928/30013]\n",
      "loss: 0.186034  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.9%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.237689  [  128/30013]\n",
      "loss: 0.204444  [12928/30013]\n",
      "loss: 0.161857  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.2%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.177874  [  128/30013]\n",
      "loss: 0.167144  [12928/30013]\n",
      "loss: 0.149286  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.6%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.096103  [  128/30013]\n",
      "loss: 0.123452  [12928/30013]\n",
      "loss: 0.091008  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.5%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.086328  [  128/30013]\n",
      "loss: 0.112381  [12928/30013]\n",
      "loss: 0.072737  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.4%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.103276  [  128/30013]\n",
      "loss: 0.099189  [12928/30013]\n",
      "loss: 0.078488  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.072963  [  128/30013]\n",
      "loss: 0.061325  [12928/30013]\n",
      "loss: 0.061372  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.063911  [  128/30013]\n",
      "loss: 0.084984  [12928/30013]\n",
      "loss: 0.055094  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.064682  [  128/30013]\n",
      "loss: 0.120077  [12928/30013]\n",
      "loss: 0.070905  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.077056  [  128/30013]\n",
      "loss: 0.075894  [12928/30013]\n",
      "loss: 0.066509  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.041351  [  128/30013]\n",
      "loss: 0.070270  [12928/30013]\n",
      "loss: 0.060987  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.042775  [  128/30013]\n",
      "loss: 0.055288  [12928/30013]\n",
      "loss: 0.039914  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.029094  [  128/30013]\n",
      "loss: 0.059696  [12928/30013]\n",
      "loss: 0.021342  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.043072  [  128/30013]\n",
      "loss: 0.031186  [12928/30013]\n",
      "loss: 0.019173  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.083497  [  128/30013]\n",
      "loss: 0.040762  [12928/30013]\n",
      "loss: 0.013845  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.039092  [  128/30013]\n",
      "loss: 0.037816  [12928/30013]\n",
      "loss: 0.045526  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.059292  [  128/30013]\n",
      "loss: 0.020795  [12928/30013]\n",
      "loss: 0.014285  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.020657  [  128/30013]\n",
      "loss: 0.050821  [12928/30013]\n",
      "loss: 0.008789  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.013040  [  128/30013]\n",
      "loss: 0.038897  [12928/30013]\n",
      "loss: 0.017424  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.022236  [  128/30013]\n",
      "loss: 0.057921  [12928/30013]\n",
      "loss: 0.029074  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.015633  [  128/30013]\n",
      "loss: 0.058623  [12928/30013]\n",
      "loss: 0.016097  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.012981  [  128/30013]\n",
      "loss: 0.016340  [12928/30013]\n",
      "loss: 0.010353  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.032914  [  128/30013]\n",
      "loss: 0.008797  [12928/30013]\n",
      "loss: 0.007885  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.010057  [  128/30013]\n",
      "loss: 0.009965  [12928/30013]\n",
      "loss: 0.004760  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.005183  [  128/30013]\n",
      "loss: 0.005127  [12928/30013]\n",
      "loss: 0.002380  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.003058  [  128/30013]\n",
      "loss: 0.003248  [12928/30013]\n",
      "loss: 0.001989  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.002736  [  128/30013]\n",
      "loss: 0.002653  [12928/30013]\n",
      "loss: 0.001704  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.002370  [  128/30013]\n",
      "loss: 0.002330  [12928/30013]\n",
      "loss: 0.001520  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.002081  [  128/30013]\n",
      "loss: 0.002090  [12928/30013]\n",
      "loss: 0.001375  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.001858  [  128/30013]\n",
      "loss: 0.001891  [12928/30013]\n",
      "loss: 0.001252  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.001673  [  128/30013]\n",
      "loss: 0.001717  [12928/30013]\n",
      "loss: 0.001144  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.001512  [  128/30013]\n",
      "loss: 0.001562  [12928/30013]\n",
      "loss: 0.001045  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.001369  [  128/30013]\n",
      "loss: 0.001421  [12928/30013]\n",
      "loss: 0.000955  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.001240  [  128/30013]\n",
      "loss: 0.001292  [12928/30013]\n",
      "loss: 0.000873  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.001123  [  128/30013]\n",
      "loss: 0.001174  [12928/30013]\n",
      "loss: 0.000796  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.001016  [  128/30013]\n",
      "loss: 0.001066  [12928/30013]\n",
      "loss: 0.000725  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.000919  [  128/30013]\n",
      "loss: 0.000966  [12928/30013]\n",
      "loss: 0.000660  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.000829  [  128/30013]\n",
      "loss: 0.000873  [12928/30013]\n",
      "loss: 0.000599  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.000748  [  128/30013]\n",
      "loss: 0.000788  [12928/30013]\n",
      "loss: 0.000543  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.000673  [  128/30013]\n",
      "loss: 0.000709  [12928/30013]\n",
      "loss: 0.000492  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.000605  [  128/30013]\n",
      "loss: 0.000637  [12928/30013]\n",
      "loss: 0.000444  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.000543  [  128/30013]\n",
      "loss: 0.000571  [12928/30013]\n",
      "loss: 0.000401  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.000487  [  128/30013]\n",
      "loss: 0.000511  [12928/30013]\n",
      "loss: 0.000361  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000436  [  128/30013]\n",
      "loss: 0.000456  [12928/30013]\n",
      "loss: 0.000325  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.000390  [  128/30013]\n",
      "loss: 0.000406  [12928/30013]\n",
      "loss: 0.000292  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.000348  [  128/30013]\n",
      "loss: 0.000361  [12928/30013]\n",
      "loss: 0.000261  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.000311  [  128/30013]\n",
      "loss: 0.000321  [12928/30013]\n",
      "loss: 0.000234  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.000277  [  128/30013]\n",
      "loss: 0.000285  [12928/30013]\n",
      "loss: 0.000209  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.000246  [  128/30013]\n",
      "loss: 0.000252  [12928/30013]\n",
      "loss: 0.000187  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000219  [  128/30013]\n",
      "loss: 0.000223  [12928/30013]\n",
      "loss: 0.000167  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000194  [  128/30013]\n",
      "loss: 0.000197  [12928/30013]\n",
      "loss: 0.000148  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000173  [  128/30013]\n",
      "loss: 0.000175  [12928/30013]\n",
      "loss: 0.000132  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.595018  [  128/30013]\n",
      "loss: 1.808862  [12928/30013]\n",
      "loss: 1.222676  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 46.7%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.200350  [  128/30013]\n",
      "loss: 0.932208  [12928/30013]\n",
      "loss: 0.686439  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.0%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.790069  [  128/30013]\n",
      "loss: 0.637457  [12928/30013]\n",
      "loss: 0.491059  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.3%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.617670  [  128/30013]\n",
      "loss: 0.479973  [12928/30013]\n",
      "loss: 0.367495  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.6%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.453611  [  128/30013]\n",
      "loss: 0.380710  [12928/30013]\n",
      "loss: 0.318161  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.7%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.343150  [  128/30013]\n",
      "loss: 0.340763  [12928/30013]\n",
      "loss: 0.284864  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.6%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.296031  [  128/30013]\n",
      "loss: 0.319648  [12928/30013]\n",
      "loss: 0.262137  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.7%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.282518  [  128/30013]\n",
      "loss: 0.253947  [12928/30013]\n",
      "loss: 0.149263  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.0%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.207432  [  128/30013]\n",
      "loss: 0.236279  [12928/30013]\n",
      "loss: 0.208051  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.9%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.305383  [  128/30013]\n",
      "loss: 0.206959  [12928/30013]\n",
      "loss: 0.192056  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.3%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.145570  [  128/30013]\n",
      "loss: 0.124015  [12928/30013]\n",
      "loss: 0.123238  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.6%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.089309  [  128/30013]\n",
      "loss: 0.115666  [12928/30013]\n",
      "loss: 0.114023  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.4%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.114522  [  128/30013]\n",
      "loss: 0.109091  [12928/30013]\n",
      "loss: 0.073328  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.085960  [  128/30013]\n",
      "loss: 0.066548  [12928/30013]\n",
      "loss: 0.064267  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.066326  [  128/30013]\n",
      "loss: 0.060359  [12928/30013]\n",
      "loss: 0.065209  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.090293  [  128/30013]\n",
      "loss: 0.070773  [12928/30013]\n",
      "loss: 0.045886  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.033212  [  128/30013]\n",
      "loss: 0.171899  [12928/30013]\n",
      "loss: 0.049026  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.082872  [  128/30013]\n",
      "loss: 0.105521  [12928/30013]\n",
      "loss: 0.039270  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.142385  [  128/30013]\n",
      "loss: 0.107898  [12928/30013]\n",
      "loss: 0.043518  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.4%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.045467  [  128/30013]\n",
      "loss: 0.099111  [12928/30013]\n",
      "loss: 0.019777  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.029910  [  128/30013]\n",
      "loss: 0.104885  [12928/30013]\n",
      "loss: 0.066516  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.035875  [  128/30013]\n",
      "loss: 0.094187  [12928/30013]\n",
      "loss: 0.102505  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.081382  [  128/30013]\n",
      "loss: 0.023469  [12928/30013]\n",
      "loss: 0.023786  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.045897  [  128/30013]\n",
      "loss: 0.015721  [12928/30013]\n",
      "loss: 0.070871  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.092033  [  128/30013]\n",
      "loss: 0.016542  [12928/30013]\n",
      "loss: 0.009158  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.011654  [  128/30013]\n",
      "loss: 0.012728  [12928/30013]\n",
      "loss: 0.007666  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.016888  [  128/30013]\n",
      "loss: 0.006040  [12928/30013]\n",
      "loss: 0.010046  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.018845  [  128/30013]\n",
      "loss: 0.006413  [12928/30013]\n",
      "loss: 0.012383  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.009163  [  128/30013]\n",
      "loss: 0.011432  [12928/30013]\n",
      "loss: 0.005812  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.028653  [  128/30013]\n",
      "loss: 0.007921  [12928/30013]\n",
      "loss: 0.010208  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.046885  [  128/30013]\n",
      "loss: 0.049562  [12928/30013]\n",
      "loss: 0.041436  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.3%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.033407  [  128/30013]\n",
      "loss: 0.354341  [12928/30013]\n",
      "loss: 0.071055  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.9%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.020985  [  128/30013]\n",
      "loss: 0.017192  [12928/30013]\n",
      "loss: 0.010936  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.007252  [  128/30013]\n",
      "loss: 0.009245  [12928/30013]\n",
      "loss: 0.013745  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.008317  [  128/30013]\n",
      "loss: 0.008427  [12928/30013]\n",
      "loss: 0.004524  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.006329  [  128/30013]\n",
      "loss: 0.002739  [12928/30013]\n",
      "loss: 0.002446  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.003452  [  128/30013]\n",
      "loss: 0.003037  [12928/30013]\n",
      "loss: 0.001773  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.002804  [  128/30013]\n",
      "loss: 0.001974  [12928/30013]\n",
      "loss: 0.001609  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.002292  [  128/30013]\n",
      "loss: 0.001522  [12928/30013]\n",
      "loss: 0.001426  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.001870  [  128/30013]\n",
      "loss: 0.001360  [12928/30013]\n",
      "loss: 0.001268  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.001637  [  128/30013]\n",
      "loss: 0.001225  [12928/30013]\n",
      "loss: 0.001140  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.001460  [  128/30013]\n",
      "loss: 0.001107  [12928/30013]\n",
      "loss: 0.001032  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.001313  [  128/30013]\n",
      "loss: 0.001002  [12928/30013]\n",
      "loss: 0.000937  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.001188  [  128/30013]\n",
      "loss: 0.000909  [12928/30013]\n",
      "loss: 0.000852  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.001077  [  128/30013]\n",
      "loss: 0.000826  [12928/30013]\n",
      "loss: 0.000776  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.000979  [  128/30013]\n",
      "loss: 0.000750  [12928/30013]\n",
      "loss: 0.000707  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.000890  [  128/30013]\n",
      "loss: 0.000682  [12928/30013]\n",
      "loss: 0.000644  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.000809  [  128/30013]\n",
      "loss: 0.000620  [12928/30013]\n",
      "loss: 0.000586  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.000735  [  128/30013]\n",
      "loss: 0.000564  [12928/30013]\n",
      "loss: 0.000534  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.000667  [  128/30013]\n",
      "loss: 0.000512  [12928/30013]\n",
      "loss: 0.000486  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.000604  [  128/30013]\n",
      "loss: 0.000465  [12928/30013]\n",
      "loss: 0.000442  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000547  [  128/30013]\n",
      "loss: 0.000422  [12928/30013]\n",
      "loss: 0.000402  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.000495  [  128/30013]\n",
      "loss: 0.000382  [12928/30013]\n",
      "loss: 0.000365  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.000447  [  128/30013]\n",
      "loss: 0.000346  [12928/30013]\n",
      "loss: 0.000331  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.000403  [  128/30013]\n",
      "loss: 0.000313  [12928/30013]\n",
      "loss: 0.000300  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.000363  [  128/30013]\n",
      "loss: 0.000282  [12928/30013]\n",
      "loss: 0.000272  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.000326  [  128/30013]\n",
      "loss: 0.000254  [12928/30013]\n",
      "loss: 0.000246  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000293  [  128/30013]\n",
      "loss: 0.000229  [12928/30013]\n",
      "loss: 0.000222  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000262  [  128/30013]\n",
      "loss: 0.000205  [12928/30013]\n",
      "loss: 0.000200  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000235  [  128/30013]\n",
      "loss: 0.000184  [12928/30013]\n",
      "loss: 0.000180  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000210  [  128/30013]\n",
      "loss: 0.000165  [12928/30013]\n",
      "loss: 0.000162  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000188  [  128/30013]\n",
      "loss: 0.000147  [12928/30013]\n",
      "loss: 0.000146  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000167  [  128/30013]\n",
      "loss: 0.000131  [12928/30013]\n",
      "loss: 0.000131  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000149  [  128/30013]\n",
      "loss: 0.000117  [12928/30013]\n",
      "loss: 0.000118  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000133  [  128/30013]\n",
      "loss: 0.000104  [12928/30013]\n",
      "loss: 0.000105  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000118  [  128/30013]\n",
      "loss: 0.000093  [12928/30013]\n",
      "loss: 0.000094  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000105  [  128/30013]\n",
      "loss: 0.000082  [12928/30013]\n",
      "loss: 0.000084  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000093  [  128/30013]\n",
      "loss: 0.000073  [12928/30013]\n",
      "loss: 0.000075  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000083  [  128/30013]\n",
      "loss: 0.000065  [12928/30013]\n",
      "loss: 0.000067  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000073  [  128/30013]\n",
      "loss: 0.000057  [12928/30013]\n",
      "loss: 0.000060  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.615610  [  128/30013]\n",
      "loss: 1.787903  [12928/30013]\n",
      "loss: 1.218315  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 46.6%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.207378  [  128/30013]\n",
      "loss: 0.940915  [12928/30013]\n",
      "loss: 0.698171  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.4%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.804491  [  128/30013]\n",
      "loss: 0.632731  [12928/30013]\n",
      "loss: 0.492264  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.8%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.663082  [  128/30013]\n",
      "loss: 0.471745  [12928/30013]\n",
      "loss: 0.351133  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.4%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.497038  [  128/30013]\n",
      "loss: 0.366076  [12928/30013]\n",
      "loss: 0.293979  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.7%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.335450  [  128/30013]\n",
      "loss: 0.296542  [12928/30013]\n",
      "loss: 0.232504  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.0%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.305320  [  128/30013]\n",
      "loss: 0.279915  [12928/30013]\n",
      "loss: 0.241319  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.2%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.208556  [  128/30013]\n",
      "loss: 0.229036  [12928/30013]\n",
      "loss: 0.211057  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.3%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.172058  [  128/30013]\n",
      "loss: 0.230723  [12928/30013]\n",
      "loss: 0.189378  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.8%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.219520  [  128/30013]\n",
      "loss: 0.161284  [12928/30013]\n",
      "loss: 0.224922  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.7%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.175498  [  128/30013]\n",
      "loss: 0.171080  [12928/30013]\n",
      "loss: 0.163283  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.2%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.106180  [  128/30013]\n",
      "loss: 0.129767  [12928/30013]\n",
      "loss: 0.084562  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.104421  [  128/30013]\n",
      "loss: 0.115266  [12928/30013]\n",
      "loss: 0.065657  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.1%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.072533  [  128/30013]\n",
      "loss: 0.072609  [12928/30013]\n",
      "loss: 0.059059  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.5%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.056471  [  128/30013]\n",
      "loss: 0.065992  [12928/30013]\n",
      "loss: 0.060888  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.5%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.056632  [  128/30013]\n",
      "loss: 0.073554  [12928/30013]\n",
      "loss: 0.062308  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.101857  [  128/30013]\n",
      "loss: 0.150141  [12928/30013]\n",
      "loss: 0.050983  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.084697  [  128/30013]\n",
      "loss: 0.079627  [12928/30013]\n",
      "loss: 0.070927  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.040312  [  128/30013]\n",
      "loss: 0.132237  [12928/30013]\n",
      "loss: 0.032621  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.094324  [  128/30013]\n",
      "loss: 0.067115  [12928/30013]\n",
      "loss: 0.023618  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.056500  [  128/30013]\n",
      "loss: 0.064354  [12928/30013]\n",
      "loss: 0.025461  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.036546  [  128/30013]\n",
      "loss: 0.049356  [12928/30013]\n",
      "loss: 0.043771  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.022497  [  128/30013]\n",
      "loss: 0.032278  [12928/30013]\n",
      "loss: 0.037420  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.015247  [  128/30013]\n",
      "loss: 0.038440  [12928/30013]\n",
      "loss: 0.016394  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.022898  [  128/30013]\n",
      "loss: 0.046341  [12928/30013]\n",
      "loss: 0.022459  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.019588  [  128/30013]\n",
      "loss: 0.016044  [12928/30013]\n",
      "loss: 0.008992  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.021171  [  128/30013]\n",
      "loss: 0.014200  [12928/30013]\n",
      "loss: 0.005748  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.013500  [  128/30013]\n",
      "loss: 0.038509  [12928/30013]\n",
      "loss: 0.004644  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.018633  [  128/30013]\n",
      "loss: 0.010778  [12928/30013]\n",
      "loss: 0.027339  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.023349  [  128/30013]\n",
      "loss: 0.013434  [12928/30013]\n",
      "loss: 0.012753  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.028584  [  128/30013]\n",
      "loss: 0.016189  [12928/30013]\n",
      "loss: 0.143045  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.025928  [  128/30013]\n",
      "loss: 0.029043  [12928/30013]\n",
      "loss: 0.006126  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.013294  [  128/30013]\n",
      "loss: 0.013343  [12928/30013]\n",
      "loss: 0.016806  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.014662  [  128/30013]\n",
      "loss: 0.044730  [12928/30013]\n",
      "loss: 0.026321  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.066447  [  128/30013]\n",
      "loss: 0.028841  [12928/30013]\n",
      "loss: 0.046352  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.054062  [  128/30013]\n",
      "loss: 0.008030  [12928/30013]\n",
      "loss: 0.011374  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.028050  [  128/30013]\n",
      "loss: 0.013014  [12928/30013]\n",
      "loss: 0.009665  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.010850  [  128/30013]\n",
      "loss: 0.014146  [12928/30013]\n",
      "loss: 0.005692  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.003875  [  128/30013]\n",
      "loss: 0.024628  [12928/30013]\n",
      "loss: 0.003116  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.003033  [  128/30013]\n",
      "loss: 0.001924  [12928/30013]\n",
      "loss: 0.001599  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.001675  [  128/30013]\n",
      "loss: 0.001674  [12928/30013]\n",
      "loss: 0.001195  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.001411  [  128/30013]\n",
      "loss: 0.001352  [12928/30013]\n",
      "loss: 0.001044  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.001219  [  128/30013]\n",
      "loss: 0.001178  [12928/30013]\n",
      "loss: 0.000927  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.001084  [  128/30013]\n",
      "loss: 0.001054  [12928/30013]\n",
      "loss: 0.000833  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.000976  [  128/30013]\n",
      "loss: 0.000956  [12928/30013]\n",
      "loss: 0.000754  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.000887  [  128/30013]\n",
      "loss: 0.000873  [12928/30013]\n",
      "loss: 0.000687  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.000809  [  128/30013]\n",
      "loss: 0.000802  [12928/30013]\n",
      "loss: 0.000628  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.000741  [  128/30013]\n",
      "loss: 0.000739  [12928/30013]\n",
      "loss: 0.000575  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.000679  [  128/30013]\n",
      "loss: 0.000682  [12928/30013]\n",
      "loss: 0.000528  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.000623  [  128/30013]\n",
      "loss: 0.000629  [12928/30013]\n",
      "loss: 0.000486  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.000572  [  128/30013]\n",
      "loss: 0.000581  [12928/30013]\n",
      "loss: 0.000447  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000524  [  128/30013]\n",
      "loss: 0.000535  [12928/30013]\n",
      "loss: 0.000412  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.000481  [  128/30013]\n",
      "loss: 0.000493  [12928/30013]\n",
      "loss: 0.000379  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.000440  [  128/30013]\n",
      "loss: 0.000453  [12928/30013]\n",
      "loss: 0.000348  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.000402  [  128/30013]\n",
      "loss: 0.000416  [12928/30013]\n",
      "loss: 0.000320  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.000367  [  128/30013]\n",
      "loss: 0.000381  [12928/30013]\n",
      "loss: 0.000293  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.000335  [  128/30013]\n",
      "loss: 0.000348  [12928/30013]\n",
      "loss: 0.000269  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000304  [  128/30013]\n",
      "loss: 0.000317  [12928/30013]\n",
      "loss: 0.000245  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000276  [  128/30013]\n",
      "loss: 0.000288  [12928/30013]\n",
      "loss: 0.000224  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000250  [  128/30013]\n",
      "loss: 0.000262  [12928/30013]\n",
      "loss: 0.000204  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000227  [  128/30013]\n",
      "loss: 0.000237  [12928/30013]\n",
      "loss: 0.000185  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000205  [  128/30013]\n",
      "loss: 0.000214  [12928/30013]\n",
      "loss: 0.000168  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000184  [  128/30013]\n",
      "loss: 0.000193  [12928/30013]\n",
      "loss: 0.000152  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000166  [  128/30013]\n",
      "loss: 0.000174  [12928/30013]\n",
      "loss: 0.000137  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000149  [  128/30013]\n",
      "loss: 0.000157  [12928/30013]\n",
      "loss: 0.000124  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000134  [  128/30013]\n",
      "loss: 0.000141  [12928/30013]\n",
      "loss: 0.000111  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000120  [  128/30013]\n",
      "loss: 0.000126  [12928/30013]\n",
      "loss: 0.000100  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000107  [  128/30013]\n",
      "loss: 0.000113  [12928/30013]\n",
      "loss: 0.000090  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000096  [  128/30013]\n",
      "loss: 0.000101  [12928/30013]\n",
      "loss: 0.000080  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000086  [  128/30013]\n",
      "loss: 0.000090  [12928/30013]\n",
      "loss: 0.000072  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000076  [  128/30013]\n",
      "loss: 0.000080  [12928/30013]\n",
      "loss: 0.000064  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000068  [  128/30013]\n",
      "loss: 0.000071  [12928/30013]\n",
      "loss: 0.000057  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000060  [  128/30013]\n",
      "loss: 0.000063  [12928/30013]\n",
      "loss: 0.000051  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000054  [  128/30013]\n",
      "loss: 0.000056  [12928/30013]\n",
      "loss: 0.000045  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000048  [  128/30013]\n",
      "loss: 0.000050  [12928/30013]\n",
      "loss: 0.000040  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000042  [  128/30013]\n",
      "loss: 0.000044  [12928/30013]\n",
      "loss: 0.000036  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000038  [  128/30013]\n",
      "loss: 0.000039  [12928/30013]\n",
      "loss: 0.000032  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000033  [  128/30013]\n",
      "loss: 0.000035  [12928/30013]\n",
      "loss: 0.000028  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000030  [  128/30013]\n",
      "loss: 0.000031  [12928/30013]\n",
      "loss: 0.000025  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000026  [  128/30013]\n",
      "loss: 0.000027  [12928/30013]\n",
      "loss: 0.000022  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.602589  [  128/30013]\n",
      "loss: 1.828941  [12928/30013]\n",
      "loss: 1.217051  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 46.5%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.180822  [  128/30013]\n",
      "loss: 0.932461  [12928/30013]\n",
      "loss: 0.698258  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.2%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.823019  [  128/30013]\n",
      "loss: 0.610850  [12928/30013]\n",
      "loss: 0.482292  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.2%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.634579  [  128/30013]\n",
      "loss: 0.453716  [12928/30013]\n",
      "loss: 0.334725  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.4%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.454154  [  128/30013]\n",
      "loss: 0.355419  [12928/30013]\n",
      "loss: 0.288868  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.1%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.336402  [  128/30013]\n",
      "loss: 0.305684  [12928/30013]\n",
      "loss: 0.238616  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.2%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.292279  [  128/30013]\n",
      "loss: 0.285551  [12928/30013]\n",
      "loss: 0.193614  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.3%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.294076  [  128/30013]\n",
      "loss: 0.272781  [12928/30013]\n",
      "loss: 0.186843  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.5%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.266838  [  128/30013]\n",
      "loss: 0.270640  [12928/30013]\n",
      "loss: 0.163002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.9%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.136240  [  128/30013]\n",
      "loss: 0.163645  [12928/30013]\n",
      "loss: 0.180256  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.6%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.139154  [  128/30013]\n",
      "loss: 0.118207  [12928/30013]\n",
      "loss: 0.106322  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.8%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.098348  [  128/30013]\n",
      "loss: 0.139556  [12928/30013]\n",
      "loss: 0.110563  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.1%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.081077  [  128/30013]\n",
      "loss: 0.149860  [12928/30013]\n",
      "loss: 0.275673  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.3%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.105527  [  128/30013]\n",
      "loss: 0.104260  [12928/30013]\n",
      "loss: 0.074278  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.3%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.109508  [  128/30013]\n",
      "loss: 0.092799  [12928/30013]\n",
      "loss: 0.043778  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.079549  [  128/30013]\n",
      "loss: 0.102849  [12928/30013]\n",
      "loss: 0.041107  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.099122  [  128/30013]\n",
      "loss: 0.097503  [12928/30013]\n",
      "loss: 0.092431  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.067051  [  128/30013]\n",
      "loss: 0.064944  [12928/30013]\n",
      "loss: 0.031631  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.058726  [  128/30013]\n",
      "loss: 0.101556  [12928/30013]\n",
      "loss: 0.026427  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.059107  [  128/30013]\n",
      "loss: 0.040208  [12928/30013]\n",
      "loss: 0.048250  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.042539  [  128/30013]\n",
      "loss: 0.069972  [12928/30013]\n",
      "loss: 0.012452  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.056199  [  128/30013]\n",
      "loss: 0.045946  [12928/30013]\n",
      "loss: 0.031942  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.071523  [  128/30013]\n",
      "loss: 0.030279  [12928/30013]\n",
      "loss: 0.019992  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.065545  [  128/30013]\n",
      "loss: 0.041485  [12928/30013]\n",
      "loss: 0.012686  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.014443  [  128/30013]\n",
      "loss: 0.024772  [12928/30013]\n",
      "loss: 0.018017  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.017297  [  128/30013]\n",
      "loss: 0.022850  [12928/30013]\n",
      "loss: 0.016095  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.029780  [  128/30013]\n",
      "loss: 0.013205  [12928/30013]\n",
      "loss: 0.035674  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.010954  [  128/30013]\n",
      "loss: 0.019166  [12928/30013]\n",
      "loss: 0.010187  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.025527  [  128/30013]\n",
      "loss: 0.024424  [12928/30013]\n",
      "loss: 0.015437  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.011296  [  128/30013]\n",
      "loss: 0.010867  [12928/30013]\n",
      "loss: 0.006757  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.034946  [  128/30013]\n",
      "loss: 0.013969  [12928/30013]\n",
      "loss: 0.013231  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.017406  [  128/30013]\n",
      "loss: 0.031180  [12928/30013]\n",
      "loss: 0.021365  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.015202  [  128/30013]\n",
      "loss: 0.015614  [12928/30013]\n",
      "loss: 0.033220  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.050313  [  128/30013]\n",
      "loss: 0.016096  [12928/30013]\n",
      "loss: 0.016684  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.012825  [  128/30013]\n",
      "loss: 0.011797  [12928/30013]\n",
      "loss: 0.019011  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.015236  [  128/30013]\n",
      "loss: 0.009623  [12928/30013]\n",
      "loss: 0.004627  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.006327  [  128/30013]\n",
      "loss: 0.008963  [12928/30013]\n",
      "loss: 0.010191  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.004617  [  128/30013]\n",
      "loss: 0.014694  [12928/30013]\n",
      "loss: 0.003707  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.007813  [  128/30013]\n",
      "loss: 0.020641  [12928/30013]\n",
      "loss: 0.003135  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.003732  [  128/30013]\n",
      "loss: 0.007060  [12928/30013]\n",
      "loss: 0.009367  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.003223  [  128/30013]\n",
      "loss: 0.004128  [12928/30013]\n",
      "loss: 0.005653  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.003039  [  128/30013]\n",
      "loss: 0.037807  [12928/30013]\n",
      "loss: 0.013397  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.012713  [  128/30013]\n",
      "loss: 0.013643  [12928/30013]\n",
      "loss: 0.008397  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.018432  [  128/30013]\n",
      "loss: 0.021820  [12928/30013]\n",
      "loss: 0.009727  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.010591  [  128/30013]\n",
      "loss: 0.043474  [12928/30013]\n",
      "loss: 0.002864  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.018394  [  128/30013]\n",
      "loss: 0.003166  [12928/30013]\n",
      "loss: 0.003102  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.003915  [  128/30013]\n",
      "loss: 0.005934  [12928/30013]\n",
      "loss: 0.007204  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.008289  [  128/30013]\n",
      "loss: 0.007293  [12928/30013]\n",
      "loss: 0.008664  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.071352  [  128/30013]\n",
      "loss: 0.023128  [12928/30013]\n",
      "loss: 0.021718  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.034644  [  128/30013]\n",
      "loss: 0.005941  [12928/30013]\n",
      "loss: 0.007660  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.017273  [  128/30013]\n",
      "loss: 0.009579  [12928/30013]\n",
      "loss: 0.021461  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.003743  [  128/30013]\n",
      "loss: 0.005951  [12928/30013]\n",
      "loss: 0.003558  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.025923  [  128/30013]\n",
      "loss: 0.007518  [12928/30013]\n",
      "loss: 0.004051  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.026055  [  128/30013]\n",
      "loss: 0.013628  [12928/30013]\n",
      "loss: 0.009737  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.004883  [  128/30013]\n",
      "loss: 0.028590  [12928/30013]\n",
      "loss: 0.010353  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.004935  [  128/30013]\n",
      "loss: 0.002821  [12928/30013]\n",
      "loss: 0.000790  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.003851  [  128/30013]\n",
      "loss: 0.001284  [12928/30013]\n",
      "loss: 0.000575  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.001090  [  128/30013]\n",
      "loss: 0.001057  [12928/30013]\n",
      "loss: 0.000590  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000642  [  128/30013]\n",
      "loss: 0.000856  [12928/30013]\n",
      "loss: 0.000455  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000555  [  128/30013]\n",
      "loss: 0.000730  [12928/30013]\n",
      "loss: 0.000421  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000497  [  128/30013]\n",
      "loss: 0.000641  [12928/30013]\n",
      "loss: 0.000390  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000450  [  128/30013]\n",
      "loss: 0.000571  [12928/30013]\n",
      "loss: 0.000363  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000410  [  128/30013]\n",
      "loss: 0.000513  [12928/30013]\n",
      "loss: 0.000338  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000377  [  128/30013]\n",
      "loss: 0.000465  [12928/30013]\n",
      "loss: 0.000315  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000347  [  128/30013]\n",
      "loss: 0.000424  [12928/30013]\n",
      "loss: 0.000293  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000322  [  128/30013]\n",
      "loss: 0.000388  [12928/30013]\n",
      "loss: 0.000274  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000298  [  128/30013]\n",
      "loss: 0.000356  [12928/30013]\n",
      "loss: 0.000255  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000278  [  128/30013]\n",
      "loss: 0.000328  [12928/30013]\n",
      "loss: 0.000238  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000258  [  128/30013]\n",
      "loss: 0.000303  [12928/30013]\n",
      "loss: 0.000222  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000241  [  128/30013]\n",
      "loss: 0.000280  [12928/30013]\n",
      "loss: 0.000206  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000225  [  128/30013]\n",
      "loss: 0.000259  [12928/30013]\n",
      "loss: 0.000192  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000210  [  128/30013]\n",
      "loss: 0.000239  [12928/30013]\n",
      "loss: 0.000178  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000195  [  128/30013]\n",
      "loss: 0.000221  [12928/30013]\n",
      "loss: 0.000166  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000182  [  128/30013]\n",
      "loss: 0.000204  [12928/30013]\n",
      "loss: 0.000153  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000170  [  128/30013]\n",
      "loss: 0.000188  [12928/30013]\n",
      "loss: 0.000142  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000158  [  128/30013]\n",
      "loss: 0.000174  [12928/30013]\n",
      "loss: 0.000131  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000146  [  128/30013]\n",
      "loss: 0.000160  [12928/30013]\n",
      "loss: 0.000121  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000136  [  128/30013]\n",
      "loss: 0.000147  [12928/30013]\n",
      "loss: 0.000112  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000126  [  128/30013]\n",
      "loss: 0.000135  [12928/30013]\n",
      "loss: 0.000103  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000116  [  128/30013]\n",
      "loss: 0.000124  [12928/30013]\n",
      "loss: 0.000095  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000107  [  128/30013]\n",
      "loss: 0.000113  [12928/30013]\n",
      "loss: 0.000087  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000099  [  128/30013]\n",
      "loss: 0.000104  [12928/30013]\n",
      "loss: 0.000080  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000091  [  128/30013]\n",
      "loss: 0.000095  [12928/30013]\n",
      "loss: 0.000073  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000084  [  128/30013]\n",
      "loss: 0.000086  [12928/30013]\n",
      "loss: 0.000067  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000077  [  128/30013]\n",
      "loss: 0.000079  [12928/30013]\n",
      "loss: 0.000061  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000070  [  128/30013]\n",
      "loss: 0.000072  [12928/30013]\n",
      "loss: 0.000056  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000064  [  128/30013]\n",
      "loss: 0.000065  [12928/30013]\n",
      "loss: 0.000051  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000058  [  128/30013]\n",
      "loss: 0.000059  [12928/30013]\n",
      "loss: 0.000046  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000053  [  128/30013]\n",
      "loss: 0.000054  [12928/30013]\n",
      "loss: 0.000042  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000048  [  128/30013]\n",
      "loss: 0.000049  [12928/30013]\n",
      "loss: 0.000038  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.621287  [  128/30013]\n",
      "loss: 1.829676  [12928/30013]\n",
      "loss: 1.197585  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 46.6%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.165823  [  128/30013]\n",
      "loss: 0.890569  [12928/30013]\n",
      "loss: 0.663839  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.1%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.796270  [  128/30013]\n",
      "loss: 0.605412  [12928/30013]\n",
      "loss: 0.490454  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.8%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.627809  [  128/30013]\n",
      "loss: 0.464738  [12928/30013]\n",
      "loss: 0.361169  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.6%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.496344  [  128/30013]\n",
      "loss: 0.365695  [12928/30013]\n",
      "loss: 0.299602  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.1%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.334608  [  128/30013]\n",
      "loss: 0.299646  [12928/30013]\n",
      "loss: 0.254771  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.3%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.296920  [  128/30013]\n",
      "loss: 0.273770  [12928/30013]\n",
      "loss: 0.210227  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.1%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.248769  [  128/30013]\n",
      "loss: 0.239160  [12928/30013]\n",
      "loss: 0.203093  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.1%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.193708  [  128/30013]\n",
      "loss: 0.205828  [12928/30013]\n",
      "loss: 0.126738  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.9%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.181516  [  128/30013]\n",
      "loss: 0.152968  [12928/30013]\n",
      "loss: 0.136786  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.4%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.153762  [  128/30013]\n",
      "loss: 0.136777  [12928/30013]\n",
      "loss: 0.174148  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.9%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.209602  [  128/30013]\n",
      "loss: 0.127994  [12928/30013]\n",
      "loss: 0.110672  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.9%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.089533  [  128/30013]\n",
      "loss: 0.083153  [12928/30013]\n",
      "loss: 0.094918  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.3%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.111463  [  128/30013]\n",
      "loss: 0.114743  [12928/30013]\n",
      "loss: 0.056816  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.8%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.081667  [  128/30013]\n",
      "loss: 0.083703  [12928/30013]\n",
      "loss: 0.041916  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.5%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.050669  [  128/30013]\n",
      "loss: 0.066681  [12928/30013]\n",
      "loss: 0.026438  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.095538  [  128/30013]\n",
      "loss: 0.116650  [12928/30013]\n",
      "loss: 0.027477  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.029797  [  128/30013]\n",
      "loss: 0.050305  [12928/30013]\n",
      "loss: 0.024243  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.025117  [  128/30013]\n",
      "loss: 0.061049  [12928/30013]\n",
      "loss: 0.022258  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.022912  [  128/30013]\n",
      "loss: 0.042346  [12928/30013]\n",
      "loss: 0.022446  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.034315  [  128/30013]\n",
      "loss: 0.042331  [12928/30013]\n",
      "loss: 0.043000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.044028  [  128/30013]\n",
      "loss: 0.056016  [12928/30013]\n",
      "loss: 0.077723  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.074621  [  128/30013]\n",
      "loss: 0.031310  [12928/30013]\n",
      "loss: 0.081079  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.021369  [  128/30013]\n",
      "loss: 0.025411  [12928/30013]\n",
      "loss: 0.076073  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.035562  [  128/30013]\n",
      "loss: 0.017084  [12928/30013]\n",
      "loss: 0.041333  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.030355  [  128/30013]\n",
      "loss: 0.025104  [12928/30013]\n",
      "loss: 0.038106  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.014138  [  128/30013]\n",
      "loss: 0.024418  [12928/30013]\n",
      "loss: 0.010953  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.064394  [  128/30013]\n",
      "loss: 0.035654  [12928/30013]\n",
      "loss: 0.020508  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.044842  [  128/30013]\n",
      "loss: 0.032677  [12928/30013]\n",
      "loss: 0.022098  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.031932  [  128/30013]\n",
      "loss: 0.025149  [12928/30013]\n",
      "loss: 0.010722  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.016909  [  128/30013]\n",
      "loss: 0.013976  [12928/30013]\n",
      "loss: 0.008426  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.010171  [  128/30013]\n",
      "loss: 0.017380  [12928/30013]\n",
      "loss: 0.013844  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.005993  [  128/30013]\n",
      "loss: 0.005148  [12928/30013]\n",
      "loss: 0.003388  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.003967  [  128/30013]\n",
      "loss: 0.002612  [12928/30013]\n",
      "loss: 0.002456  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.003733  [  128/30013]\n",
      "loss: 0.002230  [12928/30013]\n",
      "loss: 0.002075  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.002860  [  128/30013]\n",
      "loss: 0.001956  [12928/30013]\n",
      "loss: 0.001825  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.002396  [  128/30013]\n",
      "loss: 0.001752  [12928/30013]\n",
      "loss: 0.001631  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.002069  [  128/30013]\n",
      "loss: 0.001582  [12928/30013]\n",
      "loss: 0.001467  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.001815  [  128/30013]\n",
      "loss: 0.001435  [12928/30013]\n",
      "loss: 0.001323  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.001607  [  128/30013]\n",
      "loss: 0.001305  [12928/30013]\n",
      "loss: 0.001196  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.001431  [  128/30013]\n",
      "loss: 0.001187  [12928/30013]\n",
      "loss: 0.001081  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.001280  [  128/30013]\n",
      "loss: 0.001079  [12928/30013]\n",
      "loss: 0.000977  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.001147  [  128/30013]\n",
      "loss: 0.000981  [12928/30013]\n",
      "loss: 0.000883  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.001029  [  128/30013]\n",
      "loss: 0.000890  [12928/30013]\n",
      "loss: 0.000797  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.000924  [  128/30013]\n",
      "loss: 0.000807  [12928/30013]\n",
      "loss: 0.000720  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.000829  [  128/30013]\n",
      "loss: 0.000731  [12928/30013]\n",
      "loss: 0.000649  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.000745  [  128/30013]\n",
      "loss: 0.000661  [12928/30013]\n",
      "loss: 0.000585  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.000668  [  128/30013]\n",
      "loss: 0.000597  [12928/30013]\n",
      "loss: 0.000526  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.000599  [  128/30013]\n",
      "loss: 0.000538  [12928/30013]\n",
      "loss: 0.000473  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.000537  [  128/30013]\n",
      "loss: 0.000484  [12928/30013]\n",
      "loss: 0.000425  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.000481  [  128/30013]\n",
      "loss: 0.000435  [12928/30013]\n",
      "loss: 0.000381  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000430  [  128/30013]\n",
      "loss: 0.000391  [12928/30013]\n",
      "loss: 0.000342  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.000384  [  128/30013]\n",
      "loss: 0.000350  [12928/30013]\n",
      "loss: 0.000306  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.000343  [  128/30013]\n",
      "loss: 0.000313  [12928/30013]\n",
      "loss: 0.000273  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.000306  [  128/30013]\n",
      "loss: 0.000280  [12928/30013]\n",
      "loss: 0.000244  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.000273  [  128/30013]\n",
      "loss: 0.000250  [12928/30013]\n",
      "loss: 0.000218  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.000243  [  128/30013]\n",
      "loss: 0.000223  [12928/30013]\n",
      "loss: 0.000194  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000217  [  128/30013]\n",
      "loss: 0.000199  [12928/30013]\n",
      "loss: 0.000173  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000193  [  128/30013]\n",
      "loss: 0.000177  [12928/30013]\n",
      "loss: 0.000153  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000171  [  128/30013]\n",
      "loss: 0.000157  [12928/30013]\n",
      "loss: 0.000136  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000152  [  128/30013]\n",
      "loss: 0.000139  [12928/30013]\n",
      "loss: 0.000121  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000135  [  128/30013]\n",
      "loss: 0.000124  [12928/30013]\n",
      "loss: 0.000107  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000120  [  128/30013]\n",
      "loss: 0.000110  [12928/30013]\n",
      "loss: 0.000095  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000106  [  128/30013]\n",
      "loss: 0.000097  [12928/30013]\n",
      "loss: 0.000084  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000094  [  128/30013]\n",
      "loss: 0.000086  [12928/30013]\n",
      "loss: 0.000074  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000084  [  128/30013]\n",
      "loss: 0.000076  [12928/30013]\n",
      "loss: 0.000066  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000074  [  128/30013]\n",
      "loss: 0.000067  [12928/30013]\n",
      "loss: 0.000058  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000066  [  128/30013]\n",
      "loss: 0.000059  [12928/30013]\n",
      "loss: 0.000051  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000058  [  128/30013]\n",
      "loss: 0.000052  [12928/30013]\n",
      "loss: 0.000045  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000051  [  128/30013]\n",
      "loss: 0.000046  [12928/30013]\n",
      "loss: 0.000040  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000045  [  128/30013]\n",
      "loss: 0.000041  [12928/30013]\n",
      "loss: 0.000035  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000040  [  128/30013]\n",
      "loss: 0.000036  [12928/30013]\n",
      "loss: 0.000031  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000035  [  128/30013]\n",
      "loss: 0.000032  [12928/30013]\n",
      "loss: 0.000027  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000031  [  128/30013]\n",
      "loss: 0.000028  [12928/30013]\n",
      "loss: 0.000024  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000028  [  128/30013]\n",
      "loss: 0.000025  [12928/30013]\n",
      "loss: 0.000021  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000024  [  128/30013]\n",
      "loss: 0.000022  [12928/30013]\n",
      "loss: 0.000019  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000022  [  128/30013]\n",
      "loss: 0.000019  [12928/30013]\n",
      "loss: 0.000017  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000019  [  128/30013]\n",
      "loss: 0.000017  [12928/30013]\n",
      "loss: 0.000015  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000017  [  128/30013]\n",
      "loss: 0.000015  [12928/30013]\n",
      "loss: 0.000013  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000015  [  128/30013]\n",
      "loss: 0.000013  [12928/30013]\n",
      "loss: 0.000011  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000013  [  128/30013]\n",
      "loss: 0.000011  [12928/30013]\n",
      "loss: 0.000010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000012  [  128/30013]\n",
      "loss: 0.000010  [12928/30013]\n",
      "loss: 0.000009  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000010  [  128/30013]\n",
      "loss: 0.000009  [12928/30013]\n",
      "loss: 0.000008  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000009  [  128/30013]\n",
      "loss: 0.000008  [12928/30013]\n",
      "loss: 0.000007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000008  [  128/30013]\n",
      "loss: 0.000007  [12928/30013]\n",
      "loss: 0.000006  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000007  [  128/30013]\n",
      "loss: 0.000006  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000006  [  128/30013]\n",
      "loss: 0.000005  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000005  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n"
     ]
    }
   ],
   "source": [
     "# Train one target model per epoch budget in `epoch_list` (each trained from scratch).\n",
     "# Relies on notebook globals defined in earlier cells: epoch_list, model, model_transform,\n",
     "# batch_size, device, LEARNING_RATE, data_name, weight_dir.\n",
     "for epochs in epoch_list:\n",
     "    # Fixed seed/prop_keep so every epoch budget trains on the same member split.\n",
     "    # NOTE(review): x_test/y_test and the keep masks are loaded but unused in this cell.\n",
     "    (x_train, y_train), (x_test, y_test), train_keep_exp, test_keep_exp = load_Purchase100_limited(0, 100, prop_keep=0.5, seed=0)\n",
     "    training_data = CustomDataset(x_train, y_train, model_transform)\n",
     "    train_dataloader = DataLoader(training_data, batch_size=batch_size)\n",
     "    # Build the architecture by name; the MLP variants take (n_features, n_classes),\n",
     "    # the CNN variant takes (n_classes, dataset_name).\n",
     "    if model in ['NN', 'NN_4layer']:\n",
     "        TargetModel = globals()['create_{}_model'.format(model)](x_train.shape[1], y_train.max()+1)\n",
     "    elif model == 'CNN':\n",
     "        TargetModel = globals()['create_{}_model'.format(model)](y_train.max()+1, data_name)\n",
     "    # print(TargetModel)\n",
     "    TargetModel.to(device)\n",
     "    loss_fn = nn.CrossEntropyLoss()\n",
     "    optimizer = torch.optim.Adam(TargetModel.parameters(), lr=LEARNING_RATE)\n",
     "    for t in range(epochs):\n",
     "        print(f\"Epoch {t+1}\\n-------------------------------\")\n",
     "        train(train_dataloader, TargetModel, loss_fn, optimizer, device)\n",
     "    print(\"Done!\")\n",
     "    # Checkpoint name encodes dataset, architecture and epoch budget; trailing 0 = single target model.\n",
     "    weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_targetmodel{}.pth\".format(data_name, model, epochs, 0))\n",
     "    #weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_model{}.pth\".format(data_name, model, epochs, i))\n",
     "    torch.save(TargetModel.state_dict(), weight_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 67,
   "id": "e69f2443-dbab-4557-b408-6fbdb3b4e9c2",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Accumulators for attack accuracy across the epoch budgets in epoch_list.\n",
     "shadow_result = []  # shadow-model attack accuracy, one entry per epoch budget\n",
     "LIRA_result = []  # LiRA attack accuracy on the top-x high-risk samples, one entry per epoch budget\n",
     "x = 6000  # number of highest-privacy-risk samples the LiRA attack below is evaluated on"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 68,
   "id": "5f8997e0-a4dd-40b9-a2a7-ba15b37007df",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " Error: \n",
      " Accuracy: 89.2%  \n",
      "\n",
      "AUC value is: 0.5239260156038851\n",
      "Accuracy is: 0.49993333333333334\n",
      "AUC value is: 0.5347508904143339\n",
      "Accuracy is: 0.4918166666666667\n",
      " Error: \n",
      " Accuracy: 89.1%  \n",
      "\n",
      "AUC value is: 0.5671029870448943\n",
      "Accuracy is: 0.5044333333333333\n",
      "AUC value is: 0.5748046268244243\n",
      "Accuracy is: 0.5010166666666667\n",
      " Error: \n",
      " Accuracy: 88.4%  \n",
      "\n",
      "AUC value is: 0.5878290048256687\n",
      "Accuracy is: 0.5185833333333333\n",
      "AUC value is: 0.6022559119791656\n",
      "Accuracy is: 0.5116\n",
      " Error: \n",
      " Accuracy: 92.5%  \n",
      "\n",
      "AUC value is: 0.6315488535908402\n",
      "Accuracy is: 0.54075\n",
      "AUC value is: 0.6480061716811589\n",
      "Accuracy is: 0.5278666666666667\n",
      " Error: \n",
      " Accuracy: 93.3%  \n",
      "\n",
      "AUC value is: 0.6661579956452237\n",
      "Accuracy is: 0.5730833333333333\n",
      "AUC value is: 0.6945312326397536\n",
      "Accuracy is: 0.5542666666666667\n",
      " Error: \n",
      " Accuracy: 94.0%  \n",
      "\n",
      "AUC value is: 0.683637036705177\n",
      "Accuracy is: 0.6410666666666667\n",
      "AUC value is: 0.7596963737652079\n",
      "Accuracy is: 0.6044666666666667\n",
      " Error: \n",
      " Accuracy: 93.7%  \n",
      "\n",
      "AUC value is: 0.6898232489779212\n",
      "Accuracy is: 0.6999166666666666\n",
      "AUC value is: 0.7868633555332301\n",
      "Accuracy is: 0.63635\n",
      " Error: \n",
      " Accuracy: 93.8%  \n",
      "\n",
      "AUC value is: 0.6969417219812789\n",
      "Accuracy is: 0.7383166666666666\n",
      "AUC value is: 0.8154163397837348\n",
      "Accuracy is: 0.6772333333333334\n",
      " Error: \n",
      " Accuracy: 93.4%  \n",
      "\n",
      "AUC value is: 0.6985702106204063\n",
      "Accuracy is: 0.7259833333333333\n",
      "AUC value is: 0.8064887736628918\n",
      "Accuracy is: 0.6632\n",
      " Error: \n",
      " Accuracy: 93.4%  \n",
      "\n",
      "AUC value is: 0.5021558865159387\n",
      "Accuracy is: 0.5016166666666667\n",
      "AUC value is: 0.4953293263451735\n",
      "Accuracy is: 0.4930333333333333\n"
     ]
    }
   ],
   "source": [
     "# Evaluate each saved target model with two membership-inference attacks.\n",
     "# Depends on notebook globals: epoch_list, X_data, Y_data, train_keep, model, weight_dir,\n",
     "# data_name, device, batch_size, model_transform, attack_transform, attack_model,\n",
     "# score_all, pri_risk_all, x, shadow_result, LIRA_result.\n",
     "for epochs in epoch_list:\n",
     "    # The full dataset is the evaluation set; train_keep[0] is the target model's membership mask.\n",
     "\n",
     "    x_test = X_data\n",
     "    y_test = Y_data\n",
     "    mem_label = train_keep[0]\n",
     "    # Load the target model trained with this epoch budget.\n",
     "    # 600 input features / 100 classes are hard-coded for Purchase100.\n",
     "    TargetModel = globals()['create_{}_model'.format(model)](600, 100)\n",
     "    weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_targetmodel{}.pth\".format(data_name, model, epochs, 0))\n",
     "    TargetModel.load_state_dict(torch.load(weight_path))\n",
     "    TargetModel.to(device)\n",
     "    \n",
     "    test_data = CustomDataset(x_test, y_test, model_transform)\n",
     "    test_dataloader = DataLoader(test_data, batch_size=batch_size)\n",
     "    \n",
     "    # Per-sample confidence vectors and labels from the target model.\n",
     "    conf_data, label_data = get_model_pred(test_dataloader, TargetModel, device)\n",
     "    conf_data = conf_data.detach().cpu().numpy()\n",
     "    label_data = label_data.detach().cpu().numpy()\n",
     "    conf_data = conf_data.astype(np.float64)\n",
     "    score_tar = cal_score(conf_data.copy(), label_data)\n",
     "\n",
     "    # Shadow-model attack: feed the top-3 confidences to the pre-trained attack model.\n",
     "    targetX = conf_data\n",
     "    targetY = mem_label\n",
     "    targetX = targetX.astype(np.float32)\n",
     "    targetX, _ = get_top_k_conf(3, targetX, targetX)\n",
     "    shadow_attack_data = CustomDataset(targetX, targetY, attack_transform)\n",
     "    shadow_attack_dataloader = DataLoader(shadow_attack_data, batch_size=batch_size, shuffle=False)\n",
     "    attack_test_scores, attack_test_mem = get_attack_pred(shadow_attack_dataloader, attack_model, device)\n",
     "    attack_test_scores, attack_test_mem = attack_test_scores.detach().cpu().numpy(), attack_test_mem.detach().cpu().numpy()\n",
     "    accuracy = evaluate_ROC(attack_test_scores, attack_test_mem)\n",
     "    shadow_result.append(accuracy)\n",
     "\n",
     "    # Risk-assessment (LiRA) attack restricted to the x highest-privacy-risk samples.\n",
     "    # NOTE(review): score_tar is recomputed here identically to the call above -- redundant.\n",
     "    score_tar = cal_score(conf_data.copy(), label_data)\n",
     "    pri_risk_t = pri_risk_all\n",
     "    # Rank sample indices by privacy risk, highest first (argsort ascending, then flipped).\n",
     "    pri_risk_rank_t = np.argsort(pri_risk_t)\n",
     "    pri_risk_rank_t = np.flip(pri_risk_rank_t)\n",
     "\n",
     "    pred_result = LIRA_attack(train_keep, score_all, score_tar, mem_label)\n",
     "    evaluate_ROC(pred_result, mem_label, threshold=0)\n",
     "    # Keep only the x riskiest samples; a positive LiRA score means 'predicted member'.\n",
     "    pred_clip = pred_result[pri_risk_rank_t[:x]]\n",
     "    mem_clip = mem_label[pri_risk_rank_t[:x]]\n",
     "    pred_clip = pred_clip > 0\n",
     "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
     "    LIRA_result.append(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 69,
   "id": "eddcc506-ac11-4059-8814-8f4e17afba6f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.49993333333333334,\n",
       " 0.5044333333333333,\n",
       " 0.5185833333333333,\n",
       " 0.54075,\n",
       " 0.5730833333333333,\n",
       " 0.6410666666666667,\n",
       " 0.6999166666666666,\n",
       " 0.7383166666666666,\n",
       " 0.7259833333333333,\n",
       " 0.5016166666666667]"
      ]
     },
     "execution_count": 69,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "shadow_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 70,
   "id": "240fd8bc-8cd3-46fc-acb8-ad7cb6abec3a",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.5106666666666667,\n",
       " 0.595,\n",
       " 0.6678333333333333,\n",
       " 0.7735,\n",
       " 0.893,\n",
       " 0.9836666666666667,\n",
       " 0.98,\n",
       " 0.9725,\n",
       " 0.9773333333333334,\n",
       " 0.48983333333333334]"
      ]
     },
     "execution_count": 70,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "LIRA_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "19411c52-2fa3-42c7-a58b-2e508374a093",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "cf545338-f08b-4385-87c4-b848e8a80c8e",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0308f234-469f-4670-b4e4-51973cb27ce4",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "a9b3ad57-7f95-4bc7-8057-05aa0ca4e10a",
   "metadata": {},
   "source": [
    "### 绘制攻击成功率随风险变化曲线"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "id": "03e43bb9-0680-487b-8433-805cb202031f",
   "metadata": {},
   "outputs": [],
   "source": [
    "pred_all = conf_data_all.argmax(2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "id": "b7dda5b4-e79d-40b7-beb8-86d21143ac11",
   "metadata": {},
   "outputs": [],
   "source": [
    "base_att = (pred_all == Y_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "id": "a79890e7-c8ea-4ddd-9e0a-c51831260ad9",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(100, 60000)"
      ]
     },
     "execution_count": 41,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "base_att.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "id": "18020329-0447-496c-bffa-a59f865df12c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(100, 60000)"
      ]
     },
     "execution_count": 42,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_keep.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "id": "67ad94bc-8667-4f2b-ab7b-3d32a98e97d8",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(60000,)"
      ]
     },
     "execution_count": 43,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "pri_risk_all.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "id": "973858e9-c4fd-47cd-9941-c5730dfff80a",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Axes for the risk-vs-attack-accuracy curve: X = privacy risk, Y = attack accuracy.\n",
     "X_axi = []\n",
     "Y_axi = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "id": "9093a24b-eca9-4afb-928a-f0912abcdf58",
   "metadata": {},
   "outputs": [],
   "source": [
     "# For each of the first 10000 samples, measure the baseline attack's accuracy\n",
     "# across all 100 target models and pair it with that sample's privacy risk.\n",
     "for i in range(10000):\n",
     "    pred_t = base_att[:,i]\n",
     "    mem_t = train_keep[:,i]\n",
     "    risk_t = pri_risk_all[i]\n",
     "    acc = metrics.accuracy_score(mem_t, pred_t)\n",
     "    X_axi.append(risk_t)\n",
     "    Y_axi.append(acc)\n",
     "\n",
     "# Persist the (risk, attack accuracy) pairs for plotting.\n",
     "# NOTE(review): the filename is spelled 'Purhase100' -- likely a typo for 'Purchase100';\n",
     "# left unchanged because downstream consumers may reference this exact name.\n",
     "df=pd.DataFrame({'risk': X_axi, 'attack_acc': Y_axi })\n",
     "df.to_csv('Purhase100_risk_att.csv', index=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b6e285fa-d32d-488c-9e75-393538e0aea9",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6bef1726-f622-40d8-a7f2-dfab744c5caf",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1c9d0c88-8b55-48c9-8555-4487e54112b8",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b184f5a3-7be6-4c2f-8b95-7e95f2abd9c3",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "4d44c0fa-c7b9-4f39-a245-0f8e3671141c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(50000,)"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "pri_risk_rank.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "id": "030de03c-e397-4141-8f84-6cd9910e91a7",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
     "# Inputs: loss_data_all, pri_risk_rank, train_keep, conf_data_all, pri_risk_all.\n",
     "# Split the ranked samples into 100 bins of 500; for each bin, pool the baseline attack's\n",
     "# predictions over all 100 target models and record (mean risk, pooled accuracy).\n",
     "\n",
     "for i in range(100): # iterate over risk-rank bins of 500 samples each\n",
     "    start = i * 500\n",
     "    end = (i+1) * 500\n",
     "    risk_t = pri_risk_all[pri_risk_rank[start:end]]\n",
     "    for j in range(100): # iterate over the 100 target models\n",
     "        pred_temp = base_att[j][pri_risk_rank[start:end]]\n",
     "        mem_temp = train_keep[j][pri_risk_rank[start:end]]\n",
     "        # Concatenate every model's predictions/labels for this bin into one flat pair.\n",
     "        if j==0:\n",
     "            pred_t = pred_temp\n",
     "            mem_t = mem_temp\n",
     "        else:\n",
     "            pred_t = np.concatenate((pred_t, pred_temp), 0)\n",
     "            mem_t = np.concatenate((mem_t, mem_temp), 0)\n",
     "    acc = metrics.accuracy_score(mem_t, pred_t)\n",
     "    risk = np.mean(risk_t)\n",
     "    X_axi.append(risk)\n",
     "    Y_axi.append(acc)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "a4df62ec-4eb6-4c8f-a522-737a9e0d0e06",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "id": "238bf52f-5f07-4176-9642-bd34be61a5a6",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAGwCAYAAABGogSnAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABIJUlEQVR4nO3dd3xUVf7/8fekF1KAQAIhBELvYJBQBBUjiC6KuIqIgii4q+CicVVYBdTdn6zriqjLgoViF1Fsi18Eg4hAaKFXadITSCCF9Mzc3x+BgZhCBiaZmczr+XjMA3Ln3Dufy81435577rkmwzAMAQAAuAkPRxcAAABQkwg/AADArRB+AACAWyH8AAAAt0L4AQAAboXwAwAA3ArhBwAAuBUvRxdQ0ywWi06cOKGgoCCZTCZHlwMAAKrAMAxlZ2ercePG8vC4ur4btws/J06cUFRUlKPLAAAAV+Do0aNq0qTJVW3D7cJPUFCQpJJ/vODgYAdXAwAAqiIrK0tRUVHW8/jVcLvwc+FSV3BwMOEHAAAXY48hKwx4BgAAboXwAwAA3ArhBwAAuBXCDwAAcCuEHwAA4FYIPwAAwK0QfgAAgFsh/AAAALdC+AEAAG6F8AMAANwK4QcAALgVwg8AAHArbvdgUwAAYF+GYaig2KK8QrPyiswymaRGIf6OLqtChB8AAGq5wmKL8orMyi8yK7fQbA0p1j+LzMorLD7/s6Xk7xfa/m69/PKWF5llGBc/r0ezevr8z70ct8OXQfgBAMCBzBbjfNAoVn6hRblFxWXDSSV/5haZlX/+59zz4cT69/PLiy3G5QuxEx9PD5lMNfZxV4TwAwBABSwWQ/nF50PG74JF3vnQUV4vSF7h73tVKv6z0Gypsf3x9DApwNtTfj6e8vf2VICPp/y8L/n7Jcv9vUveC/DxlL/PJX8/397//HLr388v9/J0/uHEhB8AgEv6/TiTMj0iv+8FuWS59TJPUenLPL8PLflFNRdMTCaVDhaV/GkNJz6eCvC+NJx4nW/nIX9vrzLreXuaZHL2bpkaQPgBAFSL8seZFCuv0HLxMk8Fl26uZJxJdfPz9qig18NL/hfeuyR8BPh4le5VOb9emV6V88t9vTwIJjWE8AMAuGIZuYU6cPqcDpzKKfnz9DkdOJ2jY2dzVWSu2XEmft4lgaPCXpPfhRabLvl4ecrDg2BSWxB+AACVMlsMncjI0/7T53Tg1LlSYSc9p/Cy6186zqS8Xo9LL91U5ZKPv4+nAry95OdzsSfGFcaZwHkQfgAAkqTcwmIdPJ1j7b05cD7sHErLUUFxxWNfGof4qUXDOmrRoI5aNAhUiwZ1FB0WqDq+JZeAfLwIJnAuhB8AcCOGYej0uYIyl6kOnDqn4xl5Fa7n4+WhmLDAiwHnfNhpHhaoQF9OJXAt/MYCQC1UZLboyJlcHTh17vzlqothJzu/uML16gX6WHtvWjSooxYNS/7epG6APBnzglqC8AMALiwrv+j8OJyLl6kOnD6nw+m5FU5s52GSouoFlLpM1bJhHcU0qKN6gT41vAdAzSP8AICTs1gMnczKvzjY+HxPzv7T53Q6u6DC9QJ8PEsFnAuXqqLrB8jP27MG9wBwLoQfAHASFouhg2k52pOSVeoy1cHTOcorMle4Xniw78XLVJeMx4kI9uP2bKAchB8AcADDMHQ4PVfbjmdq+7EMbTuWqR3HM5VTWH7I8fIwqVlYYKnLVC0a1FFMg0AF+XnXcPWAayP8AEA1MwxDxzPytP1YprYdz9S2YxnafixTWeUMPPbz9lDbiGC1alin1O3jUfUC5M1cNoBdEH4AwM5Ss/K19WiGth/P1LZjmdp+PFNnypkM0MfLQ+0aBatLkxB1igxR5yahatEgkAn7gGpG+AGAq5B2rqCkR+dYprYfL7l8daqcQcheHia1bRSkTpGh6nw+7LQOD2ICQMABCD8AUEUZuYUXe3OOlVy+OpGZX6adh0lq
HR5U0psTFarOkSFqExHEHVaAkyD8AEA5svOLtON4lrYfz9DW82HnyJncMu1MJqlFgzrqHBmiTk1C1LlJiNo3CpG/D0EHcFaEHwBuL7ewWDtPZJ3v0cnQtuOZOng6p9y2zeoHqFOTUGvY6dA4mLutABdD+AHgdorMFm09mqFV+9O0Zn+6Nh05W+5syJGh/urcpGQgcucmIerYOEQhAQQdwNURfgDUeoZhaG9qtlbvT9fq/WladzC9zHw64cG+JSHnfI9Op8gQ1a/j66CKAVQnwg+AWul4Rp5W70sr6d05kK60c6XvwKob4K3eLcN0Xcsw9WkRpqb1AxxUKYCaRvgBUCuczSlU0sGSnp3V+9P0W3rpwcn+3p7q0bye+rSsrz4tw9QuIphHPwBuivADwCXlF5m14bcz1nE7O05kyrhk2I6nh0ldmoToupZh6t0yTN2ahsrXizuwABB+ALiIYrNF249nas2BdK3al6bkw2dVaLaUatM6vI56tyi5lBUXU4+7sACUi/ADwCkZhqEDp3O0en/JuJ21B9OV/btnYTUK8VOflmEll7JahKlhsJ+DqgXgSgg/AJzGycw8rd6frjXnBymnZJWePTnYz0u9WtS3XsqKCQuUycS4HQC2IfwAcJhLByknHUjXwbTSEwv6eHno2mZ1rZeyOkaGyJNBygCuEuEHQI3JLSzW+kNntOZASeDZdTKr1CBlD5PUqUmo+rQouSMrNrouz8MCYHeEHwDVprDYoi1HM7R6f5rWHEjTlqMZKjKXnkn5wiDl3i3qKy6mvkL8GaQMoHoRfgDYjcViaNfJLK05kKbV+9O14bczyv3dTMqRof7WuXZ6taivhkEMUgZQswg/AK5KZm6Rvtt2omTczsF0ZeQWlXq/fqCPerWor94tSu7KalovgEHKAByK8APgiuQVmjV/zW+atWK/si65BT3Qx1NxMfXV+/y4nTbhQcykDMCpEH4A2KTYbNHC5GOa8eOvSs0qeV5W6/A6Gty5sXq3DFPnJiHy9vRwcJUAUDHCD4AqMQxD/7cjRf/+Ya/1lvTIUH89NaC17ugayS3oAFwG4QfAZa3Zn6ZXluzR1mOZkqR6gT4af2NLjejZlOdlAXA5hB8AFdpxPFOvLNmjX/alSSoZzzOmb4zG9otRHV/+8wHANfFfLwBl/JaWo38v3av/bTspSfL2NGlEXLTG92+psDq+Dq4OAK4O4QeA1ansfL2ZuE+frT+qYoshk0ka0jVST8a3VtP6AY4uDwDswuG3ZMycOVPNmjWTn5+f4uLitH79+grbFhUV6aWXXlKLFi3k5+enLl26aMmSJTVYLVA7ZeUX6d8/7NX1/1qhj9YeUbHF0A1tGmjx4331+rCuBB8AtYpDe34WLFighIQEzZ49W3FxcZoxY4YGDhyovXv3qmHDhmXaP//88/roo4/07rvvqm3btvrhhx905513as2aNerWrZsD9gBwbbmFxfp47RHNXLHfOjlht6ahevaWtuoZU9/B1QFA9TAZhmFcvln1iIuL07XXXqv//Oc/kiSLxaKoqCg9/vjjmjhxYpn2jRs31nPPPadx48ZZl911113y9/fXRx99VKXPzMrKUkhIiDIzMxUcHGyfHQFczOnsAn2Q9Js+XHvYGnpaNqyjpwe20YD24czADMDp2PP87bCen8LCQiUnJ2vSpEnWZR4eHoqPj1dSUlK56xQUFMjPr/RzgPz9/bVq1aoKP6egoEAFBQXWn7Oysq6ycsB17T+Vrfd+OaRFm4+rsNgiSYquH6BxN7TU0Gsi5cXkhADcgMPCT1pamsxms8LDw0stDw8P1549e8pdZ+DAgZo+fbr69eunFi1aKDExUYsWLZLZbC63vSRNmzZNL774ol1rB1yJYRhaf+iM3ll5UIl7TlmXd2saqj/1i9HN7SOYoBCAW3Gpu73eeOMNjR07Vm3btpXJZFKLFi00evRozZ07t8J1Jk2apISEBOvPWVlZioqKqolyAYcqNlu0ZGeK
3l150Do5ockkDWgfrkf6xSg2up6DKwQAx3BY+AkLC5Onp6dSU1NLLU9NTVVERES56zRo0EBff/218vPzlZ6ersaNG2vixImKiYmp8HN8fX3l68u8JHAfOQXF+nzjUc1ZdUjHzuZJkny9PPTH2CZ6+LrmimlQx8EVAoBjOSz8+Pj4KDY2VomJiRoyZIikkgHPiYmJGj9+fKXr+vn5KTIyUkVFRfryyy91zz331EDFgHM7lZWv+Wt+00drD1ufsl4v0Ecje0XrgZ7Rqs/khAAgycGXvRISEjRq1Ch1795dPXr00IwZM5STk6PRo0dLkkaOHKnIyEhNmzZNkrRu3TodP35cXbt21fHjx/XCCy/IYrHomWeeceRuAA71a2q23vvloL7efEKF5pJBzM3DAjWmb3PddU0T+Xnz7C0AuJRDw8+wYcN0+vRpTZkyRSkpKeratauWLFliHQR95MgReXhcvPskPz9fzz//vA4ePKg6dero1ltv1YcffqjQ0FAH7QHgGIZh6Jd9aXpv1SGt/PW0dXn36Lp6pF+M4tuFy4NBzABQLofO8+MIzPMDV5ZfZNa3W07ovVUH9WvqOUmSh0ka2CFCY/rGKDa6roMrBIDqUSvm+QFQdennCvTR2iP6cO1vSjtXKKnkCevDrm2q0X2aKaoej58AgKoi/ABObP+pbM1ZdUhfbro4KWHjED+N7tNcw3pEKdjP28EVAoDrIfwATsYwDK3en673Vh3Uir0Xx/N0aRKiMX1jdEvHCHkzEzMAXDHCD+Akis0WfbX5uOasOqQ9KdmSLk5KOPb8eB6euQUAV4/wAziBjb+d0fNf77CGngAfT93TPUqj+zRTdP1AB1cHALUL4QdwoPRzBfrn/+3RwuRjkqTQAG/9qV8L3dejqUICGM8DANWB8AM4gMVi6LMNR/XKkj3KzCuSJN17bZSeuaWt6gX6OLg6AKjdCD9ADdtxPFPPfb1DW49mSJLaNwrW34d0ZI4eAKghhB+ghmTmFWn60r36cO1hWQwpyNdLCQNa64Ge0fLi7i0AqDGEH6CaGYahb7ac0D8W71bauQJJ0h1dG+u5W9upYbCfg6sDAPdD+AGq0b7UbE3+ZofWHjwjSYppEKh/3NFRvVuGObgyAHBfhB+gGuxNydYHSb9pwYajKrYY8vP20OP9W2ls3xj5eHGJCwAcifAD2ElhsUVLd6Xog6TDWn/ojHX5ze3DNeUP7Xn+FgA4CcIPcJVOZubp03VH9OmGozqdXTKmx9PDpJvbhWtk72j1bsElLgBwJoQf4AoYhqE1B9L1YdJhLdudKrPFkCQ1CPLV8B5NNbxHlBqF+Du4SgBAeQg/gA2y84v0RfIxfbj2sA6ezrEuj2teTw/0itaA9hGM6QEAJ0f4Aapo/aEzeuKzzTqRmS9JCvTx1NBrmuiBXtFqHR7k4OoAAFVF+AEuo9hs0ZvL9+s/y/fJYkhN6wVobL8Y3dktUnV8+QoBgKvhv9xAJY6dzdUTn23RxsNnJUl/jG2iF2/voEBCDwC4LP4LDlRg8baTmrhom7LzixXk66V/3NlRd3SNdHRZAICrRPgBfie3sFgvfrtLCzYelSR1axqqN+/txjw9AFBLEH6AS+w4nqm/fLZZB0/nyGSSxt3QUhPiW8mbB48CQK1B+AFUMm/P3NW/6ZX/26NCs0Xhwb56fVhXJigEgFqI8AO3dyanUE99vkU/7T0tqeRxFP+6q7PqBvo4uDIAQHUg/MCtbT2aocc+3qTjGXny8fLQ5Nva6f6e0TKZTI4uDQBQTQg/cEuGYeiT9Uf04re7VGi2qFn9AM26P1btGgU7ujQAQDUj/MDt5BWa9dzX27Vo03FJ0oD24fr3PV0U7Oft4MoAADWB8AO3cigtR49+lKw9KdnyMEnP3tJWj/SL4TIXALgRwg/cxg87U/TXz7cqu6BYYXV89Nbwa9SrRX1HlwUAqGGEH9R6xWaLXl26V2//fFCS1D26rmaOuEbhwX4OrgwA4AiE
H9RqaecKNP6TTVp78Iwk6aE+zTXp1rZMWggAbozwg1rr4OlzGjVvvY6eyVOgj6de+WNn/aFzY0eXBQBwMMIPaqXkw2c15v0NOptbpKb1AjT3we5q2TDI0WUBAJwA4Qe1ztKdKXr8080qKLaoc5MQzRl1rRoE+Tq6LACAkyD8oFb5cO1hTf1mhyyG1L9tQ/3nvm4K8OHXHABwEWcF1AqGYehfP+zVrBUHJEnDe0Tp73d0lBcDmwEAv0P4gcsrLLbo2S+36avNJTM2J9zcWo/3b8nEhQCAchF+4NKy84v06EebtGp/mjw9TJo2tJPu6R7l6LIAAE6M8AOXlZqVr1Fz12tPSrYCfTz13/tjdX3rBo4uCwDg5Ag/cDmZeUVauPGo3ll5UKeyC9QgyFfzHrxWHSNDHF0aAMAFEH7gMvalZuv9pN/0ZfJx5RWZJUktGgRq/ugeiqoX4ODqAACugvADp2a2GPppzynNX/ObVu1Psy5vGxGkUb2baUjXSPn7eDqwQgCAqyH8wClduLT1QdJhHTmTK0nyMEk3tw/Xg72bq2dMPe7mAgBcEcIPnM7320/q6YVblVNYcmkrxN9b914bpft7RnN5CwBw1Qg/cCpzVx3S3xfvkmFIrcPraHSf5lzaAgDYFeEHTsFiMTTt/3br3V8OSZJG9orW1MEd5OnBpS0AgH0RfuBwBcVm/XXhNn239YQk6dlb2urP18cwpgcAUC0IP3CozLwi/enDjVp78Iy8PEx69e7OurNbE0eXBQCoxQg/cJiTmXl6cO4G7U3NVh1fL826/xr1bcUMzQCA6kX4gUP8mpqtUXPX62RmvhoG+Wre6GvVoTEzNAMAqh/hBzVu7cF0PfLBRmXlF6tFg0C9/1APNanLLewAgJpB+EGNWrjxqP721XYVmQ11j66r90Z1V2iAj6PLAgC4EcIPaoTFYujVpXs1a8UBSdJtnRrptXu6yM+b+XsAADWL8INql1tYrIQFW7VkZ4ok6fH+LfVkfGt5MIcPAMABCD+oVqlZ+Rrz/kZtP54pH08PvfLHTtzKDgBwKMIPqs2O45ka8/5GpWTlq16gj955IFbdm9VzdFkAADdH+EG1+GFnip74bIvyisxq1bCO5oy6Vk3rc0cXAMDxCD+wq6z8Ik1f+qveT/pNhiH1bRWmmSOuUbCft6NLAwBAEuEHdmIYhr7dekJ//99upZ0rkFTycNIpf2gvL08PB1cHAMBFhB9ctf2nsjX5651KOpguSYoJC9RLd3TUda3CHFwZAABlEX5wxXILi/XW8v1675eDKjIb8vXy0OP9W2psvxj5ejF/DwDAORF+cEV+2XdaE7/cruMZeZKkm9o21Au3d1BUPQY1AwCcG+EHNks+fFYPz9+oQrNFkaH+euH2Drq5fbijywIAoEocPhJ15syZatasmfz8/BQXF6f169dX2n7GjBlq06aN/P39FRUVpSeffFL5+fk1VC2OZ+TpTx+WBJ/4dg21LKEfwQcA4FJsDj+jRo3SypUr7fLhCxYsUEJCgqZOnapNmzapS5cuGjhwoE6dOlVu+08++UQTJ07U1KlTtXv3bs2ZM0cLFizQ3/72N7vUg8rlFhZr7PsblXauUG0jgvTGvd0U4EPnIQDAtdgcfjIzMxUfH69WrVrp5Zdf1vHjx6/4w6dPn66xY8dq9OjRat++vWbPnq2AgADNnTu33PZr1qxRnz59dN9996lZs2YaMGCAhg8fftneIlw9i8XQU59v1a6TWaof6KP3RnVXoC/BBwDgemwOP19//bWOHz+uRx99VAsWLFCzZs00aNAgffHFFyoqKqrydgoLC5WcnKz4+PiLxXh4KD4+XklJSeWu07t3byUnJ1vDzsGDB/X999/r1ltvrfBzCgoKlJWVVeoF272RuE//tyNF3p4mzX4gVk3qMrAZAOCarmjMT4MGDZSQkKCtW7dq3bp1atmypR544AE1btxYTz75pPbt23fZbaSlpclsNis8vPR4kfDwcKWk
pJS7zn333aeXXnpJ1113nby9vdWiRQvdcMMNlV72mjZtmkJCQqyvqKgo23YW+t+2E3ojseSY/r8hnXQtz+cCALiwqxrwfPLkSS1btkzLli2Tp6enbr31Vm3fvl3t27fX66+/bq8arVasWKGXX35Z//3vf7Vp0yYtWrRIixcv1t///vcK15k0aZIyMzOtr6NHj9q9rtpsx/FM/XXhVknSmOua655rCY8AANdm86CNoqIiffvtt5o3b56WLl2qzp0764knntB9992n4OBgSdJXX32lhx56SE8++WSF2wkLC5Onp6dSU1NLLU9NTVVERES560yePFkPPPCAxowZI0nq1KmTcnJy9Mgjj+i5556Th0fZLOfr6ytfX19bdxOSTmXla+wHG5VfZNH1rRto0q3tHF0SAABXzebw06hRI1ksFutA465du5Zpc+ONNyo0NLTS7fj4+Cg2NlaJiYkaMmSIJMlisSgxMVHjx48vd53c3NwyAcfTs2QmYcMwbN0VVGLXiSz95bPNOpmZrxYNAvXWfd3k6WFydFkAAFw1m8PP66+/rrvvvlt+fn4VtgkNDdWhQ4cuu62EhASNGjVK3bt3V48ePTRjxgzl5ORo9OjRkqSRI0cqMjJS06ZNkyQNHjxY06dPV7du3RQXF6f9+/dr8uTJGjx4sDUE4epYLIbmrj6kfy3Zq0KzRQ2CfPXeqGt5KjsAoNawOfzcfvvtys3NLRN+zpw5Iy8vL+ulr6oYNmyYTp8+rSlTpiglJUVdu3bVkiVLrIOgjxw5Uqqn5/nnn5fJZNLzzz+v48ePq0GDBho8eLD+3//7f7buBsqRmpWvvy7cql/2pUmS4ts11Ct3dVb9Olw2BADUHibDxutFgwYN0uDBg/XYY4+VWj579mx9++23+v777+1aoL1lZWUpJCREmZmZNgW12m7JjhRNWrRNZ3OL5Oftocl/aK/7ejSVycSlLgCA49nz/G3z3V7r1q3TjTfeWGb5DTfcoHXr1l1VMah5uYXFmrRom/78UbLO5hapQ+Ng/e/xvhoRF03wAQDUSjZf9iooKFBxcXGZ5UVFRcrLy7NLUagZuYXFuvedtdp2LFMmk/RIvxg9dXMb+Xg5/JFvAABUG5vPcj169NA777xTZvns2bMVGxtrl6JQ/cwWQxM+26JtxzJVN8BbHz8cp0mD2hF8AAC1ns09P//4xz8UHx+vrVu36qabbpIkJSYmasOGDVq6dKndC0T1mPb9bi3blSofTw+9O7K7ujNrMwDATdj8v/l9+vRRUlKSoqKi9Pnnn+u7775Ty5YttW3bNvXt27c6aoSdfbT2sN5bVTIVwat3dyb4AADcis13e7k6d7/ba8XeU3r4/Y0yWww9dXNrPX5TK0eXBADAZdnz/G3zZa9L5efnq7CwsNQydwwUrmJPSpbGf7JZZouhu65povH9Wzq6JAAAapzNl71yc3M1fvx4NWzYUIGBgapbt26pF5zTqax8PTRvg84VFCuueT1NG9qJW9kBAG7J5vDz9NNPa/ny5Zo1a5Z8fX313nvv6cUXX1Tjxo31wQcfVEeNuEoZuYV6+P2NOpGZr5iwQL39QCx3dQEA3JbNl72+++47ffDBB7rhhhs0evRo9e3bVy1btlR0dLQ+/vhjjRgxojrqxBU6djZXo+au14HTOaob4K25D16r0AAfR5cFAIDD2Py//2fOnFFMTIykkvE9Z86ckSRdd911WrlypX2rw1XZdSJLQ/+7RgdO56hRiJ8W/KmXmoUFOrosAAAcyubwExMTY31ie9u2bfX5559LKukRCg0NtWtxuHJr9qdp2NtJOpVdoNbhdfTlo73VOjzI0WUBAOBwNoef0aNHa+vWrZKkiRMnaubMmfLz89OTTz6pp59+2u4Fwnbfbj2hUfPWK7ugWD2a19PCP/dW41B/R5cFAIBTuOp5fg4fPqzk5GS1bNlSnTt3tldd1aa2z/Pz2fojmrhouyTp1k4Rmn5PV/l5ezq4KgAAro7D
nupeVFSkm266Sfv27bMui46O1tChQ10i+NR2m4+c1eRvdkiSRvWK1lvDryH4AADwOzbd7eXt7a1t27ZVVy24CmdzCjX+k80qMhsa1DFCL9zegXl8AAAoh81jfu6//37NmTOnOmrBFbJYDCV8vkXHM/LUrH6AXvljZ4IPAAAVsHmen+LiYs2dO1c//vijYmNjFRhY+tbp6dOn2604VM2snw/op72n5ePloZkjrlGwn7ejSwIAwGnZHH527Niha665RpL066+/lnqP3oaal3QgXa8t3StJeun2DurQOMTBFQEA4NxsDj8//fRTddSBK3AqO1+Pf7pZFkMaek2khl0b5eiSAABwejzgyUWZLYYmfLpFaedKJjH8x5CO9LwBAFAFNvf83HjjjZWeZJcvX35VBaFq/rN8v5IOpivAx1P/HRGrAB+bDyUAAG7J5jNm165dS/1cVFSkLVu2aMeOHRo1apS96kIlkg6k643EkvFW/xjSUS0b1nFwRQAAuA6bw8/rr79e7vIXXnhB586du+qCULn0cwWa8FnJOJ8/xjbR0GuaOLokAABcit3G/Nx///2aO3euvTaHclgshp5auFWnsgvUokGgXrqjg6NLAgDA5dgt/CQlJcnPz89em0M53v3loFbsPS3f8/P5MM4HAADb2Xz2HDp0aKmfDcPQyZMntXHjRk2ePNluhaG0Hccz9eoPJfP5vHB7B7WNqH0PZQUAoCbYHH5CQkpPoufh4aE2bdropZde0oABA+xWGC4yDEMvfLtTxZaS53bdy3w+AABcMZvDz7x586qjDlTi260ntPHwWfl7e2rK4PbM5wMAwFWweczPhg0btG7dujLL161bp40bN9qlKFyUU1Csad/vkSSNu7GFGoX4O7giAABcm83hZ9y4cTp69GiZ5cePH9e4cePsUhQu+u+K/UrJyldUPX+N6Rvj6HIAAHB5NoefXbt2WR9seqlu3bpp165ddikKJQ6n5+jdlYckSc/f1l5+3p4OrggAANdnc/jx9fVVampqmeUnT56Ulxe3XtvTPxbvVqHZor6twjSgfbijywEAoFawOfwMGDBAkyZNUmZmpnVZRkaG/va3v+nmm2+2a3HubONvZ7RsV6q8PEyayiBnAADsxuaumn//+9/q16+foqOj1a1bN0nSli1bFB4erg8//NDuBbqrt1celFTyCIuWDYMcXA0AALWHzeEnMjJS27Zt08cff6ytW7fK399fo0eP1vDhw+Xt7V0dNbqdA6fP6cfdJZcWGeQMAIB9XdEgncDAQD3yyCP2rgXnzVl1SIYhxbdryBPbAQCwM5vH/EybNq3cB5jOnTtXr7zyil2Kcmdp5wr0ZfIxSdIj/Vo4uBoAAGofm8PP22+/rbZt25ZZ3qFDB82ePdsuRbmzD5IOq6DYoi5Robq2WV1HlwMAQK1jc/hJSUlRo0aNyixv0KCBTp48aZei3FVeoVkfJv0mSXqkbwx3eAEAUA1sDj9RUVFavXp1meWrV69W48aN7VKUu/py0zGdzS1SVD1/DezAvD4AAFQHmwc8jx07Vk888YSKiorUv39/SVJiYqKeeeYZPfXUU3Yv0J18u+WEJGlkz2by8rQ5lwIAgCqwOfw8/fTTSk9P12OPPabCwkJJkp+fn5599llNmjTJ7gW6i7RzBdp4+IwkaVCnCAdXAwBA7WVz+DGZTHrllVc0efJk7d69W/7+/mrVqpV8fX2roz63sXz3KVkMqWNksJrUDXB0OQAA1FpX/DCuOnXq6Nprr7VnLW7th50pkqQB7en1AQCgOl1R+Nm4caM+//xzHTlyxHrp64JFixbZpTB3klNQrF/2p0mSBjDQGQCAamXzqNrPPvtMvXv31u7du/XVV1+pqKhIO3fu1PLlyxUSElIdNdZ6K389rcJii6LrB6hNOM/xAgCgOtkcfl5++WW9/vrr+u677+Tj46M33nhDe/bs0T333KOmTZtWR4213tJdJc/xGtA+nLl9AACoZjaHnwMH
Dui2226TJPn4+CgnJ0cmk0lPPvmk3nnnHbsXWNsVmS1KPP8Q0wEdGO8DAEB1szn81K1bV9nZ2ZJKnvC+Y8cOSVJGRoZyc3PtW50bWHfwjLLyi1U/0EfXNOVxFgAAVDebBzz369dPy5YtU6dOnXT33XdrwoQJWr58uZYtW6abbrqpOmqs1S7c5XVz+3B5enDJCwCA6mZz+PnPf/6j/Px8SdJzzz0nb29vrVmzRnfddZeef/55uxdYm2XnF+nrzcclSbd05JIXAAA1webwU69ePevfPTw8NHHiRLsW5E4+XX9E2QXFatmwjvq1auDocgAAcAs8QMpBCostmrPqkCTpkX4x8uCSFwAANYLw4yDfbDmu1KwChQf76o6ujR1dDgAAboPw4wAWi6F3Vh6UJD3Up7l8vTwdXBEAAO6D8OMAP/96WvtOnVMdXy8Nj2NiSAAAapLN4eenn36q8L2ZM2deVTHuYtH5O7zu7t5EwX7eDq4GAAD3YnP4GTp0qJKTk8ssf+ONNzRp0iS7FFWb5RWarTM6396FsT4AANQ0m8PPq6++qkGDBmnPnj3WZa+99pqmTJmixYsX27W42mj5nlPKLTSrSV1/dY0KdXQ5AAC4HZvn+RkzZozOnDmj+Ph4rVq1SgsWLNDLL7+s77//Xn369KmOGmuV/207IUm6rXMjHmIKAIAD2Bx+JOmZZ55Renq6unfvLrPZrB9++EE9e/a0d221Tk5BsZbvOSVJGtyZS14AADhClcLPm2++WWZZZGSkAgIC1K9fP61fv17r16+XJP3lL3+xb4W1yI+7U1VQbFF0/QB1aBzs6HIAAHBLJsMwjMs1at68edU2ZjLp4MGDNhcxc+ZMvfrqq0pJSVGXLl301ltvqUePHuW2veGGG/Tzzz+XWX7rrbdWacxRVlaWQkJClJmZqeDgmg0gI95bq9X70zX+xpb668A2NfrZAAC4Mnuev6vU83Po0KGr+pDKLFiwQAkJCZo9e7bi4uI0Y8YMDRw4UHv37lXDhg3LtF+0aJEKCwutP6enp6tLly66++67q61Ge/g1NVur96fLwyTd2yPK0eUAAOC2HD7J4fTp0zV27FiNHj1a7du31+zZsxUQEKC5c+eW275evXqKiIiwvpYtW6aAgIAKw09BQYGysrJKvRxh/prfJEkD2keoSd0Ah9QAAACuIPzcddddeuWVV8os/9e//mVz70thYaGSk5MVHx9/sSAPD8XHxyspKalK25gzZ47uvfdeBQYGlvv+tGnTFBISYn1FRdV8r0tmbpEWbTomSXqwT7Ma/3wAAHCRzeFn5cqVuvXWW8ssHzRokFauXGnTttLS0mQ2mxUeHl5qeXh4uFJSUi67/vr167Vjxw6NGTOmwjaTJk1SZmam9XX06FGbarSH/9txUvlFFrWNCFJc83o1/vkAAOAim291P3funHx8fMos9/b2rvFLSnPmzFGnTp0qHBwtSb6+vvL19a3Bqspaf+iMJCm+XThz+wAA4GA29/x06tRJCxYsKLP8s88+U/v27W3aVlhYmDw9PZWamlpqeWpqqiIiIipdNycnR5999pkefvhhmz7TEdb/VhJ+etDrAwCAw9nc8zN58mQNHTpUBw4cUP/+/SVJiYmJ+vTTT7Vw4UKbtuXj46PY2FglJiZqyJAhkiSLxaLExESNHz++0nUXLlyogoIC3X///bbuQo06kZGnY2fz5GGSromu6+hyAABwezaHn8GDB+vrr7/Wyy+/rC+++EL+/v7q3LmzfvzxR11//fU2F5CQkKBRo0ape/fu6tGjh2bMmKGcnByNHj1akjRy5EhFRkZq2rRppdabM2eOhgwZovr169v8mTVpw/len46RIarje0UTagMAADu6orPxbbfdpttuu80uBQwbNkynT5/WlClTlJKSoq5du2rJkiXWQdBHjhyRh0fpq3N79+7VqlWrtHTpUrvUUJ3WnR/v06MZl7wAAHAGVZrhuTap6RmeB7z+s35NPae3H4jV
wA6Vj2MCAADlq/EZni9lNpv1+uuv6/PPP9eRI0dKzbYsSWfOnLmqgmqT7Pwi7Tt1TpJ0TVPG+wAA4AxsvtvrxRdf1PTp0zVs2DBlZmYqISFBQ4cOlYeHh1544YVqKNF1bT+WKcOQIkP91SDIsbfbAwCAEjaHn48//ljvvvuunnrqKXl5eWn48OF67733NGXKFK1du7Y6anRZm49mSJK6Ng11aB0AAOAim8NPSkqKOnXqJEmqU6eOMjMzJUl/+MMfqvRUdXey5Xz46RYV6tA6AADARTaHnyZNmujkyZOSpBYtWljvuNqwYYPDZ1J2Nlsv9PwQfgAAcBo2h58777xTiYmJkqTHH39ckydPVqtWrTRy5Eg99NBDdi/QVZ3OLtCp7AKZTFKHxiGOLgcAAJxn891e//znP61/HzZsmKKjo7VmzRq1atVKgwcPtmtxrmxvSrYkqXn9QPn7eDq4GgAAcIHN4WflypXq3bu3vLxKVu3Zs6d69uyp4uJirVy5Uv369bN7ka5oT0rJQ17bRAQ5uBIAAHApmy973XjjjeXO5ZOZmakbb7zRLkXVBnvO9/y0jaj+iRQBAEDV2Rx+DMOQyWQqszw9PV2BgYF2Kao2oOcHAADnVOXLXkOHDpUkmUwmPfjgg6Xu7DKbzdq2bZt69+5t/wpdkNliaF9qyczOhB8AAJxLlcNPSEjJHUuGYSgoKEj+/v7W93x8fNSzZ0+NHTvW/hW6oCNnclVQbJGft4ea1gtwdDkAAOASVQ4/8+bNkyQ1a9ZMTz/9tAICOKlX5MKdXq3Dg+TpUfYSIQAAcBybx/z8/PPPZR5mKpU8bbV///52KcrV/ZpaEn5aNeSSFwAAzsZu4Sc/P1+//PKLXYpydb+l50iSYhowABwAAGdT5cte27Ztk1Qy5mfXrl1KSUmxvmc2m7VkyRJFRkbav0IXdOxMniSpSV3/y7QEAAA1rcrhp2vXrjKZTDKZTOVe3vL399dbb71l1+Jc1bGzuZKkKAY7AwDgdKocfg4dOiTDMBQTE6P169erQYMG1vd8fHzUsGFDeXryGIfCYotOZuVLkqLqEn4AAHA2VQ4/0dHRkiSLxVJtxdQGqVn5MgzJx9NDYXV8HF0OAAD4HZuf7XXBrl27dOTIkTKDn2+//farLsqVZeYVSZLqBnqXOxM2AABwLJvDz8GDB3XnnXdq+/btMplMMgxDkqwnerPZbN8KXUxGbkn4CfWn1wcAAGdk863uEyZMUPPmzXXq1CkFBARo586dWrlypbp3764VK1ZUQ4mu5WxuSU9YSIC3gysBAADlsbnnJykpScuXL1dYWJg8PDzk4eGh6667TtOmTdNf/vIXbd68uTrqdBkZFy57EX4AAHBKNvf8mM1mBQWVzFwcFhamEydOSCoZEL137177VueCMs/3/HDZCwAA52Rzz0/Hjh21detWNW/eXHFxcfrXv/4lHx8fvfPOO4qJiamOGl3KmZzzY34C6fkBAMAZ2Rx+nn/+eeXklDy+4aWXXtIf/vAH9e3bV/Xr19eCBQvsXqCrOX2uQJLUoI6vgysBAADlsTn8DBw40Pr3li1bas+ePTpz5ozq1q3Lrd2S0rJLwk8Y4QcAAKd0xfP8XKpevXr22EytkHaO8AMAgDOzecAzKncmp2TAc31mdwYAwCkRfuzIMAxl5ZcMeA7xZ8AzAADOiPBjRwXFFhWZS2a8Dib8AADglAg/dpR1foJDD5MU6MMT7gEAcEaEHzu6cMkr2J+HmgIA4KwIP3aUlV8sSarja5eb6AAAQDUg/NhR9vnwE+THeB8AAJwV4ceOss9f9gryo+cHAABnRfixo3MXen647AUAgNMi/NjRuYKS8BNI+AEAwGkRfuyooNgiSfLz5p8VAABnxVnajvKLzJIkP2/m+AEAwFkRfuzoYs8P4QcAAGdF+LEja8+PF/+sAAA4K87SdlRQVNLz40P4AQDAaXGWtqMiM+EHAABnx1najgouhB9P/lkBAHBWnKXt
qOj8gGdven4AAHBanKXt6MJlL296fgAAcFqcpe2okMteAAA4Pc7SdnQ2p+TBpiEBPNUdAABnRfixo7zz8/zwYFMAAJwX4ceOii0ll708PUwOrgQAAFSE8GNHZrMhSfLy4J8VAABnxVnajootJeGHnh8AAJwX4ceOzOfDj5cn4QcAAGdF+LGjCz0/HibCDwAAzorwY0eF52d49mWGZwAAnBZnaTtihmcAAJwfZ2k7KmbMDwAATo/wYyeGYVj/zpgfAACcF+HHTiwXs4+40x0AAOdF+LET8yXpx0TPDwAATovwYyeWUpe9HFgIAAColMPDz8yZM9WsWTP5+fkpLi5O69evr7R9RkaGxo0bp0aNGsnX11etW7fW999/X0PVVswoddmL9AMAgLNy6OPHFyxYoISEBM2ePVtxcXGaMWOGBg4cqL1796phw4Zl2hcWFurmm29Ww4YN9cUXXygyMlKHDx9WaGhozRf/OxceairxeAsAAJyZQ8PP9OnTNXbsWI0ePVqSNHv2bC1evFhz587VxIkTy7SfO3euzpw5ozVr1sjb21uS1KxZs5osuULF5otdP8zzAwCA83LYWbqwsFDJycmKj4+/WIyHh+Lj45WUlFTuOt9++6169eqlcePGKTw8XB07dtTLL78ss9lc4ecUFBQoKyur1Ks6FF8y4JmeHwAAnJfDwk9aWprMZrPCw8NLLQ8PD1dKSkq56xw8eFBffPGFzGazvv/+e02ePFmvvfaa/vGPf1T4OdOmTVNISIj1FRUVZdf9uODCPD/kHgAAnJtLXZ+xWCxq2LCh3nnnHcXGxmrYsGF67rnnNHv27ArXmTRpkjIzM62vo0ePVk9t5zt+GOwMAIBzc9iYn7CwMHl6eio1NbXU8tTUVEVERJS7TqNGjeTt7S1PT0/rsnbt2iklJUWFhYXy8fEps46vr698fX3tW3w5LtzqTvYBAMC5Oaznx8fHR7GxsUpMTLQus1gsSkxMVK9evcpdp0+fPtq/f78sl9xZ9euvv6pRo0blBp+adGGSQ3p+AABwbg697JWQkKB3331X77//vnbv3q1HH31UOTk51ru/Ro4cqUmTJlnbP/roozpz5owmTJigX3/9VYsXL9bLL7+scePGOWoXrC70/DDYGQAA5+bQW92HDRum06dPa8qUKUpJSVHXrl21ZMkS6yDoI0eOyMPjYj6LiorSDz/8oCeffFKdO3dWZGSkJkyYoGeffdZRu2DFmB8AAFyDybj0ceRuICsrSyEhIcrMzFRwcLDdtnvw9Dn1f+1nBfl5afsLA+22XQAAYN/zt0vd7eXMLiRI+n0AAHBuhB87udB/xhPdAQBwboQfu+FWdwAAXAHhx06sPT+OLQMAAFwG4cdOrGN+6PoBAMCpEX7shJ4fAABcA+HHTgzG/AAA4BIIP3ZycbYk0g8AAM6M8GMnFx5vwdMtAABwboQfO7k4z49j6wAAAJUj/NiZicteAAA4NcIPAABwK4QfO+OyFwAAzo3wYycX7/YCAADOjPBjJ9Z5fhxcBwAAqBzhx054qjsAAK6B8AMAANwK4QcAALgVwo+dMN4ZAADXQPgBAABuhfADAADcCuHHzrjZCwAA50b4AQAAboXwAwAA3Arhx04Mnm8BAIBLIPwAAAC3QvgBAABuhfADAADcCuHHzrjVHQAA50b4AQAAboXwAwAA3ArhBwAAuBXCDwAAcCuEHzthikMAAFwD4QcAALgVwg8AAHArhB8AAOBWCD8AAMCtEH4AAIBbIfwAAAC3QvgBAABuhfBjJ7kFZknS0TN5Dq4EAABUhvBjJ4u3n3B0CQAAoAoIP3Zye5dIR5cAAACqgPBjJ77eJf+UTesFOLgSAABQGcIPAABwK4QfAADgVgg/AADArRB+AACAWyH8AAAAt0L4AQAAboXwAwAA3ArhBwAAuBXCDwAAcCuEHwAA4FYIPwAAwK0QfgAAgFsh/AAAALdC+LGzrPwiR5cAAAAqQfixk19TsiVJGbmE
HwAAnBnhx04OpeU4ugQAAFAFhB87GdAhwtElAACAKiD82InJVPJn03oBji0EAABUyinCz8yZM9WsWTP5+fkpLi5O69evr7Dt/PnzZTKZSr38/PxqsNryncjIkyQdOZPr4EoAAEBlHB5+FixYoISEBE2dOlWbNm1Sly5dNHDgQJ06darCdYKDg3Xy5Enr6/DhwzVYcfnGf7LZ0SUAAIAqcHj4mT59usaOHavRo0erffv2mj17tgICAjR37twK1zGZTIqIiLC+wsPDa7Di8k0c1NbRJQAAgCpwaPgpLCxUcnKy4uPjrcs8PDwUHx+vpKSkCtc7d+6coqOjFRUVpTvuuEM7d+6ssG1BQYGysrJKvarDn69vod/+eZt+++dt1bJ9AABgHw4NP2lpaTKbzWV6bsLDw5WSklLuOm3atNHcuXP1zTff6KOPPpLFYlHv3r117NixcttPmzZNISEh1ldUVJTd9wMAALgOh1/2slWvXr00cuRIde3aVddff70WLVqkBg0a6O233y63/aRJk5SZmWl9HT16tIYrBgAAzsTLkR8eFhYmT09PpaamllqempqqiIiqzZvj7e2tbt26af/+/eW+7+vrK19f36uuFQAA1A4O7fnx8fFRbGysEhMTrcssFosSExPVq1evKm3DbDZr+/btatSoUXWVCQAAahGH9vxIUkJCgkaNGqXu3burR48emjFjhnJycjR69GhJ0siRIxUZGalp06ZJkl566SX17NlTLVu2VEZGhl599VUdPnxYY8aMceRuAAAAF+Hw8DNs2DCdPn1aU6ZMUUpKirp27aolS5ZYB0EfOXJEHh4XO6jOnj2rsWPHKiUlRXXr1lVsbKzWrFmj9u3bO2oXAACACzEZhmE4uoialJWVpZCQEGVmZio4ONjR5QAAgCqw5/nb5e72AgAAuBqEHwAA4FYIPwAAwK0QfgAAgFsh/AAAALdC+AEAAG6F8AMAANyKwyc5rGkXpjXKyspycCUAAKCqLpy37TE9oduFn+zsbElSVFSUgysBAAC2ys7OVkhIyFVtw+1meLZYLDpx4oSCgoJkMpnsuu2srCxFRUXp6NGjtXr2aPaz9nGXfWU/axd32U/Jffa1sv00DEPZ2dlq3LhxqcdeXQm36/nx8PBQkyZNqvUzgoODa/Uv5wXsZ+3jLvvKftYu7rKfkvvsa0X7ebU9Phcw4BkAALgVwg8AAHArhB878vX11dSpU+Xr6+voUqoV+1n7uMu+sp+1i7vsp+Q++1pT++l2A54BAIB7o+cHAAC4FcIPAABwK4QfAADgVgg/AADArRB+bDRz5kw1a9ZMfn5+iouL0/r16yttv3DhQrVt21Z+fn7q1KmTvv/++xqq9MpMmzZN1157rYKCgtSwYUMNGTJEe/furXSd+fPny2QylXr5+fnVUMVX5oUXXihTc9u2bStdx9WO5QXNmjUrs68mk0njxo0rt72rHM+VK1dq8ODBaty4sUwmk77++utS7xuGoSlTpqhRo0by9/dXfHy89u3bd9nt2vodr26V7WdRUZGeffZZderUSYGBgWrcuLFGjhypEydOVLrNK/n9rwmXO6YPPvhgmbpvueWWy27XlY6ppHK/ryaTSa+++mqF23TGY1qV80l+fr7GjRun+vXrq06dOrrrrruUmppa6Xav9Lt9KcKPDRYsWKCEhARNnTpVmzZtUpcuXTRw4ECdOnWq3PZr1qzR8OHD9fDDD2vz5s0aMmSIhgwZoh07dtRw5VX3888/a9y4cVq7dq2WLVumoqIiDRgwQDk5OZWuFxwcrJMnT1pfhw8frqGKr1yHDh1K1bxq1aoK27risbxgw4YNpfZz2bJlkqS77767wnVc4Xjm5OSoS5cumjlzZrnv/+tf/9Kbb76p2bNna926dQoMDNTAgQOVn59f4TZt/Y7XhMr2Mzc3V5s2bdLkyZO1adMmLVq0SHv37tXtt99+2e3a8vtfUy53TCXplltuKVX3p59+Wuk2Xe2YSiq1
fydPntTcuXNlMpl01113VbpdZzumVTmfPPnkk/ruu++0cOFC/fzzzzpx4oSGDh1a6Xav5LtdhoEq69GjhzFu3Djrz2az2WjcuLExbdq0ctvfc889xm233VZqWVxcnPGnP/2pWuu0p1OnThmSjJ9//rnCNvPmzTNCQkJqrig7mDp1qtGlS5cqt68Nx/KCCRMmGC1atDAsFku577vi8ZRkfPXVV9afLRaLERERYbz66qvWZRkZGYavr6/x6aefVrgdW7/jNe33+1me9evXG5KMw4cPV9jG1t9/RyhvX0eNGmXccccdNm2nNhzTO+64w+jfv3+lbVzhmP7+fJKRkWF4e3sbCxcutLbZvXu3IclISkoqdxtX+t3+PXp+qqiwsFDJycmKj4+3LvPw8FB8fLySkpLKXScpKalUe0kaOHBghe2dUWZmpiSpXr16lbY7d+6coqOjFRUVpTvuuEM7d+6sifKuyr59+9S4cWPFxMRoxIgROnLkSIVta8OxlEp+jz/66CM99NBDlT7Y1xWP56UOHTqklJSUUscsJCREcXFxFR6zK/mOO6PMzEyZTCaFhoZW2s6W339nsmLFCjVs2FBt2rTRo48+qvT09Arb1oZjmpqaqsWLF+vhhx++bFtnP6a/P58kJyerqKio1PFp27atmjZtWuHxuZLvdnkIP1WUlpYms9ms8PDwUsvDw8OVkpJS7jopKSk2tXc2FotFTzzxhPr06aOOHTtW2K5NmzaaO3euvvnmG3300UeyWCzq3bu3jh07VoPV2iYuLk7z58/XkiVLNGvWLB06dEh9+/ZVdnZ2ue1d/Vhe8PXXXysjI0MPPvhghW1c8Xj+3oXjYssxu5LvuLPJz8/Xs88+q+HDh1f68Etbf/+dxS233KIPPvhAiYmJeuWVV/Tzzz9r0KBBMpvN5bavDcf0/fffV1BQ0GUvBTn7MS3vfJKSkiIfH58yQf1y59ULbaq6Tnnc7qnuqLpx48Zpx44dl71u3KtXL/Xq1cv6c+/evdWuXTu9/fbb+vvf/17dZV6RQYMGWf/euXNnxcXFKTo6Wp9//nmV/g/LVc2ZM0eDBg1S48aNK2zjiscTJYOf77nnHhmGoVmzZlXa1lV//++9917r3zt16qTOnTurRYsWWrFihW666SYHVlZ95s6dqxEjRlz2pgNnP6ZVPZ/UFHp+qigsLEyenp5lRqGnpqYqIiKi3HUiIiJsau9Mxo8fr//973/66aef1KRJE5vW9fb2Vrdu3bR///5qqs7+QkND1bp16wprduVjecHhw4f1448/asyYMTat54rH88JxseWYXcl33FlcCD6HDx/WsmXLKu31Kc/lfv+dVUxMjMLCwiqs25WPqST98ssv2rt3r83fWcm5jmlF55OIiAgVFhYqIyOjVPvLnVcvtKnqOuUh/FSRj4+PYmNjlZiYaF1msViUmJhY6v+SL9WrV69S7SVp2bJlFbZ3BoZhaPz48frqq6+0fPlyNW/e3OZtmM1mbd++XY0aNaqGCqvHuXPndODAgQprdsVj+Xvz5s1Tw4YNddttt9m0nisez+bNmysiIqLUMcvKytK6desqPGZX8h13BheCz759+/Tjjz+qfv36Nm/jcr//zurYsWNKT0+vsG5XPaYXzJkzR7GxserSpYvN6zrDMb3c+SQ2Nlbe3t6ljs/evXt15MiRCo/PlXy3KyoOVfTZZ58Zvr6+xvz5841du3YZjzzyiBEaGmqkpKQYhmEYDzzwgDFx4kRr+9WrVxteXl7Gv//9b2P37t3G1KlTDW9vb2P79u2O2oXLevTRR42QkBBjxYoVxsmTJ62v3Nxca5vf7+eLL75o/PDDD8aBAweM5ORk49577zX8/PyMnTt3OmIXquSpp54yVqxYYRw6dMhYvXq1ER8fb4SFhRmnTp0yDKN2HMtLmc1mo2nTpsazzz5b5j1XPZ7Z2dnG5s2bjc2bNxuSjOnTpxubN2+23uX0z3/+0wgNDTW++eYbY9u2bcYd
d9xhNG/e3MjLy7Nuo3///sZbb71l/fly33FHqGw/CwsLjdtvv91o0qSJsWXLllLf2YKCAus2fr+fl/v9d5TK9jU7O9v461//aiQlJRmHDh0yfvzxR+Oaa64xWrVqZeTn51u34erH9ILMzEwjICDAmDVrVrnbcIVjWpXzyZ///GejadOmxvLly42NGzcavXr1Mnr16lVqO23atDEWLVpk/bkq3+3LIfzY6K233jKaNm1q+Pj4GD169DDWrl1rfe/66683Ro0aVar9559/brRu3drw8fExOnToYCxevLiGK7aNpHJf8+bNs7b5/X4+8cQT1n+T8PBw49ZbbzU2bdpU88XbYNiwYUajRo0MHx8fIzIy0hg2bJixf/9+6/u14Vhe6ocffjAkGXv37i3znqsez59++qnc39UL+2KxWIzJkycb4eHhhq+vr3HTTTeV2f/o6Ghj6tSppZZV9h13hMr289ChQxV+Z3/66SfrNn6/n5f7/XeUyvY1NzfXGDBggNGgQQPD29vbiI6ONsaOHVsmxLj6Mb3g7bffNvz9/Y2MjIxyt+EKx7Qq55O8vDzjscceM+rWrWsEBAQYd955p3Hy5Mky27l0nap8ty/HdH7DAAAAboExPwAAwK0QfgAAgFsh/AAAALdC+AEAAG6F8AMAANwK4QcAALgVwg8AAHArhB8AAOBWCD8Aasxvv/0mk8mkLVu2OLoUm82fP1+hoaF2bwug5hF+ANSYqKgonTx5Uh07dnR0KTYbNmyYfv31V0eXAcAOvBxdAAD3UFhYKB8fH0VERDi6FJsVFRXJ399f/v7+ji4FgB3Q8wPAZjfccIPGjx+v8ePHKyQkRGFhYZo8ebIufVRgs2bN9Pe//10jR45UcHCwHnnkkVKXvSwWi5o0aaJZs2aV2vbmzZvl4eGhw4cPS5KmT5+uTp06KTAwUFFRUXrsscd07ty5UuusXr1aN9xwgwICAlS3bl0NHDhQZ8+e1QcffKD69euroKCgVPshQ4bogQceKHffLtS4YMECXX/99fLz89PHH39c5lLW1q1bdeONNyooKEjBwcGKjY3Vxo0by93m6dOn1b17d915551lagFQ8wg/AK7I+++/Ly8vL61fv15vvPGGpk+frvfee69Um3//+9/q0qWLNm/erMmTJ5d6z8PDQ8OHD9cnn3xSavnHH3+sPn36KDo62truzTff1M6dO/X+++9r+fLleuaZZ6ztt2zZoptuuknt27dXUlKSVq1apcGDB8tsNuvuu++W2WzWt99+a21/6tQpLV68WA899FCl+zdx4kRNmDBBu3fv1sCBA8u8P2LECDVp0kQbNmxQcnKyJk6cKG9v7zLtjh49qr59+6pjx4764osv5OvrW+nnAqgBV/ysegBu6/rrrzfatWtnWCwW67Jnn33WaNeunfXn6OhoY8iQIaXWO3TokCHJ2Lx5s2EYhrF582bDZDIZhw8fNgzDMMxmsxEZGWnMmjWrws9euHChUb9+fevPw4cPN/r06VNh+0cffdQYNGiQ9efXXnvNiImJKVV7eTXOmDGj1PJ58+YZISEh1p+DgoKM+fPnl7uNC2337NljREVFGX/5y18q/DwANY+eHwBXpGfPnjKZTNafe/XqpX379slsNluXde/evdJtdO3aVe3atbP2/vz88886deqU7r77bmubH3/8UTfddJMiIyMVFBSkBx54QOnp6crNzZV0seenImPHjtXSpUt1/PhxSSV3Yj344IOlai/P5WpPSEjQmDFjFB8fr3/+8586cOBAqffz8vLUt29fDR06VG+88cZlPw9AzSH8AKg2gYGBl20zYsQIa/j55JNPdMstt6h+/fqSSsbf/OEPf1Dnzp315ZdfKjk5WTNnzpRUMoBa0mUHIXfr1k1dunTRBx98oOTkZO3cuVMPPvjgVdf+wgsvaOfOnbrtttu0fPlytW/fXl999ZX1fV9fX8XHx+t///ufNXgBcA6EHwBXZN26daV+Xrt2rVq1aiVP
T0+btnPfffdpx44dSk5O1hdffKERI0ZY30tOTpbFYtFrr72mnj17qnXr1jpx4kSp9Tt37qzExMRKP2PMmDGaP3++5s2bp/j4eEVFRdlUY0Vat26tJ598UkuXLtXQoUM1b94863seHh768MMPFRsbqxtvvLFM3QAch/AD4IocOXJECQkJ2rt3rz799FO99dZbmjBhgs3badasmXr37q2HH35YZrNZt99+u/W9li1bqqioSG+99ZYOHjyoDz/8ULNnzy61/qRJk7RhwwY99thj2rZtm/bs2aNZs2YpLS3N2ua+++7TsWPH9O677152oHNV5OXlafz48VqxYoUOHz6s1atXa8OGDWrXrl2pdp6envr444/VpUsX9e/fXykpKVf92QCuHuEHwBUZOXKk8vLy1KNHD40bN04TJkzQI488ckXbGjFihLZu3ao777yz1GWsLl26aPr06XrllVfUsWNHffzxx5o2bVqpdVu3bq2lS5dq69at6tGjh3r16qVvvvlGXl4XpzELCQnRXXfdpTp16mjIkCFXVOOlPD09lZ6erpEjR6p169a65557NGjQIL344otl2np5eenTTz9Vhw4d1L9/f506deqqPx/A1TEZxiUTcwBAFdxwww3q2rWrZsyY4ehSquymm25Shw4d9Oabbzq6FAAOxgzPAGq1s2fPasWKFVqxYoX++9//OrocAE6A8AOgVuvWrZvOnj2rV155RW3atHF0OQCcAJe9AACAW2HAMwAAcCuEHwAA4FYIPwAAwK0QfgAAgFsh/AAAALdC+AEAAG6F8AMAANwK4QcAALiV/w+aIdaL3oII1AAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 640x480 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "import pandas as pd\n",
    "df=pd.DataFrame({'xvalues': X_axi, 'yvalues': Y_axi })\n",
    " \n",
    "# plot\n",
    "plt.plot( 'xvalues', 'yvalues', data=df)\n",
    "plt.xlabel(\"privacy risk\")\n",
    "plt.ylabel(\"attack accuracy\")\n",
    "# show the graph\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2ee4f5ad-e959-4f63-bde3-78c7e25fc0e4",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "48d74a89-2c75-453a-a3b2-3fd906337030",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "025ed341-77fd-4f0e-b2cf-1ac59f0e74af",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "8e1a4ee1-0c47-411a-b98b-d7713d949e09",
   "metadata": {},
   "source": [
    "### 绘制损失分布差异"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "id": "ecd477a2-9df6-4a4a-ba78-716c2d669d45",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 101,
   "id": "2abddba4-ae48-405a-beb5-c45daf231d6d",
   "metadata": {},
   "outputs": [],
   "source": [
    "idx = 31000"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 102,
   "id": "0421a068-0678-435b-86b7-9f37862cc816",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 需要使用loss_data_all，pri_risk_rank，train_keep\n",
    "dat_in = []\n",
    "dat_out = []\n",
    "for i in range(loss_data_all.shape[1]):\n",
    "    dat_in.append((loss_data_all[train_keep[:,i],i]))\n",
    "    dat_out.append((loss_data_all[~train_keep[:,i],i]))\n",
    "dat_in = np.array(dat_in)\n",
    "dat_out = np.array(dat_out)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 103,
   "id": "37c2030e-d91f-4fdf-9a5a-e246ca594916",
   "metadata": {},
   "outputs": [],
   "source": [
    "mem1 = dat_in[pri_risk_rank[idx]]\n",
    "non_mem1 = dat_out[pri_risk_rank[idx]]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 104,
   "id": "5612c786-26df-4ff0-9c42-2af62fa46b0f",
   "metadata": {},
   "outputs": [],
   "source": [
    "mem1 = mem1.reshape(mem1.shape[0], 1)\n",
    "non_mem1 = non_mem1.reshape(non_mem1.shape[0], 1)\n",
    "\n",
    "arr = np.concatenate((mem1, non_mem1), 1)\n",
    "\n",
    "df = pd.DataFrame(arr, columns=['loss','out'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 105,
   "id": "5a3fd823-c33a-4c78-8829-e7bb6410bc02",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAG1CAYAAADjkR6kAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAmCUlEQVR4nO3df3DU9Z3H8dfm52YhCZDw24REBtINipEfYZRqoXJQLFaunV6dEYv0ij1KRUxFjTUi1JqWq5jRIhbuKnbUw2ur1HGQHqZ4tICCeFSRFeEMhuFXukLYkDUh2f3eHzZ7jSQh2ezu9/sJz8fMTtnv/vi+v/tt4Ol+v7txWZZlCQAAwEBJdg8AAAAQLUIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLFS7B4g3sLhsI4fP67MzEy5XC67xwEAAN1gWZYaGho0YsQIJSV1/r5Lnw+Z48ePKy8vz+4xAABAFI4eParLLrus09v7fMhkZmZK+uyFyMrKsnkaAADQHYFAQHl5eZF/xzvT50Om7XBSVlYWIQMAgGEudloIJ/sCAABjETIAAMBYff7QEgAAdguFQmppabF7DEdJTU1VcnJyr5+HkAEAIE4sy9LJkydVX19v9yiONGDAAA0bNqxXX49CyAAAECdtETNkyBB5PB6+z+xvLMtSMBhUXV2dJGn48OFRPxchAwBAHIRCoUjE5OTk2D2O42RkZEiS6urqNGTIkKgPM3GyLwAAcdB2TozH47F5Eudqe216c/4QIQMAQBxxOKlzsXhtCBkAAGAszpEBACDBamtr5ff7E7a+3Nxc5efnJ2x9iUTIAACQQLW1tSoq8qqpKZiwdbrdHh086Ot2zEybNk0lJSWqqqqK72AxQMgAAJBAfr9fTU1Beb3PyePxxn19waBPPt88+f3+bofMSy+9pNTU1DhPFhuEDAAANvB4vMrMnGD3GB0aNGiQ3SN0GyEDfE6ij13bpS8fMwfQO39/aKmgoEB33HGHDh8+rN/85jcaOHCgHnzwQd1xxx12jymJkAHasePYtV16eswcwKXrscce049//GM98MAD+u1vf6tFixbpS1/6koqKiuwejZAB/l6ij13bJZpj5gAuXTfeeKO+//3vS5Luu+8+Pf7449q2bRshAziVk49dA0CijR8/PvJnl8ulYcOGRX5Pkt34QjwAANClz3+CyeVyKRwO2zRNe4QMAAAwFoeWAACwQTDo61PrsQshAwBAAuXm5srt9sjnm5ewdbrdHuXm5iZsfYlEyAAAkED5+fk6eNDn6N+19MYbb0T+fOTIkQtu37dvX++HihFCBgCABMvPz+erD2KEk30BAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLEIGQAAYCxCBgAAGIvvkQEAIMFqa2sd/YV4JiFkAABIoNraWnmLihRsakrYOj1ut3wHDyYsZh5++GFt2rQpId8ATMgAAJBAfr9fwaYmPef1yuvxxH19vmBQ83yf/UqEvviuDCEDAIANvB6PJmRm2j1Gh5qbm7Vs2TJt3LhRgUBAkyZN0uOPP67Jkydrw4YNWrp0qerr6yP337Rpk/7xH/9RlmVpw4YNWrFihSTJ5XJJkp555hndfvvtcZmVkAEAAO3ce++9+t3vfqdnn31Wo0aN0qpVqzRr1iwdPnz4oo/91re+pf3792vLli16/fXXJUnZ2dlxm5WQAQAAEY2NjVq7dq02bNig2bNnS5LWr1+vrVu36t///d81ePDgLh+fkZGh/v37KyUlRcOGDYv7vHz8GgAARPzv//6vWlpaNHXq1Miy1NRUlZaWyufz2ThZxwgZAADQbUlJSbIsq92ylpYWm6YhZAAAwN8ZPXq00tLStGPHjsiylpYW7dmzR8XFxRo8eLAaGhrU2NgYuf3zH7NOS0tTKBRKyLycIwMAACL69eunRYsWadmyZRo0aJDy8/O1atUqBYNB/fM//7Ms
y5LH49EDDzygJUuW6K233tKGDRvaPUdBQYFqamq0b98+XXbZZcrMzFR6enpc5iVkAACwgS8YdOx6fvrTnyocDuu2225TQ0ODJk2apD/84Q8aOHCgJOm5557TsmXLtH79et1www16+OGHdccdd0Qe/41vfEMvvfSSpk+frvr6ej5+DQBAX5GbmyuP2615CTxx1uN2Kzc3t9v3d7vdeuKJJ/TEE090ePvcuXM1d+7cdssWLlwY+XN6erp++9vfRjVrT9kaMtu3b9e//uu/au/evTpx4oRefvnldi+MZVlavny51q9fr/r6ek2dOlVr167VmDFj7BsaAIBeyM/Pl+/gQX7XUozYGjKNjY266qqr9J3vfEdf//rXL7h91apVeuKJJ/Tss8+qsLBQFRUVmjVrlg4cOCC3223DxAAA9F5+fn6fDYtEszVkZs+eHfmync+zLEtVVVV68MEHdfPNN0uSfv3rX2vo0KHatGmTbrnllkSOCgAAHMixH7+uqanRyZMnNWPGjMiy7OxsTZkyRbt27er0cc3NzQoEAu0uAACgb3JsyJw8eVKSNHTo0HbLhw4dGrmtI5WVlcrOzo5c8vLy4jonAACwj2NDJlrl5eU6e/Zs5HL06FG7RwIAXMI+/y24+H+xeG0cGzJtv2jq1KlT7ZafOnWqy19ClZ6erqysrHYXAAASLTU1VZIUTND3xZio7bVpe62i4djvkSksLNSwYcNUXV2tkpISSVIgENBbb72lRYsW2TscAAAXkZycrAEDBqiurk6S5PF45HK5bJ7KGSzLUjAYVF1dnQYMGKDk5OSon8vWkDl37pwOHz4cud72dcZtX4m8dOlSPfLIIxozZkzk49cjRoy44Et4AABworYjCG0xg/YGDBjQ5VGW7rA1ZN5++21Nnz49cr2srEySNH/+fG3YsEH33nuvGhsbdccdd6i+vl5f/OIXtWXLFr5DBgBgBJfLpeHDh2vIkCG2/oZoJ0pNTe3VOzFtbA2ZadOmdXmij8vl0sqVK7Vy5coETgUAQGwlJyfH5B9tXMixJ/sCAABcDCEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWCl2D2Cy2tpa+f1+u8dIiNzcXOXn59s9BgAA7RAyUaqtrVVRkVdNTUG7R0kIt9ujgwd9xAwAwFEImSj5/X41NQXl9T4nj8dr9zhxFQz65PPNk9/vJ2QAAI5CyPSSx+NVZuYEu8cAAOCSxMm+AADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWI4OmVAopIqKChUWFiojI0OjR4/Wj3/8Y1mWZfdoAADAAVLsHqArP/vZz7R27Vo9++yzGjdunN5++20tWLBA2dnZWrJkid3jAQAAmzk6ZHbu3Kmbb75ZX/3qVyVJBQUF+o//+A/t3r3b5skAAIATODpkrr32Wq1bt04ffvihxo4dq7/85S/685//rNWrV3f6mObmZjU3N0euBwKBRIx6yaqtrZXf77d7jJjx+XySpGDQ1637p6bmyu3Oj+dIcLi+9jMQjdzcXOXn83MAezg6ZO6//34FAgF94QtfUHJyskKhkH7yk5/o1ltv7fQxlZWVWrFiRQKnvHTV1tbKW1SkYFOT3aPEnM83r1v3S0lya1LpQWLmEtWXfwZ6wuN2y3fwIDEDWzg6ZP7zP/9Tzz//vF544QWNGzdO+/bt09KlSzVixAjNnz+/w8eU
l5errKwscj0QCCgvLy9RI19S/H6/gk1Nes7rldfjsXucmGgMBnXA51O/DK+SkrvepppwUA8GfWpp8RMyl6i++DPQU75gUPN8Pvn9fkIGtnB0yCxbtkz333+/brnlFknSlVdeqY8//liVlZWdhkx6errS09MTOeYlz+vxaEJmpt1jxESDpFZJmckeJadcZJtaEzERTNCXfgYA0zj649fBYFBJSe1HTE5OVjgctmkiAADgJI5+R+amm27ST37yE+Xn52vcuHH6n//5H61evVrf+c537B4NAAA4gKND5sknn1RFRYW+//3vq66uTiNGjND3vvc9PfTQQ3aPBgAAHMDRIZOZmamqqipVVVXZPQoAAHAgR58jAwAA0BVCBgAAGIuQAQAAxiJkAACAsQgZAABgLEIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLEIGQAAYCxCBgAAGIuQAQAAxiJkAACAsQgZAABgLEIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLFS7B4AMF0w6LN7hB5rm9nn6/3szc3NSk9P7/XzxNOJEydUX18f8+etqamRJDUGg2qI+bNHJzU1VW632+4xLim1tbXy+/12j2Gb3Nxc5efn27Z+QgaIkj98XkmSfL55do8StXnzej97kqRw70cx2gGfT612D/E3SUlJmlJaSswkSG1trbxFRQo2Ndk9im08brd8Bw/aFjOEDBClBqtVYUkr0ws0OjXH7nF6JBwKqvFTn4q9XvXzeKJ+ns2ffKKKI0e0vqBAE3Kc+Ro0BoM64PPJ7S5Qsisjps+9o7VeT7eckNtdoMwU+7c/FA4qGPSppaWFkEkQv9+vYFOTnvN65e3Fz5KpfMGg5vl88vv9hAxgqsKkDHlTMu0eo0dCkhoklXg8ysyMfnZfMChJKsrI0IRePE88NUhqlZSZkqPkGO+njyWp5YSSXRkxf+6oOOVtoUuQ1+Nx7M9AX8fJvgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwVlQhc/nll+uTTz65YHl9fb0uv/zyXg8FAADQHVGFzJEjRxQKhS5Y3tzcrGPHjvV6KAAAgO5I6cmdX3nllcif//CHPyg7OztyPRQKqbq6WgUFBTEbDgAAoCs9Cpm5c+dKklwul+bPn9/uttTUVBUUFOixxx6L2XAAAABd6VHIhMNhSVJhYaH27Nmj3NzcuAwFAADQHT0KmTY1NTWxngMAAKDHogoZSaqurlZ1dbXq6uoi79S0+dWvftXrwQAAAC4mqk8trVixQjNnzlR1dbX8fr/OnDnT7hJLx44d07x585STk6OMjAxdeeWVevvtt2O6DgAAYKao3pF5+umntWHDBt12222xnqedM2fOaOrUqZo+fbpee+01DR48WIcOHdLAgQPjul4AAGCGqELm/Pnzuvbaa2M9ywV+9rOfKS8vT88880xkWWFhYdzXCwAAzBDVoaXvfve7euGFF2I9ywVeeeUVTZo0Sd/85jc1ZMgQXX311Vq/fn2Xj2lublYgEGh3AQAAfVNU78g0NTVp3bp1ev311zV+/Hilpqa2u3316tUxGe6jjz7S2rVrVVZWpgceeEB79uzRkiVLlJaWdsH32LSprKzUihUrYrJ+AADgbFGFzLvvvquSkhJJ0v79+9vd5nK5ej1Um3A4rEmTJunRRx+VJF199dXav3+/nn766U5Dpry8XGVlZZHrgUBAeXl5MZsJAAA4R1Qhs23btljP0aHhw4eruLi43TKv16vf/e53nT4mPT1d6enp8R4NAAA4QFTnyCTK1KlTdfDgwXbLPvzwQ40aNcqmiQAAgJNE9Y7M9OnTuzyE9Mc//jHqgf7e3XffrWuvvVaPPvqo/umf
/km7d+/WunXrtG7dupg8PwAAMFtUIdN2fkyblpYW7du3T/v37+/03JVoTJ48WS+//LLKy8u1cuVKFRYWqqqqSrfeemvM1gEAAMwVVcg8/vjjHS5/+OGHde7cuV4N9Hlz5szRnDlzYvqcAACgb4jpOTLz5s3j9ywBAICEiWnI7Nq1S263O5ZPCQAA0KmoDi19/etfb3fdsiydOHFCb7/9tioqKmIyGAAAwMVEFTLZ2dntriclJamoqEgrV67UzJkzYzIYAADAxUQVMn//SxwBAADsElXItNm7d698Pp8kady4cbr66qtjMhQAAEB3RBUydXV1uuWWW/TGG29owIABkqT6+npNnz5dGzdu1ODBg2M5IwAAQIei+tTSnXfeqYaGBr3//vs6ffq0Tp8+rf379ysQCGjJkiWxnhEAAKBDUb0js2XLFr3++uvyer2RZcXFxVqzZg0n+wIAgISJ6h2ZcDis1NTUC5anpqYqHA73eigAAIDuiCpkvvzlL+uuu+7S8ePHI8uOHTumu+++WzfccEPMhgMAAOhKVCHzi1/8QoFAQAUFBRo9erRGjx6twsJCBQIBPfnkk7GeEQAAoENRnSOTl5end955R6+//ro++OADSZLX69WMGTNiOhwAAEBXevSOzB//+EcVFxcrEAjI5XLpH/7hH3TnnXfqzjvv1OTJkzVu3Dj96U9/itesAAAA7fQoZKqqqrRw4UJlZWVdcFt2dra+973vafXq1TEbDgAAoCs9Cpm//OUv+spXvtLp7TNnztTevXt7PRQAAEB39ChkTp061eHHrtukpKTor3/9a6+HAgAA6I4ehczIkSO1f//+Tm9/9913NXz48F4PBQAA0B09Cpkbb7xRFRUVampquuC2Tz/9VMuXL9ecOXNiNhwAAEBXevTx6wcffFAvvfSSxo4dqx/84AcqKiqSJH3wwQdas2aNQqGQfvSjH8VlUAAAgM/rUcgMHTpUO3fu1KJFi1ReXi7LsiRJLpdLs2bN0po1azR06NC4DAoAAPB5Pf5CvFGjRmnz5s06c+aMDh8+LMuyNGbMGA0cODAe8wEAAHQqqm/2laSBAwdq8uTJsZwFAACgR6L6XUsAAABOEPU7MvhMMOize4S4a9vGzZs3y+f7/+2tqamRJDUGg2qwZbLYawwG7R4BQDfU1tbK7/fbPUbk78R4/j2Ympoqt9sdp2c3HyETpRMnTihJks83z+5REqaioqLD5Qd8PrUmeJZ4C1uWku0eAkCHamtrVVTkVVOTc/7DI55/DyYlJWlKaSkx0wlCJkr19fUKS1qZXqDRqTl2jxNXLa2fqKnpiNLTRikl2RNZvqO1Xk+3nJDbXaDMlL7xGrS0nlZTU03kE3kAnMfv96upKSiv9zl5PF5bZwkGffL55qlfhleZf/f3Y6yEwkEFgz61tLQQMp0gZHqpMClD3pRMu8eIq/PhoIKSPCmDlJaWHVn+sSS1nFCyK0PJfeQ1CIWd8194ALrm8XiVmTnB7jEkSUnJnvj8PdjX3u6OA072BQAAxiJkAACAsQgZAABgLEIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLEIGQAAYCxCBgAAGIuQAQAAxiJkAACAsQgZAABgLEIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLEIGQAAYCxCBgAAGIuQAQAAxjIqZH7605/K5XJp6dKldo8CAAAcwJiQ2bNnj375y19q/Pjxdo8CAAAcwoiQOXfunG699VatX79eAwcOtHscAADgECl2D9Adixcv1le/+lXNmDFDjzzySJf3bW5uVnNzc+R6IBCI93iAsRqDwV49/tNPP438b0NDQyxGirnebiMAZ3N8yGzcuFHvvPOO9uzZ0637V1ZWasWKFXGeCjBb2DovSfL5fL16npq2/z1yRKlHjvRuqDgLW5aS7R4CQMw5OmSOHj2qu+66S1u3bpXb7e7WY8rLy1VW
Vha5HggElJeXF68RASNZVqskye0uUmpK/6ifJ6PlE6n5iNzuAmWm5MRqvJhqaT2tpqYaWZZl9ygA4sDRIbN3717V1dVpwoQJkWWhUEjbt2/XL37xCzU3Nys5uf1/Y6Wnpys9PT3RowJGSkryKDklM+rHu0KfHbZJdmX06nniKRTm0BLQlzk6ZG644Qa999577ZYtWLBAX/jCF3TfffddEDEAAODS4uiQyczM1BVXXNFuWb9+/ZSTk3PBcgAAcOkx4uPXAAAAHXH0OzIdeeONN+weAQAAOATvyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYzk6ZCorKzV58mRlZmZqyJAhmjt3rg4ePGj3WAAAwCEcHTL//d//rcWLF+vNN9/U1q1b1dLSopkzZ6qxsdHu0QAAgAOk2D1AV7Zs2dLu+oYNGzRkyBDt3btX119/vU1TAQAAp3B0yHze2bNnJUmDBg3q9D7Nzc1qbm6OXA8EAnGfCwCcpDEYTPi6Nm/eLJ/Pl5B11tTUSJKCwcSsrytOmOFSZ0zIhMNhLV26VFOnTtUVV1zR6f0qKyu1YsWKBE4GAM4Qts5LUsKCQpLe1GfnKFRUVCRsnW18vnkJX2dnrL+99kg8Y0Jm8eLF2r9/v/785z93eb/y8nKVlZVFrgcCAeXl5cV7PACwnWW1SpLc7iKlpvRPyDpDLZ8o3HxED6eN0Ojk7ISsszV0Vs3njyd0Ozuzo/UTPdV0RFa41dY5LmVGhMwPfvADvfrqq9q+fbsuu+yyLu+bnp6u9PT0BE0GAM6TlORRckpmQtblCn12aGl0crbGpQ9NyDrPn5eC54/Lk+RRWoK2szM1ocQdxkPHHB0ylmXpzjvv1Msvv6w33nhDhYWFdo8EAAAcxNEhs3jxYr3wwgv6/e9/r8zMTJ08eVKSlJ2drYyMDJunAwAAdnP098isXbtWZ8+e1bRp0zR8+PDI5cUXX7R7NAAA4ACOfkfGsiy7RwAAAA7m6HdkAAAAukLIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMZUTIrFmzRgUFBXK73ZoyZYp2795t90gAAMABHB8yL774osrKyrR8+XK98847uuqqqzRr1izV1dXZPRoAALCZ40Nm9erVWrhwoRYsWKDi4mI9/fTT8ng8+tWvfmX3aAAAwGYpdg/QlfPnz2vv3r0qLy+PLEtKStKMGTO0a9euDh/T3Nys5ubmyPWzZ89KkgKBQExnCwaDkqT3W8/oUysU0+d2mtZQQM2S
0lv8Sgk3RpZ/1PrZa9uXXoPOtrUjJm9/T7azKya8BrHa1o44bfvjua2dseM1sGM7OxPv7Q9ZTWqS1NzQIE/I/v+Pfd7Bv/1beO7cuZj/O9v2fJZldX1Hy8GOHTtmSbJ27tzZbvmyZcus0tLSDh+zfPlySxIXLly4cOHCpQ9cjh492mUrOPodmWiUl5errKwscj0cDuv06dPKycmRy+WycTIzBAIB5eXl6ejRo8rKyrJ7HMQA+7TvYZ/2LezPjlmWpYaGBo0YMaLL+zk6ZHJzc5WcnKxTp061W37q1CkNGzasw8ekp6crPT293bIBAwbEa8Q+Kysrix+oPoZ92vewT/sW9ueFsrOzL3ofR5/sm5aWpokTJ6q6ujqyLBwOq7q6Wtdcc42NkwEAACdw9DsyklRWVqb58+dr0qRJKi0tVVVVlRobG7VgwQK7RwMAADZzfMh861vf0l//+lc99NBDOnnypEpKSrRlyxYNHTrU7tH6pPT0dC1fvvyCw3MwF/u072Gf9i3sz95xWdbFPtcEAADgTI4+RwYAAKArhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDHqlpqZG06dPV3Fxsa688ko1Ntr7C9zQe8FgUKNGjdI999xj9yjopaNHj2ratGkqLi7W+PHj9Zvf/MbukdBDr776qoqKijRmzBj927/9m93jOBIfv0avfOlLX9Ijjzyi6667TqdPn1ZWVpZSUhz/9UTowo9+9CMdPnxYeXl5+vnPf273OOiFEydO6NSpUyopKdHJkyc1ceJEffjhh+rXr5/do6EbWltbVVxcrG3btik7O1sTJ07Uzp07lZOTY/dojsI7Moja+++/r9TUVF133XWSpEGDBhExhjt06JA++OADzZ492+5REAPDhw9XSUmJJGnYsGHKzc3V6dOn7R0K3bZ7926NGzdOI0eOVP/+/TV79mz913/9l91jOQ4h04dt375dN910k0aMGCGXy6VNmzZdcJ81a9aooKBAbrdbU6ZM0e7du7v9/IcOHVL//v110003acKECXr00UdjOD0+L977U5LuueceVVZWxmhiXEwi9mmbvXv3KhQKKS8vr5dTo7t6u3+PHz+ukSNHRq6PHDlSx44dS8ToRiFk+rDGxkZdddVVWrNmTYe3v/jiiyorK9Py5cv1zjvv6KqrrtKsWbNUV1cXuU9JSYmuuOKKCy7Hjx9Xa2ur/vSnP+mpp57Srl27tHXrVm3dujVRm3fJiff+/P3vf6+xY8dq7NixidqkS16892mb06dP69vf/rbWrVsX923C/4vF/kU3WLgkSLJefvnldstKS0utxYsXR66HQiFrxIgRVmVlZbeec+fOndbMmTMj11etWmWtWrUqJvOia/HYn/fff7912WWXWaNGjbJycnKsrKwsa8WKFbEcG12Ixz61LMtqamqyrrvuOuvXv/51rEZFFKLZvzt27LDmzp0buf2uu+6ynn/++YTMaxLekblEnT9/Xnv37tWMGTMiy5KSkjRjxgzt2rWrW88xefJk1dXV6cyZMwqHw9q+fbu8Xm+8RkYXYrE/KysrdfToUR05ckQ///nPtXDhQj300EPxGhkXEYt9almWbr/9dn35y1/WbbfdFq9REYXu7N/S0lLt379fx44d07lz5/Taa69p1qxZdo3sWITMJcrv9ysUCl3wW8SHDh2qkydPdus5UlJS9Oijj+r666/X+PHjNWbMGM2ZMyce4+IiYrE/4Syx2Kc7duzQiy++qE2bNqmkpEQlJSV677334jEueqg7+zclJUWPPfaYpk+frpKSEv3whz/kE0sd4CMm6JXZs2fzCZc+6Pbbb7d7BMTAF7/4RYXDYbvHQC987Wtf09e+9jW7x3A03pG5ROXm5io5OVmnTp1qt/zUqVMaNmyYTVMhWuzPvod92rexf2OHkLlEpaWlaeLEiaquro4sC4fDqq6u1jXXXGPjZIgG+7PvYZ/2bezf2OHQUh927tw5HT58OHK9pqZG+/bt
06BBg5Sfn6+ysjLNnz9fkyZNUmlpqaqqqtTY2KgFCxbYODU6w/7se9infRv7N0Hs/tgU4mfbtm2WpAsu8+fPj9znySeftPLz8620tDSrtLTUevPNN+0bGF1if/Y97NO+jf2bGPyuJQAAYCzOkQEAAMYiZAAAgLEIGQAAYCxCBgAAGIuQAQAAxiJkAACAsQgZAABgLEIGAAAYi5AB4CjTpk3T0qVL7R4DgCEIGQAAYCxCBgAAGIuQAeBYZ86c0be//W0NHDhQHo9Hs2fP1qFDhyK3f/zxx7rppps0cOBA9evXT+PGjdPmzZsjj7311ls1ePBgZWRkaMyYMXrmmWfs2hQAcZJi9wAA0Jnbb79dhw4d0iuvvKKsrCzdd999uvHGG3XgwAGlpqZq8eLFOn/+vLZv365+/frpwIED6t+/vySpoqJCBw4c0Guvvabc3FwdPnxYn376qc1bBCDWCBkAjtQWMDt27NC1114rSXr++eeVl5enTZs26Zvf/KZqa2v1jW98Q1deeaUk6fLLL488vra2VldffbUmTZokSSooKEj4NgCIPw4tAXAkn8+nlJQUTZkyJbIsJydHRUVF8vl8kqQlS5bokUce0dSpU7V8+XK9++67kfsuWrRIGzduVElJie69917t3Lkz4dsAIP4IGQDG+u53v6uPPvpIt912m9577z1NmjRJTz75pCRp9uzZ+vjjj3X33Xfr+PHjuuGGG3TPPffYPDGAWCNkADiS1+tVa2ur3nrrrciyTz75RAcPHlRxcXFkWV5env7lX/5FL730kn74wx9q/fr1kdsGDx6s+fPn67nnnlNVVZXWrVuX0G0AEH+cIwPAkcaMGaObb75ZCxcu1C9/+UtlZmbq/vvv18iRI3XzzTdLkpYuXarZs2dr7NixOnPmjLZt2yav1ytJeuihhzRx4kSNGzdOzc3NevXVVyO3Aeg7eEcGgGM988wzmjhxoubMmaNrrrlGlmVp8+bNSk1NlSSFQiEtXrxYXq9XX/nKVzR27Fg99dRTkqS0tDSVl5dr/Pjxuv7665WcnKyNGzfauTkA4sBlWZZl9xAAAADR4B0ZAABgLEIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLEIGQAAYCxCBgAAGIuQAQAAxiJkAACAsf4PQCpQwnVN9OIAAAAASUVORK5CYII=",
      "text/plain": [
       "<Figure size 640x480 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "import seaborn as sns\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "sns.histplot(data=df, x=\"loss\", color=\"blue\", label=\"in\", log_scale=True)\n",
    "sns.histplot(data=df, x=\"out\", color=\"red\", label=\"out\", log_scale=True)\n",
    "\n",
    "plt.legend() \n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "05bd3cc7-e9f6-438b-9e72-7a67d682b590",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 106,
   "id": "3501d38f-7388-44cb-8b49-3c03c82ab8c8",
   "metadata": {},
   "outputs": [],
   "source": [
    "def imshow(img):\n",
    "    \"\"\"Display an image array with matplotlib and show the figure.\"\"\"\n",
    "    plt.imshow(img)\n",
    "    plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 107,
   "id": "94652e56-d847-4409-89fa-99e1b48e52d5",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAaAAAAGdCAYAAABU0qcqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAvuUlEQVR4nO3de3Cc9X3v8c/eV9eVdZcsycgXbIwvaRwwOiSUYNeXnjIQPB1IcqYmZWCgMlNw0yTuJBBoe0TJTEKSccwfpbiZE0NCTwyFaaBgsDhJbFI7OI4TomBHYBlb8lX322r3OX+kqBUY+H1tyT9JvF8zO2Npv/7q9zzP7n71aHc/GwqCIBAAABdY2PcCAAAfTgwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXUd8LeKdsNqujR4+qoKBAoVDI93IAAEZBEKinp0fV1dUKh9/7PGfSDaCjR4+qtrbW9zIAAOepra1NNTU173n9hA2gzZs362tf+5ra29u1dOlSffvb39bll1/+gf+voKBAkrS4rlaR95mc/92MGQnndd100xrnWklq7+x3rj3dbztjy2YyzrUZQ60kxaIR59rKsjJT7+GRrKm+te2Ie3EoZuqdSOQ413Z1njH17u0ddK4Nq8DUe2697Zes3Nx859pMJtfU+5e/es25Nq/E1vvKVZ9wrj145JCpd39vt3PtJbPnmXq3tLxpqo+G3e+fJfnut1lJioy4P64UlJaaep860+VcO9PQe2BgQJ/fePvo4/l7mZAB9P3vf18bN27Uww8/rOXLl+uhhx7S6tWr1dLSovLy8vf9v2//2S0SDjsPoGjE/cE2J+k+rCQpmRxxrk1kpuYAShr3Sdg4gOLxuHuxeQC5947Hbb1jMfd9HpZhGyUlEklTfdJQn8nYHuBiMcs+tN1WcnLdB1YiaVv3yMjwhKxDkuLG4xMzDKBk0tbbMoBycmzbmRww7ENjb0kf+DTKhLwI4etf/7puvfVWfe5zn9PChQv18MMPKzc3V//0T/80ET8OADAFjfsAGh4e1t69e7Vy5cr/+iHhsFauXKldu3a9q35oaEjd3d1jLgCA6W/cB9DJkyeVyWRUUVEx5vsVFRVqb29/V31TU5NSqdTohRcgAMCHg/f3AW3atEldXV2jl7a2Nt9LAgBcAOP+IoTS0lJFIhF1dHSM+X5HR4cqKyvfVZ9IJJRI2J7YBABMfeN+BhSPx7Vs2TLt2LFj9HvZbFY7duxQQ0PDeP84AMAUNSEvw964caPWr1+vj33sY7r88sv10EMPqa+vT5/73Ocm4scBAKagCRlAN954o06cOKF77rlH7e3t+shHPqJnn332XS9MAAB8eE1YEsKGDRu0YcOGc/7/rSczCoUCp9og6VYnSYcOH7YtJOL+5rjuE2lT66qzPCf2Xkxv5pSUTLrXV5WWmHqXV73/m4nfafHiuc61pzs7Tb17utzfFDs4UGXqnR5xfxPyqRMDpt6ZIfc3AErSb1rd0woGh21viC4vd78dnuo6Yep98Bd7nWuPnHzL1Du/0P2+GQy7v+NfknqO/MZUX17inoSRTBabeh9pdU8SSQYzTb2r8t2ffy+JuN+u+iNu9wfvr4IDAHw4MYAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeTFgUz/nKJqSQ43gczOY59z3Y6h7dIkknTvzWufadH0HxQerqZjnXlpaVmnovXHCxc23nGdun0Pb09Zrq82e4x5QosP1ONNDvHoGTzdqOfSLhHmc0MtJp6t1zxlbf8pufOdcGEds+LCq73Lk2GbVF2kTSGefaYvdkHUlSXsL9ft91vMfUO5bjHsMkSZHcQefavPKYqXf/4X7n2t2vHjD1rqmqca7trYo41w4Ouu0PzoAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAA
AF4wgAAAXkzaLLggk5GygVPt6TPu2WS/+M2bpnWkCt13UTiZNPU+cvy4c+1Axj1TS5I+8rGPOde2tr1h6j1oyPeSpIEh91ytpR+9zNT75JnTzrUzig2ZdJJOnD7lXDscuN1W35bJ2Orr69wzu5IFtqyxvJyQc21ne6epd3972rn2ik9eber9w6d/5Fwb2G6y6u11v29K0qGDQ8617cfaTL3rL1roXBuWLfPulZ//3Ln2oovOONcODw871XEGBADwggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADwYtJG8UQzcYVCbvOxsDDPuW9ukXvsiCQFkaxzbWVNnam3a1yFJJVXVJh6h+IJ59pfvHbQ1HvR4o+a6t889pZz7f4DtqikM2fc43IuTs4y9e443elcm4jmmHofP91nqi/ML3OujSVsv1eG5R5RVJyaberde9o9viU84n5fk6R589xv4/HIoKn3ySO2OCMp7lxZWZpv6pzMuO+XmSW2Y5+zNNe5tqevw7k2nHWL3+IMCADgBQMIAOAFAwgA4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAODFpM2CC0UyCocCp9rKyhLnvlnZMqG6urqdawf7Mqbevb29zrU5CVt+1KmTXc61x46fNvVeGnfPvZKk4tJC59q2I7819Q5C7jlZI6o29Y7H3PPAynLdM7UkqT3mdtt+23DWvX5kwPZ75cnOk861uTm2zLueYffb+FsnfmfqvWCOewbkRbPcHyMkKT93nqk+EXO/TwwPpUy9f/TsUefakhz3fElJuuqKcufan+0dcq4dHEw71XEGBADwYtwH0Fe/+lWFQqExlwULFoz3jwEATHET8ie4Sy+9VC+88MJ//ZDopP1LHwDAkwmZDNFoVJWVlRPRGgAwTUzIc0Cvv/66qqurNXv2bH32s5/V4cOH37N2aGhI3d3dYy4AgOlv3AfQ8uXLtXXrVj377LPasmWLWltb9YlPfEI9PT1nrW9qalIqlRq91NbWjveSAACT0LgPoLVr1+pP//RPtWTJEq1evVr/9m//ps7OTv3gBz84a/2mTZvU1dU1emlraxvvJQEAJqEJf3VAUVGRLr74Yh08ePCs1ycSCSUS7p/tDgCYHib8fUC9vb06dOiQqqqqJvpHAQCmkHEfQJ///OfV3NysN954Qz/96U/1qU99SpFIRJ/+9KfH+0cBAKawcf8T3JEjR/TpT39ap06dUllZmT7+8Y9r9+7dKisrszUKMgrkFj/y1lvuzxsNp21RPPl57hE4eTm2PyXmGdJbsoYolt/Xu9dGIrabQcjxuLxtaKDfuXbEeHxy8pPOtV1nbJFDQ4YYpoEBWwxTeZ97rIkkxWLuB7Qvz3DwJQ0bbraDsQFT77wZ7veftG0XquOY+1oSxke6krKIrb7IfSd2nnG/P0hSZ5d7fU3pTFPvtrfO/tTI2Qym3aOPBkfczm3GfQA9/vjj490SADANkQUHAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADwggEEAPBiwj+O4VzFYnGFw27zMRJxz22KBrZNjifizrV5ee5ZSZKUn++ek2WplaQewyfL1lRVm3qf6Dhhqu/v7XOuzQyPmHr3dfc61x7LHjH1zvS4Z3CdNuZ75aWHTfWFxe7HPx5LmXpXlBY614aTOabekZEC59oe44ch95xxr/1ti+1zxkIRW+ZdRYn7PozJdnxmFM5zro0maky9D7zmngU3MOL+ODs07FbLGRAAwAsGEADACwYQAMALBhAAwAsGEADACwYQAMALBhAAwAsGEADACwYQAMALBhAAwItJG8UTjUYUDrvFOaRS7tEWhYW2uJz+fvdIjjNnDNkgknJy3GNN2tvbTb1PnTrlXJuXm2vq3fbmYVP9wID7PjxxosPUuyDlHlETZDOm3n2nu5xrB9vd97ckZROBqT4n
4R71Uxy3xfzE+04714bDMVPvUMz9thWL2h6OoiH3tWSzZabeuQW2tWQzCefaglSJqfdg2j0CxxoJtXDhCufaX/z6t861mcyQUx1nQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvJm0W3MDgoMIht/nY1tbm3Le4pMi0jq4u9zyw/j73zDNJyslJOtdms7bssGjEPT8qJ+G+DkkKmaql9MiIe+2wLccsoqxzbV6BLQewJ+1+PHsCt+yrt40M2vZicNQ9Z7C3131/S1KxIR8xmXTPL5SkWNL9dtgzYrv/9PS61weBbX8XFxeb6udftMS5dk5dtan3wdY3nGtHRmxZcMq459Llxtzz7iKOd0vOgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeTNosuExmRFnHLLi+Lvf8o4EBW1ZSOOw+o9NpWwbX8HDPhKxDkmIx90MbZDKm3tFozFSflXuOXdYYNNfXO+hcmzzda+odjhU61wbRPlPvxJDxrjfgvmOyWdvxHJT77Xawu9vUuy990rm2u9e2D6MJ92yyyqqZpt6tb7jnS0pSeVHKuTbXsG5Jyst3z+rrG7Ddxg++cci59kyn+7Ecdsx05AwIAOCFeQC9/PLLuvbaa1VdXa1QKKQnn3xyzPVBEOiee+5RVVWVcnJytHLlSr3++uvjtV4AwDRhHkB9fX1aunSpNm/efNbrH3zwQX3rW9/Sww8/rFdeeUV5eXlavXq1Bgfd/1QCAJj+zM8BrV27VmvXrj3rdUEQ6KGHHtKXv/xlXXfddZKk7373u6qoqNCTTz6pm2666fxWCwCYNsb1OaDW1la1t7dr5cqVo99LpVJavny5du3addb/MzQ0pO7u7jEXAMD0N64DqL29XZJUUVEx5vsVFRWj171TU1OTUqnU6KW2tnY8lwQAmKS8vwpu06ZN6urqGr1YPl4bADB1jesAqqyslCR1dHSM+X5HR8fode+USCRUWFg45gIAmP7GdQDV19ersrJSO3bsGP1ed3e3XnnlFTU0NIznjwIATHHmV8H19vbq4MGDo1+3trZq3759Ki4uVl1dne666y793d/9nebNm6f6+np95StfUXV1ta6//vrxXDcAYIozD6A9e/bok5/85OjXGzdulCStX79eW7du1Re+8AX19fXptttuU2dnpz7+8Y/r2WefVTKZNP2cIMhIjhEuOckcQ1/TMpTJZJ1rs+6l/9nbPTIlErH1ltx7RyPGKB5jlEg47B4jkw5sOzETuEV+SNJg5xlT72zy7H82Ppv8uHsUiyQp3fHBNf/NjFz3u+pQ1n2fSFJbe6d772HbH02iQdq5NqfAPXJGkmpnusfrfOQP/sDU2/rm+fYTZ3+R1dkc6Sg19a6vc9/Okf4BU2/F3B8QC0uKnWuHhoac6swD6Oqrr1bwPo/ioVBI999/v+6//35rawDAh4j3V8EBAD6cGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvzFE8F8rISEahkFsu2NCQe5ZZ2BqqZgiPs2S7/b61e++w8VeFiGE78wsKTL1dc57eFhjy3RKGXD9JiuXFnWvzM/2m3hXJHufaqoR75pkkLUj1murnxd33y5EB930iSf+n3f34HMqfYepdEnPPAYzGbQ9HpSUlzrWzZ9ebei9ffrmp/vHHvudc++bhw6bec+tnOdeWF9mOz8Cg+325d2jEuTYaGXSq4wwIAOAFAwgA4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAODFpI3iscTUJJNJ51prjIzck0TM4nH3yBTLNkpSJuMem6GQbSNThYWm+vSAWyyHJGXS7sddkirkvl/+sMwWUfM/qtwjimbnG/a3pLzWDlt9l3uM0FDUtp2dqVzn2v8btkVZ
DaTdY37SaVuc0fWfut659n/+yZ+YeoeN2VdvvvE759r/t/MlU++eHvdIqOoSWxRP0nA8M9lh59qQYyoZZ0AAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALyZtFlwkElEo5DYfIxH3PKNs1j2bSrJl0llZsuCsLOvu6u6yNc/NM5UXJN3rk4MDpt7/K93mXLtmxHYsZyQWOdfm/8FKU+/2DsewrP/UefQXzrU56dOm3otC7tl++3NsmYQDF811rn2r/Zip98kTJ51rrVmKlscUSVqyZIlz7c92/dTU+/Rp9+NZlrLdNwO53ye6O93396Bj5iZnQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALyZtFE88HneO4gkC93gdayRHNOq+izIZW7xKOp2ekFpJCkfcf7eIJmw3gyBjizPKTbjv85m5uabeHxnoda6ddard1Lsn53Xn2s4O98gZSUrGbfswXmqoHbJFDi2U+z7/aEmVqffusHukTX5evqn3Sy+95Fx72fLLTb3fOvqWqf6Zf/1X59qcnBxT7/x89/0ybIwaG0m7ReZI0vBwt3NtenjYqY4zIACAFwwgAIAX5gH08ssv69prr1V1dbVCoZCefPLJMdfffPPNCoVCYy5r1qwZr/UCAKYJ8wDq6+vT0qVLtXnz5vesWbNmjY4dOzZ6eeyxx85rkQCA6cf8IoS1a9dq7dq171uTSCRUWVl5zosCAEx/E/Ic0M6dO1VeXq758+frjjvu0KlTp96zdmhoSN3d3WMuAIDpb9wH0Jo1a/Td735XO3bs0D/8wz+oublZa9eufc+XKDc1NSmVSo1eamtrx3tJAIBJaNzfB3TTTTeN/nvx4sVasmSJ5syZo507d2rFihXvqt+0aZM2btw4+nV3dzdDCAA+BCb8ZdizZ89WaWmpDh48eNbrE4mECgsLx1wAANPfhA+gI0eO6NSpU6qqsr2DGgAwvZn/BNfb2zvmbKa1tVX79u1TcXGxiouLdd9992ndunWqrKzUoUOH9IUvfEFz587V6tWrx3XhAICpzTyA9uzZo09+8pOjX7/9/M369eu1ZcsW7d+/X//8z/+szs5OVVdXa9WqVfrbv/1bJRIJ088Jy/30LGTIm8ox5JJJUjwec67NZm1ZcKdPdzrXZtK23hHHHD1JkrF3Nmyr7x8YcK4NyspNvQ9Gq51r64Zs687p73SuDSfeMPUenmfLa8vMme9c2/pGj6n3wQ73rLFW4z783cnfOdfGjY8Rw1m3vDFJavrff2/qffL4SVO95H48Fy6YZ+qcX1DgXBuL2x7fsobsuJrqGufagcFBpzrzALr66qsVBO+9s5977jlrSwDAhxBZcAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAAL8b984DGSywcVTjsNh9HRkac+8Yjtk1ORA1ZcIFtnkdCIefaZNyWk1VcVORcmxN3z9KTpJBh3ZI0nHbP7BowruW1spnuvVs7Tb1Let3XPXLU1vt0xrYPT/2u37m27aR7vpckvTXsltslSb3JLlPviOF4dnafsfWOuPc+fcqW7VaUP8NUX1pa4lxrzaOMGfIo49G4qXck5n47DGK57n3DbvmPnAEBALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALyYtFE8Fq6RPZIUki0CJWTobQuRkQytlV+QZ+o9c2aFc20kFJh6W6N4gqx7NMzxM6dMvffl1DnX7sm6R4lIUqS717k2+5NOU+9oXpmpfrDT/fgHEdvxHMxxPz7DYffYK0kKhtxvK4mYe+SMJEUNUTxxY5RVXtIWl5OX496/qDDf1Dtp2C9x4z7MGB4Pg8D9djWSyTjVcQYEAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADw
ggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8GLSZsGFQiHnzLGkIbcpkDEna2DIuTaRtOUwJZNx59qLLppp6n39ddc61545cdrU+7XXXjPV5+W6Z7C9/rtWU++u3n7n2iBly1873eN+7HNO2bLD4kHaVJ+OumVrSVIwaMtrG+h37x1NuN9mJSlmyA+LWsIRJYUNOWbGeDyFAvd9IkmJqPvaZ6QKTb1zDI8TxphGU66jpdY1n5MzIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAF5M2iicIAgWOMR7ZbNa5ryW2R5LC4Yh7bcSWg2GJ4pkzZ5apdzzu/rtFQUGeqfe8eXNM9SdPnXSunVlZbur95u8OO9fm5tq2syCWcK7NGGJ7JKm/+5SpPpR0z5LJDrjfZiVJI+63w7Dc94kkKeIeaRMyROtIUsRwfwsZo3jykrbtLC8tca4tKsw39Y7I/fEtZIwas8TrBIbHWTk+dnMGBADwwjSAmpqadNlll6mgoEDl5eW6/vrr1dLSMqZmcHBQjY2NKikpUX5+vtatW6eOjo5xXTQAYOozDaDm5mY1NjZq9+7dev7555VOp7Vq1Sr19fWN1tx99916+umn9cQTT6i5uVlHjx7VDTfcMO4LBwBMbabngJ599tkxX2/dulXl5eXau3evrrrqKnV1demRRx7Rtm3bdM0110iSHn30UV1yySXavXu3rrjiivFbOQBgSjuv54C6urokScXFxZKkvXv3Kp1Oa+XKlaM1CxYsUF1dnXbt2nXWHkNDQ+ru7h5zAQBMf+c8gLLZrO666y5deeWVWrRokSSpvb1d8XhcRUVFY2orKirU3t5+1j5NTU1KpVKjl9ra2nNdEgBgCjnnAdTY2KgDBw7o8ccfP68FbNq0SV1dXaOXtra28+oHAJgazul9QBs2bNAzzzyjl19+WTU1NaPfr6ys1PDwsDo7O8ecBXV0dKiysvKsvRKJhBIJ43sLAABTnukMKAgCbdiwQdu3b9eLL76o+vr6MdcvW7ZMsVhMO3bsGP1eS0uLDh8+rIaGhvFZMQBgWjCdATU2Nmrbtm166qmnVFBQMPq8TiqVUk5OjlKplG655RZt3LhRxcXFKiws1J133qmGhgZeAQcAGMM0gLZs2SJJuvrqq8d8/9FHH9XNN98sSfrGN76hcDisdevWaWhoSKtXr9Z3vvOdcVksAGD6MA0gl2y2ZDKpzZs3a/Pmzee8KEkqLStTJOKWaXXs6FHnvp2dnaZ1FBamnGtz83JMvUNh9xymgsJcU++8fPfMuxmFRabeXd1nTPXptHtOWm6BbTtHDNlXvT39pt45Cfd92DcyaOo9lLU975mbdl9LoGFT7yDqntcWThjywCQlwu45c5ZcMkkKh9yfQZjxjlfmfpBZdbZX4xYVFTrXxqK2rL54zPIwbXtdWTbsfv8JLDlzjo9tZMEBALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALw4p49juBB6+voUDrtFVkQNURVFSffIDEnq7upxru3v7zX1Lky5x6sUF5WYepeVlDnXhmWLBlmyZImpfmBgwLm27cgRU+9ojnukzWCv+zokKSb3iJq8wnxT7/yQe0SNJGXT7jEow0GfqXc4POJcGzXER0mSsu7rjkRtD0czimY4186pn23qXVhoi9WKGdZeUGB7DIrF3B8nhkdsUUlByP02rrDhfMWxljMgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBeTNguuu7dX4ZDbfCybkXLue8Xly0zriMfcs8ZafvtbU+/ftbrXDw4MmXqXlVU41w4NDJp6x2IxU/38+Rc717a0tJh6R+SefZVK2fLaNOKek5Ubt2WHBcZf/YYiaUO1rXkm454FGBgy6SQpGnd/iKkoLzf1rqqqcq7NSbrf
jyUpHrdl9VXPnOlcm0q5Z9iZhdxz/SQpMOQdWo68ay1nQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALyZtFE8yGlU47DYfZ9XWOPddtPAS0zpmGiI2rlhui/l5dd/PnWvDtgQUdZ4+41ybk2OLkRkYGDDV9/f3O9dGou6xMJKUybpH8STitt5B1n2np0csUTlSJuMegSJJI1n3iJVQ4L5PJCkRdX8YyM/PM/WuqnSPy6msrDT1tkgYo3guumiWqb6iwj36KhQKmXqPjNjidaYSzoAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXkzaLLhYNOycBVdd5Z7DVFNty5sqyHfPkJqRsmWqVa5a6Vz7xhtvmHr3dfc61/b09Jh6nzhxwlS/a9cu59qwMSfLUp1O2/Lawhn3LLhIxJYzFzH+6hc15LUlk/mm3oWFhc61qaIiU+9kMulcm8kMm3oXFxc7185fMN/Uu7S01FRvudlacwCDwBgEOYVwBgQA8MI0gJqamnTZZZepoKBA5eXluv7669XS0jKm5uqrr1YoFBpzuf3228d10QCAqc80gJqbm9XY2Kjdu3fr+eefVzqd1qpVq9TX1zem7tZbb9WxY8dGLw8++OC4LhoAMPWZngN69tlnx3y9detWlZeXa+/evbrqqqtGv5+bmzuhn+0BAJj6zus5oK6uLknvfjLwe9/7nkpLS7Vo0SJt2rTpfT+QbGhoSN3d3WMuAIDp75xfBZfNZnXXXXfpyiuv1KJFi0a//5nPfEazZs1SdXW19u/fry9+8YtqaWnRD3/4w7P2aWpq0n333XeuywAATFHnPIAaGxt14MAB/fjHPx7z/dtuu23034sXL1ZVVZVWrFihQ4cOac6cOe/qs2nTJm3cuHH06+7ubtXW1p7rsgAAU8Q5DaANGzbomWee0csvv6yampr3rV2+fLkk6eDBg2cdQIlEQomE7fPaAQBTn2kABUGgO++8U9u3b9fOnTtVX1//gf9n3759kqSqqqpzWiAAYHoyDaDGxkZt27ZNTz31lAoKCtTe3i5JSqVSysnJ0aFDh7Rt2zb98R//sUpKSrR//37dfffduuqqq7RkyZIJ2QAAwNRkGkBbtmyR9Ps3m/53jz76qG6++WbF43G98MILeuihh9TX16fa2lqtW7dOX/7yl8dtwQCA6cH8J7j3U1tbq+bm5vNa0NsioUDhkFsGUmWFe25TQZ4try0acc9hisZMrRWPuq9lRqrI1PvUydPOtfMumWfqnZeXZ6qfPXu2c+2r//knW1fZIOtc29frno8nSak890w16z5JxGzvgIjH3bPmcnJzTb1jMfcbbtT4xo1w2P3+U1rmnu0mSZdeeqlzbUlJiam3LWVwYo2MjPhewoQhCw4A4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAOAFAwgA4MU5fx7QRCsuKlQk4hY/MvuiWc5941HrJqedK2MRW3xHPOa+lqqKClPvAwd+5Vx7pueMqffChQtN9cPDw861Q4ODpt6RsPvvUIOZjKl3xlAfZN0jgX7f21g/4h5pk0m7729JShhuh8Uziky95y2Y71xbV1dn6p2X6x5/ZIlskqRsxn1/Sx8cU/bfhUK2x4lwyP02HpLtNm4RZN230bWWMyAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAF5M2C+76a/9YyUTCqbaqosy5ryFWSZIUjSadayPhmK25Yf4XFBaYOn/sY8uca5/b8byp9949e031p0+ddq7Njtgyu4pyc51rF198sal3Iu52+5MkGbLAJCmZNPSWlJPjfjtMpVKm3pWVlRNSK0m5ee55bVYDA+65gVFjBqQ1r82Wk2a7jVvWEjE+wEUCw3YatjFMFhwA
YDJjAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALyYtFE8CxcuUJ5jzEo47D5HY/G4aR2xmHu8TiQSMfW2xINYo0Tq59Q7166NrzX13r17t6m+LdHmXGvZ35K0aMEC59pLL73U1NtyPC23QUmKGY9n2LAW6z6MG+4TgTFyaGBgwLl2cNA9WscskzGVx6K2fRiOuh+f7IhtHwYaMVVbWBKHAkNv11rOgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeTNosuCAkZR1zisIRwxwNG8KPJMmQ8RUyZsGFDPlRIcs2ypZNNnfePFPvmpoaU31XV5dzrTUjLdcxL1A6l0PvfnysOYBW2ax7lpktDcz2H7LZrKl1bn6ec208mTT1Hk4PO9dmM7Z1Z4170bJfMsbeI4ZjnzZm3o0Y1m3p7VrLGRAAwAvTANqyZYuWLFmiwsJCFRYWqqGhQT/60Y9Grx8cHFRjY6NKSkqUn5+vdevWqaOjY9wXDQCY+kwDqKamRg888ID27t2rPXv26JprrtF1112nX/3qV5Kku+++W08//bSeeOIJNTc36+jRo7rhhhsmZOEAgKnN9Af3a6+9dszXf//3f68tW7Zo9+7dqqmp0SOPPKJt27bpmmuukSQ9+uijuuSSS7R7925dccUV47dqAMCUd87PAWUyGT3++OPq6+tTQ0OD9u7dq3Q6rZUrV47WLFiwQHV1ddq1a9d79hkaGlJ3d/eYCwBg+jMPoF/+8pfKz89XIpHQ7bffru3bt2vhwoVqb29XPB5XUVHRmPqKigq1t7e/Z7+mpialUqnRS21trXkjAABTj3kAzZ8/X/v27dMrr7yiO+64Q+vXr9evf/3rc17Apk2b1NXVNXppa3P/+GYAwNRlfh9QPB7X3LlzJUnLli3Tf/zHf+ib3/ymbrzxRg0PD6uzs3PMWVBHR4cqKyvfs18ikVAikbCvHAAwpZ33+4Cy2ayGhoa0bNkyxWIx7dixY/S6lpYWHT58WA0NDef7YwAA04zpDGjTpk1au3at6urq1NPTo23btmnnzp167rnnlEqldMstt2jjxo0qLi5WYWGh7rzzTjU0NPAKOADAu5gG0PHjx/Vnf/ZnOnbsmFKplJYsWaLnnntOf/RHfyRJ+sY3vqFwOKx169ZpaGhIq1ev1ne+851zWlhxWZny89xiPCwJK9GY7a+OsWjMuTZsjGOJWGJ+DLWSNDzsHlMyNDho6m2NnZkxY4ap3sIUUWPM4snIPaYksCW92G8r8bhzbdoQUSPZYpuygS3qxRKBE42739ck2/FJJG33e+vjRGbEfb9kjHFGOXnucVMjhnVItnWPjIw41/b19zvVmfbyI4888r7XJ5NJbd68WZs3b7a0BQB8CJEFBwDwggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8MKchj3RgiCQ5B7lIE1wFE/EEsVjm+eTJYpneDht6m2JbpH+65hOBEsUT8gYxWMRDtn2SThsjOIx3LbSI8bjaVi7tbcpiidqu28ODw8510YiUzeKJ5txj8AxR/EYjo8liqf/Px+/P+i+Hwom8tHhHBw5coQPpQOAaaCtrU01NTXvef2kG0DZbFZHjx5VQUGBQqH/+o21u7tbtbW1amtrU2FhoccVTiy2c/r4MGyjxHZON+OxnUEQqKenR9XV1e/7F5NJ9ye4cDj8vhOzsLBwWh/8t7Gd08eHYRsltnO6Od/tTKVSH1jDixAAAF4wgAAAXkyZAZRIJHTvvfcqkUj4XsqEYjunjw/DNkps53RzIbdz0r0IAQDw4TBlzoAAANMLAwgA4AUDCADgBQMIAODFlBlAmzdv1kUXXaRkMqnly5frZz/7me8ljauvfvWrCoVCYy4LFizwvazz8vLLL+vaa69VdXW1QqGQnnzyyTHXB0Gge+65R1VVVcrJ
ydHKlSv1+uuv+1nsefig7bz55pvfdWzXrFnjZ7HnqKmpSZdddpkKCgpUXl6u66+/Xi0tLWNqBgcH1djYqJKSEuXn52vdunXq6OjwtOJz47KdV1999buO5+233+5pxedmy5YtWrJkyeibTRsaGvSjH/1o9PoLdSynxAD6/ve/r40bN+ree+/Vz3/+cy1dulSrV6/W8ePHfS9tXF166aU6duzY6OXHP/6x7yWdl76+Pi1dulSbN28+6/UPPvigvvWtb+nhhx/WK6+8ory8PK1evVqDg4MXeKXn54O2U5LWrFkz5tg+9thjF3CF56+5uVmNjY3avXu3nn/+eaXTaa1atUp9fX2jNXfffbeefvppPfHEE2pubtbRo0d1ww03eFy1nct2StKtt9465ng++OCDnlZ8bmpqavTAAw9o79692rNnj6655hpdd911+tWvfiXpAh7LYAq4/PLLg8bGxtGvM5lMUF1dHTQ1NXlc1fi69957g6VLl/pexoSRFGzfvn3062w2G1RWVgZf+9rXRr/X2dkZJBKJ4LHHHvOwwvHxzu0MgiBYv359cN1113lZz0Q5fvx4IClobm4OguD3xy4WiwVPPPHEaM1rr70WSAp27drla5nn7Z3bGQRB8Id/+IfBX/7lX/pb1ASZMWNG8I//+I8X9FhO+jOg4eFh7d27VytXrhz9Xjgc1sqVK7Vr1y6PKxt/r7/+uqqrqzV79mx99rOf1eHDh30vacK0traqvb19zHFNpVJavnz5tDuukrRz506Vl5dr/vz5uuOOO3Tq1CnfSzovXV1dkqTi4mJJ0t69e5VOp8cczwULFqiurm5KH893bufbvve976m0tFSLFi3Spk2bRj9+YCrKZDJ6/PHH1dfXp4aGhgt6LCddGOk7nTx5UplMRhUVFWO+X1FRod/85jeeVjX+li9frq1bt2r+/Pk6duyY7rvvPn3iE5/QgQMHVFBQ4Ht54669vV2Sznpc375uulizZo1uuOEG1dfX69ChQ/qbv/kbrV27Vrt27VIkYvtcoMkgm83qrrvu0pVXXqlFixZJ+v3xjMfjKioqGlM7lY/n2bZTkj7zmc9o1qxZqq6u1v79+/XFL35RLS0t+uEPf+hxtXa//OUv1dDQoMHBQeXn52v79u1auHCh9u3bd8GO5aQfQB8Wa9euHf33kiVLtHz5cs2aNUs/+MEPdMstt3hcGc7XTTfdNPrvxYsXa8mSJZozZ4527typFStWeFzZuWlsbNSBAwem/HOUH+S9tvO2224b/ffixYtVVVWlFStW6NChQ5ozZ86FXuY5mz9/vvbt26euri79y7/8i9avX6/m5uYLuoZJ/ye40tJSRSKRd70Co6OjQ5WVlZ5WNfGKiop08cUX6+DBg76XMiHePnYftuMqSbNnz1ZpaemUPLYbNmzQM888o5deemnMx6ZUVlZqeHhYnZ2dY+qn6vF8r+08m+XLl0vSlDue8Xhcc+fO1bJly9TU1KSlS5fqm9/85gU9lpN+AMXjcS1btkw7duwY/V42m9WOHTvU0NDgcWUTq7e3V4cOHVJVVZXvpUyI+vp6VVZWjjmu3d3deuWVV6b1cZV+/6m/p06dmlLHNggCbdiwQdu3b9eLL76o+vr6MdcvW7ZMsVhszPFsaWnR4cOHp9Tx/KDtPJt9+/ZJ0pQ6nmeTzWY1NDR0YY/luL6kYYI8/vjjQSKRCLZu3Rr8+te/Dm677bagqKgoaG9v9720cfNXf/VXwc6dO4PW1tbgJz/5SbBy5cqgtLQ0OH78uO+lnbOenp7g1VdfDV599dVAUvD1r389ePXVV4M333wzCIIgeOCBB4KioqLgqaeeCvbv3x9cd911QX19fTAwMOB55Tbvt509PT3B5z//+WDXrl1Ba2tr8MILLwQf/ehHg3nz5gWDg4O+l+7sjjvuCFKpVLBz587g2LFjo5f+/v7Rmttvvz2oq6sLXnzxxWDPnj1BQ0ND0NDQ4HHVdh+0nQcPHgzuv//+
YM+ePUFra2vw1FNPBbNnzw6uuuoqzyu3+dKXvhQ0NzcHra2twf79+4MvfelLQSgUCv793/89CIILdyynxAAKgiD49re/HdTV1QXxeDy4/PLLg927d/te0ri68cYbg6qqqiAejwczZ84MbrzxxuDgwYO+l3VeXnrppUDSuy7r168PguD3L8X+yle+ElRUVASJRCJYsWJF0NLS4nfR5+D9trO/vz9YtWpVUFZWFsRisWDWrFnBrbfeOuV+eTrb9kkKHn300dGagYGB4C/+4i+CGTNmBLm5ucGnPvWp4NixY/4WfQ4+aDsPHz4cXHXVVUFxcXGQSCSCuXPnBn/9138ddHV1+V240Z//+Z8Hs2bNCuLxeFBWVhasWLFidPgEwYU7lnwcAwDAi0n/HBAAYHpiAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8+P97tlGt2lCeiQAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 640x480 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "imshow(X_data[pri_risk_rank[idx]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 100,
   "id": "9a280b28-71e2-4ac0-be7a-cab54c5af113",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "6"
      ]
     },
     "execution_count": 100,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "Y_data[pri_risk_rank[idx]]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "63075866-0819-4386-8e3d-ee79a873b1fd",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "opacus",
   "language": "python",
   "name": "opacus"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
