{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "6b439e84-8c6b-4126-8586-d7a1a7c7614e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 该代码文件主要为了在Purchase100上执行相关实验，验证模型自蒸馏"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "3a72ff09-8088-4a9d-b1f7-0825ff54c477",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 自蒸馏步骤：\n",
    "# 分割模型训练集，接收比例参数\n",
    "# 对训练集进行多次分割，多个影子数据集\n",
    "# 训练多个模型\n",
    "# 合并多个模型输出\n",
    "# 训练目标模型\n",
    "# 攻击\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "b61f101e-1c1e-41df-b80b-65d1e3d6eab8",
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "from torch import nn\n",
    "from torch.utils.data import DataLoader\n",
    "from torch.utils.data import Dataset\n",
    "from torchvision import datasets\n",
    "from torchvision import transforms\n",
    "from torchvision.transforms import ToTensor\n",
    "import torchvision.transforms as tt\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn import metrics\n",
    "\n",
    "import os"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "37ddaa77-35ce-49b9-acfd-a7799aadd9a5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 导入自己创建的python文件\n",
    "import sys\n",
    "sys.path.append(\"..\") # Adds higher directory to python modules path.\n",
    "from frame.DataProcess import *\n",
    "from frame.TrainUtil import *\n",
    "from frame.LIRAAttack import *\n",
    "from frame.AttackUtil import *\n",
    "from frame.ShadowAttack import *\n",
    "from frame.ThresholdAttack import *\n",
    "from frame.LabelAttack import *"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "4636a18e-244e-4a21-ba21-591c0295ce7a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Experiment configuration — everything a reader might tune lives here.\n",
    "LEARNING_RATE = 5e-4\n",
    "BATCH_SIZE = 128\n",
    "MODEL = 'NN_4layer'\n",
    "EPOCHS = 100\n",
    "DATA_NAME = 'Purchase100_limited' \n",
    "weight_dir = os.path.join('..', 'weights_for_exp', DATA_NAME)\n",
    "num_shadowsets = 100\n",
    "seed = 0\n",
    "prop_keep = 0.5\n",
    "\n",
    "model_transform = transforms.Compose([])\n",
    "attack_transform = transforms.Compose([])\n",
    "device = \"cuda\" if torch.cuda.is_available() else \"cpu\"\n",
    "\n",
    "# Shadow-model attack parameters\n",
    "sha_models = [1,2,3] #[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30]\n",
    "tar_model = 0\n",
    "attack_class = False # whether to attack each class separately\n",
    "attack_lr = 5e-4"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "e8818f5c-0cf8-4635-a3d6-292f91f14d75",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 加载完整的训练数据集\n",
    "X_data, Y_data, train_keep = load_Purchase100_limited_keep(num_shadowsets, prop_keep, seed)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "1754e859-f23b-4a3c-8666-b2ce06c2b8f8",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 创建对应的dataloader\n",
    "all_data = CustomDataset(X_data, Y_data, model_transform)\n",
    "all_dataloader = DataLoader(all_data, batch_size=BATCH_SIZE, shuffle=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "0e839f4d-93e7-48f0-af07-349ebef049a2",
   "metadata": {},
   "outputs": [],
   "source": [
    "batch_size = BATCH_SIZE\n",
    "model = MODEL\n",
    "epochs = EPOCHS\n",
    "data_name = DATA_NAME \n",
    "weight_part = \"{}_{}_epoch{}_model\".format(data_name, model, epochs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "5ef57ecf-b753-43a3-ae9a-f6a1f05171dc",
   "metadata": {},
   "outputs": [],
   "source": [
    "loss_data_all = np.load('../outputs_save/Purchase100_limited_loss.npy')\n",
    "score_all = np.load('../outputs_save/Purchase100_limited_score.npy')\n",
    "conf_data_all = np.load('../outputs_save/Purchase100_limited_conf.npy')\n",
    "pri_risk_all = get_risk_score(loss_data_all, train_keep)\n",
    "pri_risk_rank = np.argsort(pri_risk_all)\n",
    "pri_risk_rank = np.flip(pri_risk_rank)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3a936cf8-20c8-463d-b4b2-72838ae2e276",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "5a3619ae-0096-4771-848f-2523774f3570",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(30066, 600) (30066,) (29934, 600) (29934,)\n",
      " Error: \n",
      " Accuracy: 100.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 86.1%  \n",
      "\n",
      "(60000, 100) (60000,) (60000,)\n",
      "(29882, 600) (29882,) (30118, 600) (30118,)\n",
      " Error: \n",
      " Accuracy: 100.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 86.5%  \n",
      "\n",
      "(60000, 100) (60000,) (60000,)\n",
      "(29851, 600) (29851,) (30149, 600) (30149,)\n",
      " Error: \n",
      " Accuracy: 100.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 85.6%  \n",
      "\n",
      "(60000, 100) (60000,) (60000,)\n",
      "(30013, 600) (30013,) (29987, 600) (29987,)\n",
      " Error: \n",
      " Accuracy: 100.0%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 86.4%  \n",
      "\n",
      "test data: (60000, 100) (60000,) (60000,)\n",
      "(180000, 100) (180000,)\n",
      "Attack_NN(\n",
      "  (linear_relu_stack): Sequential(\n",
      "    (0): Linear(in_features=3, out_features=128, bias=True)\n",
      "    (1): ReLU()\n",
      "    (2): Linear(in_features=128, out_features=64, bias=True)\n",
      "    (3): ReLU()\n",
      "    (4): Linear(in_features=64, out_features=1, bias=True)\n",
      "  )\n",
      ")\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 0.690621  [  128/180000]\n",
      "loss: 0.683830  [12928/180000]\n",
      "loss: 0.659851  [25728/180000]\n",
      "loss: 0.645241  [38528/180000]\n",
      "loss: 0.634166  [51328/180000]\n",
      "loss: 0.621733  [64128/180000]\n",
      "loss: 0.622547  [76928/180000]\n",
      "loss: 0.642757  [89728/180000]\n",
      "loss: 0.607553  [102528/180000]\n",
      "loss: 0.647547  [115328/180000]\n",
      "loss: 0.624473  [128128/180000]\n",
      "loss: 0.624762  [140928/180000]\n",
      "loss: 0.620767  [153728/180000]\n",
      "loss: 0.607987  [166528/180000]\n",
      "loss: 0.579218  [179328/180000]\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 0.614923  [  128/180000]\n",
      "loss: 0.619191  [12928/180000]\n",
      "loss: 0.579756  [25728/180000]\n",
      "loss: 0.607335  [38528/180000]\n",
      "loss: 0.608406  [51328/180000]\n",
      "loss: 0.597871  [64128/180000]\n",
      "loss: 0.583870  [76928/180000]\n",
      "loss: 0.613104  [89728/180000]\n",
      "loss: 0.627438  [102528/180000]\n",
      "loss: 0.584483  [115328/180000]\n",
      "loss: 0.584885  [128128/180000]\n",
      "loss: 0.594818  [140928/180000]\n",
      "loss: 0.591857  [153728/180000]\n",
      "loss: 0.596892  [166528/180000]\n",
      "loss: 0.583033  [179328/180000]\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.585319  [  128/180000]\n",
      "loss: 0.630838  [12928/180000]\n",
      "loss: 0.591579  [25728/180000]\n",
      "loss: 0.587762  [38528/180000]\n",
      "loss: 0.552790  [51328/180000]\n",
      "loss: 0.558459  [64128/180000]\n",
      "loss: 0.560653  [76928/180000]\n",
      "loss: 0.538502  [89728/180000]\n",
      "loss: 0.591651  [102528/180000]\n",
      "loss: 0.549964  [115328/180000]\n",
      "loss: 0.569721  [128128/180000]\n",
      "loss: 0.606539  [140928/180000]\n",
      "loss: 0.578279  [153728/180000]\n",
      "loss: 0.584270  [166528/180000]\n",
      "loss: 0.577168  [179328/180000]\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.611379  [  128/180000]\n",
      "loss: 0.608817  [12928/180000]\n",
      "loss: 0.602268  [25728/180000]\n",
      "loss: 0.536985  [38528/180000]\n",
      "loss: 0.627781  [51328/180000]\n",
      "loss: 0.568742  [64128/180000]\n",
      "loss: 0.579255  [76928/180000]\n",
      "loss: 0.543106  [89728/180000]\n",
      "loss: 0.579228  [102528/180000]\n",
      "loss: 0.557452  [115328/180000]\n",
      "loss: 0.547872  [128128/180000]\n",
      "loss: 0.575106  [140928/180000]\n",
      "loss: 0.542381  [153728/180000]\n",
      "loss: 0.552190  [166528/180000]\n",
      "loss: 0.573732  [179328/180000]\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.601972  [  128/180000]\n",
      "loss: 0.593197  [12928/180000]\n",
      "loss: 0.554068  [25728/180000]\n",
      "loss: 0.510193  [38528/180000]\n",
      "loss: 0.543590  [51328/180000]\n",
      "loss: 0.572922  [64128/180000]\n",
      "loss: 0.602140  [76928/180000]\n",
      "loss: 0.550893  [89728/180000]\n",
      "loss: 0.563142  [102528/180000]\n",
      "loss: 0.545704  [115328/180000]\n",
      "loss: 0.557991  [128128/180000]\n",
      "loss: 0.581099  [140928/180000]\n",
      "loss: 0.587665  [153728/180000]\n",
      "loss: 0.560618  [166528/180000]\n",
      "loss: 0.599825  [179328/180000]\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.547983  [  128/180000]\n",
      "loss: 0.576555  [12928/180000]\n",
      "loss: 0.563149  [25728/180000]\n",
      "loss: 0.564674  [38528/180000]\n",
      "loss: 0.590421  [51328/180000]\n",
      "loss: 0.528138  [64128/180000]\n",
      "loss: 0.567444  [76928/180000]\n",
      "loss: 0.528558  [89728/180000]\n",
      "loss: 0.552620  [102528/180000]\n",
      "loss: 0.540786  [115328/180000]\n",
      "loss: 0.567034  [128128/180000]\n",
      "loss: 0.530778  [140928/180000]\n",
      "loss: 0.519033  [153728/180000]\n",
      "loss: 0.558758  [166528/180000]\n",
      "loss: 0.559215  [179328/180000]\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.509995  [  128/180000]\n",
      "loss: 0.604972  [12928/180000]\n",
      "loss: 0.542072  [25728/180000]\n",
      "loss: 0.595630  [38528/180000]\n",
      "loss: 0.556282  [51328/180000]\n",
      "loss: 0.532098  [64128/180000]\n",
      "loss: 0.579264  [76928/180000]\n",
      "loss: 0.557634  [89728/180000]\n",
      "loss: 0.583670  [102528/180000]\n",
      "loss: 0.545914  [115328/180000]\n",
      "loss: 0.539085  [128128/180000]\n",
      "loss: 0.571396  [140928/180000]\n",
      "loss: 0.574604  [153728/180000]\n",
      "loss: 0.585494  [166528/180000]\n",
      "loss: 0.529218  [179328/180000]\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.529823  [  128/180000]\n",
      "loss: 0.508295  [12928/180000]\n",
      "loss: 0.527677  [25728/180000]\n",
      "loss: 0.489797  [38528/180000]\n",
      "loss: 0.527882  [51328/180000]\n",
      "loss: 0.509400  [64128/180000]\n",
      "loss: 0.569651  [76928/180000]\n",
      "loss: 0.557326  [89728/180000]\n",
      "loss: 0.483185  [102528/180000]\n",
      "loss: 0.564086  [115328/180000]\n",
      "loss: 0.532684  [128128/180000]\n",
      "loss: 0.579049  [140928/180000]\n",
      "loss: 0.548125  [153728/180000]\n",
      "loss: 0.555399  [166528/180000]\n",
      "loss: 0.577103  [179328/180000]\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.482145  [  128/180000]\n",
      "loss: 0.504458  [12928/180000]\n",
      "loss: 0.546583  [25728/180000]\n",
      "loss: 0.544280  [38528/180000]\n",
      "loss: 0.526487  [51328/180000]\n",
      "loss: 0.592122  [64128/180000]\n",
      "loss: 0.588660  [76928/180000]\n",
      "loss: 0.528637  [89728/180000]\n",
      "loss: 0.553460  [102528/180000]\n",
      "loss: 0.576653  [115328/180000]\n",
      "loss: 0.534894  [128128/180000]\n",
      "loss: 0.545715  [140928/180000]\n",
      "loss: 0.476418  [153728/180000]\n",
      "loss: 0.536083  [166528/180000]\n",
      "loss: 0.562448  [179328/180000]\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.508954  [  128/180000]\n",
      "loss: 0.568493  [12928/180000]\n",
      "loss: 0.530145  [25728/180000]\n",
      "loss: 0.612236  [38528/180000]\n",
      "loss: 0.531517  [51328/180000]\n",
      "loss: 0.578179  [64128/180000]\n",
      "loss: 0.566111  [76928/180000]\n",
      "loss: 0.552012  [89728/180000]\n",
      "loss: 0.575717  [102528/180000]\n",
      "loss: 0.533356  [115328/180000]\n",
      "loss: 0.496514  [128128/180000]\n",
      "loss: 0.540853  [140928/180000]\n",
      "loss: 0.537118  [153728/180000]\n",
      "loss: 0.548333  [166528/180000]\n",
      "loss: 0.580478  [179328/180000]\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.524569  [  128/180000]\n",
      "loss: 0.547821  [12928/180000]\n",
      "loss: 0.542075  [25728/180000]\n",
      "loss: 0.549752  [38528/180000]\n",
      "loss: 0.545990  [51328/180000]\n",
      "loss: 0.539002  [64128/180000]\n",
      "loss: 0.564517  [76928/180000]\n",
      "loss: 0.526591  [89728/180000]\n",
      "loss: 0.514341  [102528/180000]\n",
      "loss: 0.469263  [115328/180000]\n",
      "loss: 0.586225  [128128/180000]\n",
      "loss: 0.484827  [140928/180000]\n",
      "loss: 0.588565  [153728/180000]\n",
      "loss: 0.515946  [166528/180000]\n",
      "loss: 0.550329  [179328/180000]\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.541306  [  128/180000]\n",
      "loss: 0.503701  [12928/180000]\n",
      "loss: 0.552176  [25728/180000]\n",
      "loss: 0.559882  [38528/180000]\n",
      "loss: 0.528528  [51328/180000]\n",
      "loss: 0.553050  [64128/180000]\n",
      "loss: 0.507711  [76928/180000]\n",
      "loss: 0.579738  [89728/180000]\n",
      "loss: 0.531904  [102528/180000]\n",
      "loss: 0.548100  [115328/180000]\n",
      "loss: 0.505616  [128128/180000]\n",
      "loss: 0.534694  [140928/180000]\n",
      "loss: 0.516797  [153728/180000]\n",
      "loss: 0.568379  [166528/180000]\n",
      "loss: 0.555414  [179328/180000]\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.481215  [  128/180000]\n",
      "loss: 0.578459  [12928/180000]\n",
      "loss: 0.577528  [25728/180000]\n",
      "loss: 0.505127  [38528/180000]\n",
      "loss: 0.532463  [51328/180000]\n",
      "loss: 0.503686  [64128/180000]\n",
      "loss: 0.505028  [76928/180000]\n",
      "loss: 0.529253  [89728/180000]\n",
      "loss: 0.580043  [102528/180000]\n",
      "loss: 0.545311  [115328/180000]\n",
      "loss: 0.536084  [128128/180000]\n",
      "loss: 0.531966  [140928/180000]\n",
      "loss: 0.511022  [153728/180000]\n",
      "loss: 0.525732  [166528/180000]\n",
      "loss: 0.536036  [179328/180000]\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.503111  [  128/180000]\n",
      "loss: 0.579593  [12928/180000]\n",
      "loss: 0.539371  [25728/180000]\n",
      "loss: 0.542942  [38528/180000]\n",
      "loss: 0.605962  [51328/180000]\n",
      "loss: 0.541489  [64128/180000]\n",
      "loss: 0.529780  [76928/180000]\n",
      "loss: 0.536147  [89728/180000]\n",
      "loss: 0.534202  [102528/180000]\n",
      "loss: 0.534680  [115328/180000]\n",
      "loss: 0.592704  [128128/180000]\n",
      "loss: 0.563276  [140928/180000]\n",
      "loss: 0.541400  [153728/180000]\n",
      "loss: 0.535039  [166528/180000]\n",
      "loss: 0.483987  [179328/180000]\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.576036  [  128/180000]\n",
      "loss: 0.555011  [12928/180000]\n",
      "loss: 0.541922  [25728/180000]\n",
      "loss: 0.513131  [38528/180000]\n",
      "loss: 0.576969  [51328/180000]\n",
      "loss: 0.512406  [64128/180000]\n",
      "loss: 0.498947  [76928/180000]\n",
      "loss: 0.525486  [89728/180000]\n",
      "loss: 0.534051  [102528/180000]\n",
      "loss: 0.574314  [115328/180000]\n",
      "loss: 0.500412  [128128/180000]\n",
      "loss: 0.555217  [140928/180000]\n",
      "loss: 0.506874  [153728/180000]\n",
      "loss: 0.513397  [166528/180000]\n",
      "loss: 0.504067  [179328/180000]\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.513513  [  128/180000]\n",
      "loss: 0.607010  [12928/180000]\n",
      "loss: 0.449133  [25728/180000]\n",
      "loss: 0.618431  [38528/180000]\n",
      "loss: 0.550299  [51328/180000]\n",
      "loss: 0.519039  [64128/180000]\n",
      "loss: 0.530545  [76928/180000]\n",
      "loss: 0.551743  [89728/180000]\n",
      "loss: 0.527685  [102528/180000]\n",
      "loss: 0.553108  [115328/180000]\n",
      "loss: 0.559433  [128128/180000]\n",
      "loss: 0.505650  [140928/180000]\n",
      "loss: 0.509376  [153728/180000]\n",
      "loss: 0.510385  [166528/180000]\n",
      "loss: 0.527860  [179328/180000]\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.561115  [  128/180000]\n",
      "loss: 0.546106  [12928/180000]\n",
      "loss: 0.545610  [25728/180000]\n",
      "loss: 0.568017  [38528/180000]\n",
      "loss: 0.540346  [51328/180000]\n",
      "loss: 0.604100  [64128/180000]\n",
      "loss: 0.531858  [76928/180000]\n",
      "loss: 0.511593  [89728/180000]\n",
      "loss: 0.512937  [102528/180000]\n",
      "loss: 0.574728  [115328/180000]\n",
      "loss: 0.525287  [128128/180000]\n",
      "loss: 0.514102  [140928/180000]\n",
      "loss: 0.481934  [153728/180000]\n",
      "loss: 0.510439  [166528/180000]\n",
      "loss: 0.564601  [179328/180000]\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.554055  [  128/180000]\n",
      "loss: 0.540819  [12928/180000]\n",
      "loss: 0.624277  [25728/180000]\n",
      "loss: 0.538042  [38528/180000]\n",
      "loss: 0.523327  [51328/180000]\n",
      "loss: 0.535986  [64128/180000]\n",
      "loss: 0.548628  [76928/180000]\n",
      "loss: 0.588596  [89728/180000]\n",
      "loss: 0.469034  [102528/180000]\n",
      "loss: 0.518097  [115328/180000]\n",
      "loss: 0.556880  [128128/180000]\n",
      "loss: 0.504610  [140928/180000]\n",
      "loss: 0.567371  [153728/180000]\n",
      "loss: 0.506874  [166528/180000]\n",
      "loss: 0.576366  [179328/180000]\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.512035  [  128/180000]\n",
      "loss: 0.478464  [12928/180000]\n",
      "loss: 0.556304  [25728/180000]\n",
      "loss: 0.476204  [38528/180000]\n",
      "loss: 0.522668  [51328/180000]\n",
      "loss: 0.491852  [64128/180000]\n",
      "loss: 0.524850  [76928/180000]\n",
      "loss: 0.560936  [89728/180000]\n",
      "loss: 0.484445  [102528/180000]\n",
      "loss: 0.523758  [115328/180000]\n",
      "loss: 0.573708  [128128/180000]\n",
      "loss: 0.615925  [140928/180000]\n",
      "loss: 0.535307  [153728/180000]\n",
      "loss: 0.501660  [166528/180000]\n",
      "loss: 0.522235  [179328/180000]\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.551128  [  128/180000]\n",
      "loss: 0.569722  [12928/180000]\n",
      "loss: 0.534226  [25728/180000]\n",
      "loss: 0.517919  [38528/180000]\n",
      "loss: 0.499005  [51328/180000]\n",
      "loss: 0.543572  [64128/180000]\n",
      "loss: 0.491420  [76928/180000]\n",
      "loss: 0.507011  [89728/180000]\n",
      "loss: 0.618179  [102528/180000]\n",
      "loss: 0.501604  [115328/180000]\n",
      "loss: 0.539229  [128128/180000]\n",
      "loss: 0.465266  [140928/180000]\n",
      "loss: 0.467985  [153728/180000]\n",
      "loss: 0.486892  [166528/180000]\n",
      "loss: 0.483456  [179328/180000]\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.521237  [  128/180000]\n",
      "loss: 0.543971  [12928/180000]\n",
      "loss: 0.467969  [25728/180000]\n",
      "loss: 0.484623  [38528/180000]\n",
      "loss: 0.451254  [51328/180000]\n",
      "loss: 0.556858  [64128/180000]\n",
      "loss: 0.517443  [76928/180000]\n",
      "loss: 0.566920  [89728/180000]\n",
      "loss: 0.556925  [102528/180000]\n",
      "loss: 0.508331  [115328/180000]\n",
      "loss: 0.527779  [128128/180000]\n",
      "loss: 0.527849  [140928/180000]\n",
      "loss: 0.523064  [153728/180000]\n",
      "loss: 0.446488  [166528/180000]\n",
      "loss: 0.540837  [179328/180000]\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.508619  [  128/180000]\n",
      "loss: 0.511233  [12928/180000]\n",
      "loss: 0.528228  [25728/180000]\n",
      "loss: 0.513354  [38528/180000]\n",
      "loss: 0.460480  [51328/180000]\n",
      "loss: 0.513961  [64128/180000]\n",
      "loss: 0.484993  [76928/180000]\n",
      "loss: 0.518205  [89728/180000]\n",
      "loss: 0.525700  [102528/180000]\n",
      "loss: 0.541093  [115328/180000]\n",
      "loss: 0.527819  [128128/180000]\n",
      "loss: 0.519239  [140928/180000]\n",
      "loss: 0.546288  [153728/180000]\n",
      "loss: 0.515256  [166528/180000]\n",
      "loss: 0.501528  [179328/180000]\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.483745  [  128/180000]\n",
      "loss: 0.538743  [12928/180000]\n",
      "loss: 0.433041  [25728/180000]\n",
      "loss: 0.480576  [38528/180000]\n",
      "loss: 0.576786  [51328/180000]\n",
      "loss: 0.528383  [64128/180000]\n",
      "loss: 0.497887  [76928/180000]\n",
      "loss: 0.518151  [89728/180000]\n",
      "loss: 0.515598  [102528/180000]\n",
      "loss: 0.538927  [115328/180000]\n",
      "loss: 0.489429  [128128/180000]\n",
      "loss: 0.517653  [140928/180000]\n",
      "loss: 0.557481  [153728/180000]\n",
      "loss: 0.446206  [166528/180000]\n",
      "loss: 0.582551  [179328/180000]\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.516745  [  128/180000]\n",
      "loss: 0.550250  [12928/180000]\n",
      "loss: 0.536840  [25728/180000]\n",
      "loss: 0.448132  [38528/180000]\n",
      "loss: 0.534744  [51328/180000]\n",
      "loss: 0.579082  [64128/180000]\n",
      "loss: 0.547367  [76928/180000]\n",
      "loss: 0.566803  [89728/180000]\n",
      "loss: 0.481946  [102528/180000]\n",
      "loss: 0.497183  [115328/180000]\n",
      "loss: 0.498235  [128128/180000]\n",
      "loss: 0.567713  [140928/180000]\n",
      "loss: 0.558001  [153728/180000]\n",
      "loss: 0.576597  [166528/180000]\n",
      "loss: 0.498419  [179328/180000]\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.530716  [  128/180000]\n",
      "loss: 0.499491  [12928/180000]\n",
      "loss: 0.575294  [25728/180000]\n",
      "loss: 0.480216  [38528/180000]\n",
      "loss: 0.493986  [51328/180000]\n",
      "loss: 0.541892  [64128/180000]\n",
      "loss: 0.518477  [76928/180000]\n",
      "loss: 0.495835  [89728/180000]\n",
      "loss: 0.540427  [102528/180000]\n",
      "loss: 0.504759  [115328/180000]\n",
      "loss: 0.512952  [128128/180000]\n",
      "loss: 0.502157  [140928/180000]\n",
      "loss: 0.538186  [153728/180000]\n",
      "loss: 0.534909  [166528/180000]\n",
      "loss: 0.466597  [179328/180000]\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.488762  [  128/180000]\n",
      "loss: 0.501345  [12928/180000]\n",
      "loss: 0.536083  [25728/180000]\n",
      "loss: 0.564186  [38528/180000]\n",
      "loss: 0.554640  [51328/180000]\n",
      "loss: 0.515102  [64128/180000]\n",
      "loss: 0.536930  [76928/180000]\n",
      "loss: 0.506278  [89728/180000]\n",
      "loss: 0.504528  [102528/180000]\n",
      "loss: 0.522334  [115328/180000]\n",
      "loss: 0.565248  [128128/180000]\n",
      "loss: 0.510614  [140928/180000]\n",
      "loss: 0.463734  [153728/180000]\n",
      "loss: 0.513100  [166528/180000]\n",
      "loss: 0.546369  [179328/180000]\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.533647  [  128/180000]\n",
      "loss: 0.548771  [12928/180000]\n",
      "loss: 0.512821  [25728/180000]\n",
      "loss: 0.502665  [38528/180000]\n",
      "loss: 0.498931  [51328/180000]\n",
      "loss: 0.488743  [64128/180000]\n",
      "loss: 0.529110  [76928/180000]\n",
      "loss: 0.592605  [89728/180000]\n",
      "loss: 0.514264  [102528/180000]\n",
      "loss: 0.518997  [115328/180000]\n",
      "loss: 0.509279  [128128/180000]\n",
      "loss: 0.562135  [140928/180000]\n",
      "loss: 0.544280  [153728/180000]\n",
      "loss: 0.567514  [166528/180000]\n",
      "loss: 0.510734  [179328/180000]\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.547215  [  128/180000]\n",
      "loss: 0.538731  [12928/180000]\n",
      "loss: 0.586372  [25728/180000]\n",
      "loss: 0.542937  [38528/180000]\n",
      "loss: 0.600373  [51328/180000]\n",
      "loss: 0.504726  [64128/180000]\n",
      "loss: 0.491388  [76928/180000]\n",
      "loss: 0.541098  [89728/180000]\n",
      "loss: 0.501558  [102528/180000]\n",
      "loss: 0.489756  [115328/180000]\n",
      "loss: 0.540865  [128128/180000]\n",
      "loss: 0.488379  [140928/180000]\n",
      "loss: 0.498630  [153728/180000]\n",
      "loss: 0.483334  [166528/180000]\n",
      "loss: 0.512873  [179328/180000]\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.532199  [  128/180000]\n",
      "loss: 0.516756  [12928/180000]\n",
      "loss: 0.517721  [25728/180000]\n",
      "loss: 0.574375  [38528/180000]\n",
      "loss: 0.499360  [51328/180000]\n",
      "loss: 0.485391  [64128/180000]\n",
      "loss: 0.497654  [76928/180000]\n",
      "loss: 0.553834  [89728/180000]\n",
      "loss: 0.540787  [102528/180000]\n",
      "loss: 0.516348  [115328/180000]\n",
      "loss: 0.496901  [128128/180000]\n",
      "loss: 0.555329  [140928/180000]\n",
      "loss: 0.522335  [153728/180000]\n",
      "loss: 0.470982  [166528/180000]\n",
      "loss: 0.499128  [179328/180000]\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.487697  [  128/180000]\n",
      "loss: 0.493302  [12928/180000]\n",
      "loss: 0.549909  [25728/180000]\n",
      "loss: 0.568664  [38528/180000]\n",
      "loss: 0.514703  [51328/180000]\n",
      "loss: 0.460947  [64128/180000]\n",
      "loss: 0.542186  [76928/180000]\n",
      "loss: 0.523351  [89728/180000]\n",
      "loss: 0.526627  [102528/180000]\n",
      "loss: 0.472837  [115328/180000]\n",
      "loss: 0.486800  [128128/180000]\n",
      "loss: 0.475311  [140928/180000]\n",
      "loss: 0.464819  [153728/180000]\n",
      "loss: 0.536089  [166528/180000]\n",
      "loss: 0.511165  [179328/180000]\n",
      "Done!\n",
      "Train data:\n",
      "AUC value is: 0.7083975864778878\n",
      "Accuracy is: 0.7089444444444445\n",
      "Test data:\n",
      "AUC value is: 0.7160716116845582\n",
      "Accuracy is: 0.67615\n"
     ]
    }
   ],
   "source": [
    "# Train the shadow-attack model.\n",
    "# prop_keep is passed from the config cell (was hard-coded 0.5 here, which would\n",
    "# silently diverge from the config if the constant were ever changed).\n",
    "attack_model = shadow_attack(sha_models=sha_models, tar_model=tar_model, model_num=num_shadowsets, weight_dir=weight_dir, data_name=DATA_NAME, model=MODEL, model_transform=model_transform, \n",
    "                  model_epochs=EPOCHS, batch_size=BATCH_SIZE, learning_rate=attack_lr, attack_epochs=30, attack_transform=attack_transform, \n",
    "                  device=device, prop_keep=prop_keep, top_k=3, attack_class=attack_class)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c41f1a6e-f0de-4bf3-bf89-b31b81b37964",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "6cf49ce4-482f-4544-b632-1d8193afc222",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 指定蒸馏手段的目标数据集\n",
    "mem_label = train_keep[0]\n",
    "mem_data = np.where(mem_label==True)[0]\n",
    "train_num = mem_label.sum()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "505a8b97-2514-4e2a-a69f-8b610398affc",
   "metadata": {},
   "outputs": [],
   "source": [
    "dist_num = 50\n",
    "np.random.seed(seed)\n",
    "keep_matrix = np.random.uniform(0,1,size=(dist_num, train_num))\n",
    "order = keep_matrix.argsort(0)\n",
    "dist_keep = order < int(prop_keep * dist_num)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "e208aa42-6643-4679-bfb9-061494c3d833",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# 批量训练蒸馏子模型\n",
    "# for i in range(dist_num):\n",
    "#     dis_train = mem_data[dist_keep[i]]\n",
    "#     x = X_data[dis_train]\n",
    "#     y = Y_data[dis_train]\n",
    "#     train_data = CustomDataset(x, y, model_transform)\n",
    "#     train_dataloader = DataLoader(train_data, batch_size=batch_size)\n",
    "#     ReferenceModel = globals()['create_{}_model'.format(model)](x.shape[1], y.max()+1)\n",
    "#     ReferenceModel.to(device)\n",
    "#     loss_fn = nn.CrossEntropyLoss()\n",
    "#     optimizer = torch.optim.Adam(ReferenceModel.parameters(), lr=LEARNING_RATE)\n",
    "#     for t in range(epochs):\n",
    "#         print(f\"Epoch {t+1}\\n-------------------------------\")\n",
    "#         train(train_dataloader, ReferenceModel, loss_fn, optimizer, device)\n",
    "#     print(\"Done!\")\n",
    "#     weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_dist0_model{}.pth\".format(data_name, model, epochs, i))\n",
    "#     torch.save(ReferenceModel.state_dict(), weight_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e9670e96-14d2-4b8b-a071-8adeecfeda39",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "a3ab56f4-c3ad-4435-b211-5630f90d03b8",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fd4c5af1-e3c8-40ef-91d4-feccf70c701d",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "da45bfa9-fd37-4386-8937-ea857f86b9f9",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = X_data[mem_label]\n",
    "y = Y_data[mem_label]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "2bd26053-2535-4b0d-83bf-5d37e7739722",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " Error: \n",
      " Accuracy: 90.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 91.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.4%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.5%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.5%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.5%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.5%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.9%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.4%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.5%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.5%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.4%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.4%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.6%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.5%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.5%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.7%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.4%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.8%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.4%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 90.6%  \n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Load the target data's outputs across all distillation (shadow) models.\n",
    "# weight_part encodes the checkpoint naming scheme: <dataset>_<model>_epoch<E>_dist0_model.\n",
    "weight_part = \"{}_{}_epoch{}_dist0_model\".format(data_name, model, epochs)\n",
    "conf_data_train, label_data_train, _ = load_score_data_all(x, y, weight_dir, dist_num, data_name, model, weight_part, model_transform, batch_size, device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "34907a72-da6e-4d85-9714-643f0214558e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# For each shadow model (column), split the confidence matrix into the\n",
    "# samples it was trained on (kept, 'in') and the rest ('out').\n",
    "num_models = conf_data_train.shape[1]\n",
    "conf_in = np.array([conf_data_train[dist_keep[:, j], j] for j in range(num_models)])\n",
    "conf_out = np.array([conf_data_train[~dist_keep[:, j], j] for j in range(num_models)])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "d6e96ff9-8451-4de5-a706-dc95149569ac",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Aggregate along axis 1 with the median (robust to outlier shadow models).\n",
    "# NOTE(review): the names say 'mean' but np.median is used — confirm which is\n",
    "# intended; renaming would break later cells that reference conf_out_mean.\n",
    "conf_in_mean = np.median(conf_in, 1)\n",
    "conf_out_mean = np.median(conf_out, 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "432b54fa-a658-4b22-8ca9-8b8845b8a882",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Study of label-mixing strategies\n",
    "# Mixing based on per-sample privacy risk\n",
    "# Load the precomputed privacy-risk scores"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "2b26cdca-b753-443c-b7da-845398d281ce",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(60000,)"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Sanity check: one precomputed privacy-risk score per sample (expect (60000,)).\n",
    "pri_risk_all.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "3b5b8330-ed31-43aa-8fed-d31c0232436f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# One-hot encode the member labels; 100 is the Purchase100 class count\n",
    "# (matches the model's 100-way output created later in this notebook).\n",
    "y_onehot = np.eye(100, dtype=np.float64)[Y_data[mem_label]]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "18269fc9-92e6-4fac-92bb-a145cdc84050",
   "metadata": {},
   "outputs": [],
   "source": [
    "def cross_entropy(y_pred, y_true):\n",
    "    ce = -y_true*np.log(y_pred+1e-30)\n",
    "    ce = np.sum(ce, axis=0)\n",
    "    return ce\n",
    "\n",
    "def cal_risk(conf_in, conf_out, y_true):\n",
    "    loss_in = cross_entropy(conf_in, y_true)\n",
    "    loss_out = cross_entropy(conf_out, y_true)\n",
    "    risk = loss_out - loss_in\n",
    "    if risk < 0:\n",
    "        risk = -risk\n",
    "    return risk\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "6ee709f5-8967-47d3-984a-89d443c7dba4",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the raw Purchase100 table: 600 feature columns plus the class label in column 600.\n",
    "dataframe = pd.read_csv('../datasets/purchase100.txt', sep=',', encoding='utf-8', header=None)\n",
    "X_tmp = dataframe.iloc[:, range(600)].values\n",
    "Y_tmp = np.array([i for i in dataframe.loc[:, 600]])\n",
    "\n",
    "# Rows 90000-109999 serve as the held-out test split for the target model.\n",
    "x_test_data = X_tmp[90000:110000]\n",
    "y_test_data = Y_tmp[90000:110000]\n",
    "test_data = CustomDataset(x_test_data, y_test_data, model_transform)\n",
    "test_dataloader = DataLoader(test_data, batch_size=batch_size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "3e17c72b-50c1-4f59-b7d8-c2ad2b26512f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# # 这是策略之三\n",
    "# risk_bound = 1\n",
    "# for i in range(train_num):\n",
    "#     risk = pri_risk_all[mem_data[i]]\n",
    "#     k = 1\n",
    "#     soft_label = k*y_onehot[i] + (1-k)*conf_out_mean[i]\n",
    "#     risk = cal_risk(soft_label, conf_out_mean[i], y_onehot[i])\n",
    "#     while(risk>risk_bound and k>0.01):\n",
    "#         k -= 0.01\n",
    "#         soft_label = k*y_onehot[i] + (1-k)*conf_out_mean[i]\n",
    "#         risk = cal_risk(soft_label, conf_out_mean[i], y_onehot[i])\n",
    "#     y_onehot[i] = soft_label\n",
    "#     if k != 1:\n",
    "#         print(k)\n",
    "\n",
    "# 这是策略之二\n",
    "\n",
    "def label_fix(risk_bound, k_init, y_onehot, conf_out_mean, pri_risk_all, mem_data):\n",
    "    y_soft = y_onehot.copy()\n",
    "    for i in range(y_onehot.shape[0]):\n",
    "        risk = pri_risk_all[mem_data[i]]\n",
    "        if risk < risk_bound:\n",
    "            k = k_init\n",
    "            y_soft[i] = k*y_onehot[i] + (1-k)*conf_out_mean[i]\n",
    "        else:\n",
    "            k = k_init\n",
    "            soft_label = k*y_onehot[i] + (1-k)*conf_out_mean[i]\n",
    "            risk_thre = risk_bound\n",
    "            risk = cal_risk(soft_label, conf_out_mean[i], y_onehot[i])\n",
    "            while(risk>risk_thre and k>0.01):\n",
    "                k -= 0.01\n",
    "                soft_label = k*y_onehot[i] + (1-k)*conf_out_mean[i]\n",
    "                risk = cal_risk(soft_label, conf_out_mean[i], y_onehot[i])\n",
    "            y_soft[i] = soft_label\n",
    "            # print(k)\n",
    "    return conf_out_mean\n",
    "\n",
    "# # 这是策略之一\n",
    "# for i in range(train_num):\n",
    "#     risk = pri_risk_all[mem_data[i]]\n",
    "#     if risk < 1:\n",
    "#         k = 0.5\n",
    "#         y_onehot[i] = k*y_onehot[i] + (1-k)*conf_out_mean[i]\n",
    "#     else:\n",
    "#         k = 0.4/risk\n",
    "#         y_onehot[i] = k*y_onehot[i] + (1-k)*conf_out_mean[i]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "72960d6e-63f8-400a-ac71-1adc26140522",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 0.042140  [  128/30013]\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/ecpkn/.conda/envs/opacus/lib/python3.8/site-packages/torch/nn/functional.py:2949: UserWarning: reduction: 'mean' divides the total loss by both the batch size and the support size.'batchmean' divides only by the batch size, and aligns with the KL div math definition.'mean' will be changed to behave the same as 'batchmean' in the next major release.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "loss: 0.014561  [12928/30013]\n",
      "loss: 0.009782  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 48.7%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 0.008905  [  128/30013]\n",
      "loss: 0.006051  [12928/30013]\n",
      "loss: 0.004820  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.4%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.004817  [  128/30013]\n",
      "loss: 0.003443  [12928/30013]\n",
      "loss: 0.002970  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.3%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.002711  [  128/30013]\n",
      "loss: 0.002269  [12928/30013]\n",
      "loss: 0.001790  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.2%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.001948  [  128/30013]\n",
      "loss: 0.001563  [12928/30013]\n",
      "loss: 0.001512  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.7%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.001097  [  128/30013]\n",
      "loss: 0.001039  [12928/30013]\n",
      "loss: 0.001238  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.9%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.001251  [  128/30013]\n",
      "loss: 0.000885  [12928/30013]\n",
      "loss: 0.000857  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.8%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.000996  [  128/30013]\n",
      "loss: 0.001259  [12928/30013]\n",
      "loss: 0.000735  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.0%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.000574  [  128/30013]\n",
      "loss: 0.000383  [12928/30013]\n",
      "loss: 0.000711  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.3%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.000490  [  128/30013]\n",
      "loss: 0.000460  [12928/30013]\n",
      "loss: 0.000620  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.5%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.001039  [  128/30013]\n",
      "loss: 0.000748  [12928/30013]\n",
      "loss: 0.000467  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.6%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.001043  [  128/30013]\n",
      "loss: 0.000361  [12928/30013]\n",
      "loss: 0.000511  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.5%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.000656  [  128/30013]\n",
      "loss: 0.000314  [12928/30013]\n",
      "loss: 0.000383  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.7%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.000397  [  128/30013]\n",
      "loss: 0.000510  [12928/30013]\n",
      "loss: 0.000467  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.3%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.000643  [  128/30013]\n",
      "loss: 0.000756  [12928/30013]\n",
      "loss: 0.000142  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.9%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.000230  [  128/30013]\n",
      "loss: 0.000337  [12928/30013]\n",
      "loss: 0.000078  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.6%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.000079  [  128/30013]\n",
      "loss: 0.000283  [12928/30013]\n",
      "loss: 0.000029  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.0%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.000230  [  128/30013]\n",
      "loss: 0.000535  [12928/30013]\n",
      "loss: 0.000170  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.4%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.000363  [  128/30013]\n",
      "loss: 0.000211  [12928/30013]\n",
      "loss: 0.000117  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.0%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.000277  [  128/30013]\n",
      "loss: 0.000363  [12928/30013]\n",
      "loss: 0.000011  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.9%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.000165  [  128/30013]\n",
      "loss: 0.000441  [12928/30013]\n",
      "loss: 0.000054  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.0%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.000279  [  128/30013]\n",
      "loss: 0.000160  [12928/30013]\n",
      "loss: 0.000331  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.1%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.000284  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000227  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.1%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.000232  [  128/30013]\n",
      "loss: -0.000037  [12928/30013]\n",
      "loss: 0.000191  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.3%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.000699  [  128/30013]\n",
      "loss: -0.000046  [12928/30013]\n",
      "loss: 0.000023  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.0%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.001203  [  128/30013]\n",
      "loss: 0.000061  [12928/30013]\n",
      "loss: 0.000653  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.7%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.000523  [  128/30013]\n",
      "loss: 0.000125  [12928/30013]\n",
      "loss: 0.000541  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.3%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.000225  [  128/30013]\n",
      "loss: 0.000133  [12928/30013]\n",
      "loss: 0.000162  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.7%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.000482  [  128/30013]\n",
      "loss: 0.000159  [12928/30013]\n",
      "loss: 0.000345  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.0%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.000324  [  128/30013]\n",
      "loss: 0.000249  [12928/30013]\n",
      "loss: 0.000213  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.9%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.000281  [  128/30013]\n",
      "loss: 0.000057  [12928/30013]\n",
      "loss: 0.000043  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.2%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.000050  [  128/30013]\n",
      "loss: 0.000237  [12928/30013]\n",
      "loss: 0.000590  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.2%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.000014  [  128/30013]\n",
      "loss: 0.000251  [12928/30013]\n",
      "loss: 0.000175  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.5%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.000210  [  128/30013]\n",
      "loss: 0.000169  [12928/30013]\n",
      "loss: 0.000430  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.6%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.000188  [  128/30013]\n",
      "loss: 0.000237  [12928/30013]\n",
      "loss: 0.000140  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.7%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.000151  [  128/30013]\n",
      "loss: 0.000121  [12928/30013]\n",
      "loss: 0.000081  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.5%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.000112  [  128/30013]\n",
      "loss: 0.000106  [12928/30013]\n",
      "loss: -0.000031  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.4%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.000052  [  128/30013]\n",
      "loss: 0.000040  [12928/30013]\n",
      "loss: -0.000021  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.7%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.000350  [  128/30013]\n",
      "loss: 0.000097  [12928/30013]\n",
      "loss: 0.000040  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.1%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.000320  [  128/30013]\n",
      "loss: 0.000160  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.000057  [  128/30013]\n",
      "loss: 0.000113  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.000013  [  128/30013]\n",
      "loss: 0.000022  [12928/30013]\n",
      "loss: -0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.6%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: -0.000055  [  128/30013]\n",
      "loss: -0.000076  [12928/30013]\n",
      "loss: -0.000031  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.7%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.000184  [  128/30013]\n",
      "loss: 0.000029  [12928/30013]\n",
      "loss: 0.000059  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.7%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.000066  [  128/30013]\n",
      "loss: 0.000063  [12928/30013]\n",
      "loss: -0.000009  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.5%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.000026  [  128/30013]\n",
      "loss: -0.000007  [12928/30013]\n",
      "loss: 0.000019  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.4%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: -0.000045  [  128/30013]\n",
      "loss: 0.000198  [12928/30013]\n",
      "loss: 0.000158  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.5%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.000016  [  128/30013]\n",
      "loss: -0.000003  [12928/30013]\n",
      "loss: 0.000048  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.8%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.000103  [  128/30013]\n",
      "loss: 0.000089  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.7%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: -0.000008  [  128/30013]\n",
      "loss: -0.000008  [12928/30013]\n",
      "loss: 0.000035  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.7%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: -0.000043  [  128/30013]\n",
      "loss: -0.000122  [12928/30013]\n",
      "loss: -0.000052  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.9%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000031  [  128/30013]\n",
      "loss: -0.000072  [12928/30013]\n",
      "loss: -0.000028  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: -0.000109  [  128/30013]\n",
      "loss: 0.000045  [12928/30013]\n",
      "loss: 0.000073  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.1%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: -0.000100  [  128/30013]\n",
      "loss: 0.000014  [12928/30013]\n",
      "loss: -0.000015  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.9%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: -0.000116  [  128/30013]\n",
      "loss: -0.000089  [12928/30013]\n",
      "loss: 0.000155  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.1%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: -0.000041  [  128/30013]\n",
      "loss: -0.000117  [12928/30013]\n",
      "loss: -0.000056  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.1%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: -0.000069  [  128/30013]\n",
      "loss: -0.000094  [12928/30013]\n",
      "loss: -0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.1%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000016  [  128/30013]\n",
      "loss: -0.000078  [12928/30013]\n",
      "loss: -0.000021  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000043  [  128/30013]\n",
      "loss: -0.000061  [12928/30013]\n",
      "loss: 0.000052  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.9%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: -0.000077  [  128/30013]\n",
      "loss: -0.000174  [12928/30013]\n",
      "loss: -0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.9%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: -0.000083  [  128/30013]\n",
      "loss: -0.000111  [12928/30013]\n",
      "loss: 0.000023  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000220  [  128/30013]\n",
      "loss: -0.000134  [12928/30013]\n",
      "loss: -0.000122  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: -0.000033  [  128/30013]\n",
      "loss: -0.000108  [12928/30013]\n",
      "loss: -0.000122  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000007  [  128/30013]\n",
      "loss: -0.000040  [12928/30013]\n",
      "loss: -0.000050  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000010  [  128/30013]\n",
      "loss: -0.000059  [12928/30013]\n",
      "loss: -0.000081  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: -0.000034  [  128/30013]\n",
      "loss: -0.000046  [12928/30013]\n",
      "loss: -0.000044  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: -0.000135  [12928/30013]\n",
      "loss: 0.000057  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: -0.000054  [  128/30013]\n",
      "loss: 0.000023  [12928/30013]\n",
      "loss: -0.000095  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.1%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000013  [  128/30013]\n",
      "loss: -0.000024  [12928/30013]\n",
      "loss: -0.000044  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: -0.000145  [  128/30013]\n",
      "loss: -0.000146  [12928/30013]\n",
      "loss: 0.000109  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: -0.000111  [  128/30013]\n",
      "loss: -0.000162  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.3%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: -0.000148  [  128/30013]\n",
      "loss: -0.000157  [12928/30013]\n",
      "loss: 0.000111  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: -0.000177  [  128/30013]\n",
      "loss: -0.000159  [12928/30013]\n",
      "loss: -0.000062  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.1%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: -0.000160  [  128/30013]\n",
      "loss: -0.000119  [12928/30013]\n",
      "loss: -0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: -0.000043  [  128/30013]\n",
      "loss: -0.000146  [12928/30013]\n",
      "loss: 0.000117  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.3%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: -0.000106  [  128/30013]\n",
      "loss: -0.000198  [12928/30013]\n",
      "loss: -0.000087  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.3%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000027  [  128/30013]\n",
      "loss: -0.000230  [12928/30013]\n",
      "loss: -0.000084  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.6%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: -0.000154  [  128/30013]\n",
      "loss: -0.000231  [12928/30013]\n",
      "loss: -0.000153  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.7%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: -0.000212  [  128/30013]\n",
      "loss: -0.000208  [12928/30013]\n",
      "loss: -0.000074  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.8%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: -0.000157  [  128/30013]\n",
      "loss: -0.000190  [12928/30013]\n",
      "loss: -0.000127  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.5%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: -0.000102  [  128/30013]\n",
      "loss: -0.000141  [12928/30013]\n",
      "loss: 0.000006  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.3%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: -0.000090  [  128/30013]\n",
      "loss: -0.000170  [12928/30013]\n",
      "loss: -0.000076  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.3%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: -0.000143  [  128/30013]\n",
      "loss: -0.000151  [12928/30013]\n",
      "loss: -0.000096  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.3%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: -0.000089  [  128/30013]\n",
      "loss: -0.000193  [12928/30013]\n",
      "loss: -0.000063  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.4%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: -0.000001  [  128/30013]\n",
      "loss: -0.000154  [12928/30013]\n",
      "loss: -0.000146  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.7%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: -0.000192  [  128/30013]\n",
      "loss: -0.000157  [12928/30013]\n",
      "loss: -0.000062  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.8%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: -0.000164  [  128/30013]\n",
      "loss: -0.000135  [12928/30013]\n",
      "loss: -0.000122  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.6%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: -0.000162  [  128/30013]\n",
      "loss: -0.000049  [12928/30013]\n",
      "loss: -0.000125  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.6%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: -0.000135  [  128/30013]\n",
      "loss: -0.000144  [12928/30013]\n",
      "loss: -0.000125  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.6%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: -0.000176  [  128/30013]\n",
      "loss: -0.000108  [12928/30013]\n",
      "loss: -0.000132  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.5%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: -0.000077  [  128/30013]\n",
      "loss: -0.000130  [12928/30013]\n",
      "loss: -0.000135  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.4%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: -0.000118  [  128/30013]\n",
      "loss: -0.000143  [12928/30013]\n",
      "loss: -0.000132  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.4%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: -0.000179  [  128/30013]\n",
      "loss: -0.000161  [12928/30013]\n",
      "loss: -0.000096  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.5%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: -0.000205  [  128/30013]\n",
      "loss: -0.000201  [12928/30013]\n",
      "loss: -0.000099  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.4%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: -0.000179  [  128/30013]\n",
      "loss: -0.000177  [12928/30013]\n",
      "loss: -0.000165  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.6%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: -0.000120  [  128/30013]\n",
      "loss: -0.000184  [12928/30013]\n",
      "loss: -0.000163  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.7%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: -0.000195  [  128/30013]\n",
      "loss: -0.000169  [12928/30013]\n",
      "loss: -0.000165  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.8%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: -0.000245  [  128/30013]\n",
      "loss: -0.000013  [12928/30013]\n",
      "loss: -0.000141  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.9%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: -0.000222  [  128/30013]\n",
      "loss: -0.000208  [12928/30013]\n",
      "loss: -0.000149  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: -0.000260  [  128/30013]\n",
      "loss: -0.000191  [12928/30013]\n",
      "loss: -0.000137  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.9%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 80.3%, Avg loss: 0.601048 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 83.4%, Avg loss: 0.530726 \n",
      "\n",
      " Error: \n",
      " Accuracy: 83.4%  \n",
      "\n",
      "AUC value is: 0.5338019835694836\n",
      "Accuracy is: 0.49078333333333335\n",
      " Error: \n",
      " Accuracy: 83.4%  \n",
      "\n",
      "AUC value is: 0.5630052853308309\n",
      "Accuracy is: 0.5105\n",
      "AUC value is: 0.5892949012120424\n",
      "Accuracy is: 0.50015\n"
     ]
    }
   ],
   "source": [
     "# Sweep over (risk_bound, k_init): train a soft-label (self-distilled)\n",
     "# target model per combination, then run baseline / LiRA / shadow\n",
     "# membership-inference attacks against it.\n",
     "# risk_bound_list = [0.01, 0.05, 0.1, 0.5, 1, 2, 4, 6, 10]\n",
     "risk_bound_list = [0.01]\n",
     "# k_init_list = [0, 0.2, 0.4, 0.5, 0.6, 0.8, 1]\n",
     "k_init_list = [0.4]\n",
     "model_test_correct = []\n",
     "average_lira_attack = []\n",
     "average_base_attack = []\n",
     "average_shadow_attack = []\n",
     "risk_base_attack = []\n",
     "risk_lira_attack = []\n",
     "risk_shadow_attack = []\n",
     "top_risk = 2000  # attacks are additionally scored on the top-2000 highest-risk samples\n",
     "for risk_bound in risk_bound_list: # iterate over privacy-risk bounds\n",
     "    model_test_correct_t = []\n",
     "    average_lira_attack_t = []\n",
     "    average_base_attack_t = []\n",
     "    average_shadow_attack_t = []\n",
     "    risk_base_attack_t = []\n",
     "    risk_lira_attack_t = []\n",
     "    risk_shadow_attack_t = []\n",
     "    for k_init in k_init_list: # iterate over soft-label init weights k_init\n",
     "        # build the soft-label training targets for this configuration\n",
     "        y_soft = label_fix(risk_bound, k_init, y_onehot, conf_out_mean, pri_risk_all, mem_data)\n",
     "        x = X_data[mem_label]\n",
     "        y = y_soft\n",
     "        train_data = CustomDataset(x, y, model_transform)\n",
     "        train_dataloader = DataLoader(train_data, batch_size=batch_size)\n",
     "        TargetModel = globals()['create_{}_model'.format(model)](x.shape[1], 100)\n",
     "        TargetModel.to(device)\n",
     "        loss_fn = nn.KLDivLoss()  # KL-divergence loss for soft labels\n",
     "        # NOTE(review): nn.KLDivLoss expects log-probabilities as input and\n",
     "        # defaults to reduction='mean' — confirm train_softlabel applies\n",
     "        # log_softmax (the negative running losses in the log suggest it may not).\n",
     "        # loss_fn = nn.CrossEntropyLoss()\n",
     "        optimizer = torch.optim.Adam(TargetModel.parameters(), lr=LEARNING_RATE)\n",
     "        for t in range(epochs):\n",
     "            print(f\"Epoch {t+1}\\n-------------------------------\")\n",
     "            train_softlabel(train_dataloader, TargetModel, loss_fn, optimizer, device)\n",
     "            # train_onehot(train_dataloader, TargetModel, loss_fn, optimizer, device)\n",
     "        print(\"Done!\")\n",
     "        # target model trained; record its test accuracy\n",
     "        loss_fn = nn.CrossEntropyLoss()\n",
     "        correct = evaluate(test_dataloader, TargetModel, loss_fn, device)\n",
     "        model_test_correct_t.append(correct)\n",
     "        # baseline membership attack\n",
     "        pred_result = base_attack(all_dataloader, TargetModel, loss_fn, device)\n",
     "        accuracy = metrics.accuracy_score(train_keep[0], pred_result)\n",
     "        average_base_attack_t.append(accuracy)\n",
     "        pred_clip = pred_result[pri_risk_rank[:top_risk]]\n",
     "        mem_clip = train_keep[0][pri_risk_rank[:top_risk]]\n",
     "        accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
     "        risk_base_attack_t.append(accuracy)\n",
     "        # likelihood-ratio (LiRA) attack\n",
     "        _, score = get_score_from_model(all_dataloader, TargetModel, device)\n",
     "        pred_result = LIRA_attack(train_keep, score_all, score, train_keep[0])\n",
     "        accuracy = evaluate_ROC(pred_result, train_keep[0], threshold=0)\n",
     "        average_lira_attack_t.append(accuracy)\n",
     "        pred_clip = pred_result[pri_risk_rank[:top_risk]]\n",
     "        mem_clip = train_keep[0][pri_risk_rank[:top_risk]]\n",
     "        pred_clip = pred_clip > 0\n",
     "        accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
     "        risk_lira_attack_t.append(accuracy)\n",
     "        # shadow-model attack\n",
     "\n",
     "        # extract the target model's confidence outputs on all samples\n",
     "        targetX, _ = get_model_pred(all_dataloader, TargetModel, device)\n",
     "        targetX = targetX.detach().cpu().numpy()\n",
     "        targetX = targetX.astype(np.float32)\n",
     "        \n",
     "        top_k = 3\n",
     "        if top_k:\n",
     "            # use only the top-3 values of each probability vector\n",
     "            targetX, _ = get_top_k_conf(top_k, targetX, targetX)\n",
     "\n",
     "        shadow_attack_data = CustomDataset(targetX, train_keep[0], attack_transform)\n",
     "        shadow_attack_dataloader = DataLoader(shadow_attack_data, batch_size=batch_size, shuffle=False)\n",
     "        attack_test_scores, attack_test_mem = get_attack_pred(shadow_attack_dataloader, attack_model, device)\n",
     "        attack_test_scores, attack_test_mem = attack_test_scores.detach().cpu().numpy(), attack_test_mem.detach().cpu().numpy()\n",
     "        \n",
     "        pred_clip = attack_test_scores[pri_risk_rank[:top_risk]]\n",
     "        mem_clip = train_keep[0][pri_risk_rank[:top_risk]]\n",
     "        accuracy = evaluate_ROC(pred_clip, mem_clip)\n",
     "        risk_shadow_attack_t.append(accuracy)\n",
     "        \n",
     "        accuracy = evaluate_ROC(attack_test_scores, attack_test_mem)\n",
     "        average_shadow_attack_t.append(accuracy)\n",
     "    \n",
     "    \n",
     "    # collect per-k_init results for this risk bound\n",
     "    model_test_correct.append(model_test_correct_t)\n",
     "    average_lira_attack.append(average_lira_attack_t)\n",
     "    average_base_attack.append(average_base_attack_t)\n",
     "    average_shadow_attack.append(average_shadow_attack_t)\n",
     "    risk_base_attack.append(risk_base_attack_t)\n",
     "    risk_lira_attack.append(risk_lira_attack_t)\n",
     "    risk_shadow_attack.append(risk_shadow_attack_t)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "974971d1-ad99-4859-af52-622067518d44",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[[0.80255]]"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model_test_correct"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "b9363daa-d8b9-43b4-b45d-676c1a126ca2",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[[0.49078333333333335]]"
      ]
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "average_lira_attack"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "fbe6bca6-1352-4b45-b92e-c0e90a34158b",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[[0.50015]]"
      ]
     },
     "execution_count": 27,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "average_shadow_attack"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "59e56d7d-2da5-4dbf-aefa-ce60f6952607",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[[0.4885]]"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "risk_base_attack"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "11c630f7-d8d4-4533-ad97-06bd5b8b752d",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[[0.5135]]"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "risk_lira_attack"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6f826953-0f06-4320-96fe-64fe8f1c96cd",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "22a3f6ef-1b01-4a84-8a2e-b93802f9ab32",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0e5d1389-3f8e-450c-b90f-1c1322ce026f",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
     "# Scratch copy (fully commented out) of the target-model training steps\n",
     "# from the sweep cell; kept for one-off manual runs, safe to delete.\n",
     "# x = X_data[mem_label]\n",
     "# y = y_soft\n",
     "# train_data = CustomDataset(x, y, model_transform)\n",
     "# train_dataloader = DataLoader(train_data, batch_size=batch_size)\n",
     "# TargetModel = globals()['create_{}_model'.format(model)](x.shape[1], 100)\n",
     "# TargetModel.to(device)\n",
     "# loss_fn = nn.KLDivLoss()  # KL-divergence loss\n",
     "# optimizer = torch.optim.Adam(TargetModel.parameters(), lr=LEARNING_RATE)\n",
     "# for t in range(epochs):\n",
     "#     print(f\"Epoch {t+1}\\n-------------------------------\")\n",
     "#     train_softlabel(train_dataloader, TargetModel, loss_fn, optimizer, device)\n",
     "# print(\"Done!\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d7ef4969-de10-4972-b2cc-6c160e4c3888",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Test-set accuracy of the most recently trained TargetModel\n",
     "evaluate(test_dataloader, TargetModel, loss_fn, device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b08eee75-1667-4467-adcf-211a35147c40",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "id": "74be44ec-2b61-4221-88ae-c210b237df1f",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Test Error: \n",
      " Accuracy: 88.0%, Avg loss: 0.366423 \n",
      "\n",
      "0.5116\n"
     ]
    }
   ],
   "source": [
     "# Baseline attack on the distilled target model, scored on the 5000\n",
     "# highest-privacy-risk samples (the sweep cell above used top_risk=2000).\n",
     "loss_fn = nn.CrossEntropyLoss()\n",
     "pred_result = base_attack(all_dataloader, TargetModel, loss_fn, device)\n",
     "pred_clip = pred_result[pri_risk_rank[:5000]]\n",
     "mem_clip = train_keep[0][pri_risk_rank[:5000]]\n",
     "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
     "print(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "id": "e4d6376a-15a3-4b28-b027-7f5fa3cf6669",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " Error: \n",
      " Accuracy: 88.0%  \n",
      "\n",
      "AUC value is: 0.551598308022349\n",
      "Accuracy is: 0.4912666666666667\n",
      "0.5028\n"
     ]
    }
   ],
   "source": [
     "# LiRA attack on the distilled target model: threshold scores at > 0 and\n",
     "# measure accuracy on the 5000 highest-privacy-risk samples.\n",
     "_, score = get_score_from_model(all_dataloader, TargetModel, device)\n",
     "pred_result = LIRA_attack(train_keep, score_all, score, train_keep[0])\n",
     "evaluate_ROC(pred_result, train_keep[0], threshold=0)\n",
     "\n",
     "pred_clip = pred_result[pri_risk_rank[:5000]]\n",
     "mem_clip = train_keep[0][pri_risk_rank[:5000]]\n",
     "pred_clip = pred_clip > 0\n",
     "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
     "print(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "9a15990e-2f5d-4c1f-abf3-6f9ff53d4da1",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "fd49e0e6-3093-47b5-8b5d-e34970bced1b",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "NN_4layer(\n",
       "  (fc1): Linear(in_features=600, out_features=1024, bias=True)\n",
       "  (fc2): Linear(in_features=1024, out_features=512, bias=True)\n",
       "  (fc3): Linear(in_features=512, out_features=256, bias=True)\n",
       "  (fc4): Linear(in_features=256, out_features=100, bias=True)\n",
       "  (Tanh): Tanh()\n",
       ")"
      ]
     },
     "execution_count": 27,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "CompareModel = globals()['create_{}_model'.format(model)](600, 100)\n",
    "weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_model{}.pth\".format(data_name, model, epochs, 0))\n",
    "# print(Reference_Model)\n",
    "CompareModel.load_state_dict(torch.load(weight_path))\n",
    "CompareModel.to(device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "id": "aa548896-635c-475d-8b08-6604046639dd",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Test Error: \n",
      " Accuracy: 93.2%, Avg loss: 0.406902 \n",
      "\n",
      "0.8634\n",
      "0.5682666666666667\n"
     ]
    }
   ],
   "source": [
     "# Baseline attack on the compare model: accuracy on the top-5000\n",
     "# highest-risk samples first, then over the full dataset.\n",
     "loss_fn = nn.CrossEntropyLoss()\n",
     "pred_result = base_attack(all_dataloader, CompareModel, loss_fn, device)\n",
     "\n",
     "pred_clip = pred_result[pri_risk_rank[:5000]]\n",
     "mem_clip = train_keep[0][pri_risk_rank[:5000]]\n",
     "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
     "print(accuracy)\n",
     "\n",
     "accuracy = metrics.accuracy_score(train_keep[0], pred_result)\n",
     "print(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "id": "b20b9720-0f13-4930-a526-9340055ce185",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "AUC value is: 0.8478439214284699\n",
      "Accuracy is: 0.7350833333333333\n",
      "0.952\n"
     ]
    }
   ],
   "source": [
     "# LiRA attack using the first reference model's scores (score_all[0]) as\n",
     "# the target scores; measured on the top-5000 highest-risk samples.\n",
     "pred_result = LIRA_attack(train_keep, score_all, score_all[0], train_keep[0])\n",
     "evaluate_ROC(pred_result, train_keep[0], threshold=0)\n",
     "\n",
     "pred_clip = pred_result[pri_risk_rank[:5000]]\n",
     "mem_clip = train_keep[0][pri_risk_rank[:5000]]\n",
     "pred_clip = pred_clip > 0\n",
     "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
     "print(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "1c3b06ae-53b3-44a4-840d-51b978c3caa2",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Test Error: \n",
      " Accuracy: 86.7%, Avg loss: 0.806193 \n",
      "\n"
     ]
    }
   ],
   "source": [
     "# Test-set accuracy of the compare (reference) model\n",
     "evaluate(test_dataloader, CompareModel, loss_fn, device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "373e013e-6d50-41da-ad7c-ed9ecdb61370",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1bb563f4-61eb-4e94-8a4a-31618dba7f6e",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7e97378d-80d7-490d-a058-a095fb5d2d21",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ea79b937-147d-434d-a857-54aa3d65c89c",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "a33e4127-2475-4516-8e1c-e891ed2f2a84",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ccbe05cc-7a77-4693-b9d6-a5f606521e8e",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "221ac409-b522-41ba-b996-3ab3d0f79bfd",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e6b4d3d5-4572-4f6e-8e23-bd19e7234f14",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e416047a-e5ed-4b62-9315-26579657406f",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "96285dde-593f-4540-9bdc-39e035363557",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "541f4066-2bc3-424a-95f0-1f8801a953ec",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "36077ed2-9462-4385-aac5-5aabb79723f3",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b756d645-8e44-4e37-b9ff-74737e8cd0b0",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "19150127-3d94-4f9a-bd4d-bd0ca2181b40",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "446651cb-4d3c-476c-979c-8312b8f591f9",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "cd6ece2e-0b0d-4775-9139-c0450b6d0f72",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "5299f222-1d83-4c73-a965-a98130320158",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b177cb8c-748d-4bd0-aff6-46c334a3d147",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "6eb75448-c703-45c2-9daf-95a245ce74b2",
   "metadata": {},
   "source": [
     "### DP-SGD implementation"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "a6897131-b124-4253-83ba-5985522a441c",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Load the Purchase100 test split and the full training pool for the\n",
     "# DP-SGD experiments below.\n",
     "dataframe = pd.read_csv('../datasets/purchase100.txt', sep=',', encoding='utf-8', header=None)\n",
     "X_tmp = dataframe.iloc[:, range(600)].values\n",
     "Y_tmp = np.array([i for i in dataframe.loc[:, 600]])\n",
     "x_test_data = X_tmp[90000:110000]\n",
     "y_test_data = Y_tmp[90000:110000]\n",
     "\n",
     "test_data = CustomDataset(x_test_data, y_test_data, model_transform)\n",
     "test_dataloader = DataLoader(test_data, batch_size=batch_size)\n",
     "\n",
     "# load the full training dataset (with keep masks per shadow set)\n",
     "X_data, Y_data, train_keep = load_Purchase100_limited_keep(num_shadowsets, prop_keep, seed)\n",
     "# build the corresponding dataloader\n",
     "# NOTE(review): the test loader uses `batch_size` while this one uses the\n",
     "# BATCH_SIZE constant — confirm the two are intentionally different.\n",
     "all_data = CustomDataset(X_data, Y_data, model_transform)\n",
     "all_dataloader = DataLoader(all_data, batch_size=BATCH_SIZE, shuffle=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b544900a-cfb3-4756-9be1-3cd332391bcf",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "80f11c4c-02f7-4a11-9995-b6ef78c2f9ca",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Result accumulators for the DP-SGD sweep over noise multipliers.\n",
     "# epsilon_list = [0.5,1,2,4,6,8,10,15,20,25,30,35,40]\n",
     "attack_num = 2000  # number of top-risk samples used for the partial-attack metrics\n",
     "dp_test_acc = []\n",
     "dp_LIRA_attack_acc = []\n",
     "dp_base_attack_acc = []\n",
     "dp_shadow_attack_acc = []\n",
     "dp_part_LIRA_attack_acc = []\n",
     "dp_part_base_attack_acc = []\n",
     "epsilon_list = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "id": "aa392449-283e-4787-b9c7-247ccad335b3",
   "metadata": {},
   "outputs": [],
   "source": [
     "# DP-SGD training hyperparameters\n",
     "delta = 1e-6  # target delta of the (epsilon, delta)-DP guarantee\n",
     "max_grad_norm = 1  # per-sample gradient clipping bound\n",
     "# sigma_list = [1.7,1.6,1.5,1.4,1.3,1.2,1.1,1,0.9,0.8,0.85,0.75,0.7,0.65,0.6,0.55,0.5,0.46]\n",
     "sigma_list = [2.5,2,0.4]  # noise multipliers to sweep\n",
     "# sigma_list = [0.5]\n",
     "enable_dp = True"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "id": "2b4d2f94-2933-4155-88cf-431ddf9f0d54",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Template (fully commented out) for DP training of a reference model with\n",
     "# Opacus' PrivacyEngine; kept for reference, safe to delete.\n",
     "# if enable_dp:\n",
     "#     privacy_engine = PrivacyEngine(secure_mode=False)\n",
     "#     optimizer = torch.optim.Adam(ReferenceModel.parameters(), lr=LEARNING_RATE)\n",
     "#     ReferenceModel, optimizer, train_dataloader = privacy_engine.make_private(\n",
     "#         module=ReferenceModel,\n",
     "#         optimizer=optimizer,\n",
     "#         data_loader=train_dataloader,\n",
     "#         noise_multiplier=sigma,\n",
     "#         max_grad_norm=max_grad_norm,\n",
     "#     )\n",
     "# else:\n",
     "#     optimizer = torch.optim.Adam(ReferenceModel.parameters(), lr=LEARNING_RATE)\n",
     "# for t in range(EPOCHS):\n",
     "#     print(f\"Epoch {t+1}\\n-------------------------------\")\n",
     "#     if enable_dp:\n",
     "#         train_DP(train_dataloader, ReferenceModel, loss_fn, optimizer, device, privacy_engine, delta, t+1)\n",
     "#     else:\n",
     "#         train(train_dataloader, ReferenceModel, loss_fn, optimizer, device)\n",
     "#         # train(ReferenceModel, device, train_dataloader, optimizer, t+1)\n",
     "# print(\"Done!\") "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "b4a7cdf4-1361-4609-9547-9d95076c0573",
   "metadata": {},
   "outputs": [],
   "source": [
    "from opacus import PrivacyEngine"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "id": "c7f43d30-59dc-4cc1-b2f9-21029294f23f",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/ecpkn/.conda/envs/opacus/lib/python3.8/site-packages/opacus/privacy_engine.py:95: UserWarning: Secure RNG turned off. This is perfectly fine for experimentation as it allows for much faster training performance, but remember to turn it on and retrain one last time before production with ``secure_mode`` turned on.\n",
      "  warnings.warn(\n",
      "/home/ecpkn/.conda/envs/opacus/lib/python3.8/site-packages/torch/nn/modules/module.py:1352: UserWarning: Using a non-full backward hook when the forward contains multiple autograd Nodes is deprecated and will be removed in future versions. This hook will be missing some grad_input. Please use register_full_backward_hook to get the documented behavior.\n",
      "  warnings.warn(\"Using a non-full backward hook when the forward contains multiple autograd Nodes \"\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.604567  [  136/30013]\n",
      "loss: 4.594816  [12322/30013]\n",
      "loss: 4.553447  [22713/30013]\n",
      "Train Error: \n",
      " Accuracy: 2.5%\n",
      "Train Epoch: 1 \t(ε = 0.11, δ = 1e-06)\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 4.542726  [  133/30013]\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/ecpkn/.conda/envs/opacus/lib/python3.8/site-packages/opacus/accountants/analysis/rdp.py:332: UserWarning: Optimal order is the largest alpha. Please consider expanding the range of alphas to get a tighter privacy bound.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "loss: 4.540909  [13130/30013]\n",
      "loss: 4.496472  [27135/30013]\n",
      "Train Error: \n",
      " Accuracy: 3.0%\n",
      "Train Epoch: 2 \t(ε = 0.16, δ = 1e-06)\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 4.481927  [  133/30013]\n",
      "loss: 4.475967  [15958/30013]\n",
      "loss: 4.451478  [29145/30013]\n",
      "Train Error: \n",
      " Accuracy: 2.9%\n",
      "Train Epoch: 3 \t(ε = 0.19, δ = 1e-06)\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 4.473804  [  112/30013]\n",
      "loss: 4.430656  [11514/30013]\n",
      "loss: 4.425951  [24723/30013]\n",
      "Train Error: \n",
      " Accuracy: 3.4%\n",
      "Train Epoch: 4 \t(ε = 0.22, δ = 1e-06)\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 4.412372  [  143/30013]\n",
      "loss: 4.399811  [12524/30013]\n",
      "loss: 4.434165  [24522/30013]\n",
      "Train Error: \n",
      " Accuracy: 3.4%\n",
      "Train Epoch: 5 \t(ε = 0.25, δ = 1e-06)\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 4.375854  [  103/30013]\n",
      "loss: 4.374858  [13332/30013]\n",
      "loss: 4.257840  [28140/30013]\n",
      "Train Error: \n",
      " Accuracy: 4.3%\n",
      "Train Epoch: 6 \t(ε = 0.27, δ = 1e-06)\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 4.316243  [  115/30013]\n",
      "loss: 4.376977  [13736/30013]\n",
      "loss: 4.346258  [29145/30013]\n",
      "Train Error: \n",
      " Accuracy: 5.3%\n",
      "Train Epoch: 7 \t(ε = 0.30, δ = 1e-06)\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 4.331836  [  107/30013]\n",
      "loss: 4.320870  [12726/30013]\n",
      "loss: 4.252645  [19899/30013]\n",
      "Train Error: \n",
      " Accuracy: 6.6%\n",
      "Train Epoch: 8 \t(ε = 0.32, δ = 1e-06)\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 4.258186  [  142/30013]\n",
      "loss: 4.219941  [12726/30013]\n",
      "loss: 4.223825  [26130/30013]\n",
      "Train Error: \n",
      " Accuracy: 7.0%\n",
      "Train Epoch: 9 \t(ε = 0.34, δ = 1e-06)\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 4.161555  [  142/30013]\n",
      "loss: 4.217661  [12928/30013]\n",
      "loss: 4.121081  [25326/30013]\n",
      "Train Error: \n",
      " Accuracy: 8.1%\n",
      "Train Epoch: 10 \t(ε = 0.35, δ = 1e-06)\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 4.136074  [  111/30013]\n",
      "loss: 4.097988  [14039/30013]\n",
      "loss: 4.184331  [22512/30013]\n",
      "Train Error: \n",
      " Accuracy: 10.0%\n",
      "Train Epoch: 11 \t(ε = 0.37, δ = 1e-06)\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 4.136595  [  120/30013]\n",
      "loss: 4.006088  [12524/30013]\n",
      "loss: 3.954929  [23919/30013]\n",
      "Train Error: \n",
      " Accuracy: 10.8%\n",
      "Train Epoch: 12 \t(ε = 0.39, δ = 1e-06)\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 3.917111  [  123/30013]\n",
      "loss: 4.016967  [12625/30013]\n",
      "loss: 3.908026  [23115/30013]\n",
      "Train Error: \n",
      " Accuracy: 12.2%\n",
      "Train Epoch: 13 \t(ε = 0.40, δ = 1e-06)\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 3.877771  [  118/30013]\n",
      "loss: 3.945913  [12524/30013]\n",
      "loss: 3.773932  [27336/30013]\n",
      "Train Error: \n",
      " Accuracy: 13.8%\n",
      "Train Epoch: 14 \t(ε = 0.42, δ = 1e-06)\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 3.812057  [  120/30013]\n",
      "loss: 3.769499  [12524/30013]\n",
      "loss: 3.717388  [30753/30013]\n",
      "Train Error: \n",
      " Accuracy: 14.0%\n",
      "Train Epoch: 15 \t(ε = 0.44, δ = 1e-06)\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 3.693870  [  137/30013]\n",
      "loss: 3.595452  [10706/30013]\n",
      "loss: 3.623685  [24522/30013]\n",
      "Train Error: \n",
      " Accuracy: 15.9%\n",
      "Train Epoch: 16 \t(ε = 0.45, δ = 1e-06)\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 3.593097  [  140/30013]\n",
      "loss: 3.648011  [15655/30013]\n",
      "loss: 3.538325  [25125/30013]\n",
      "Train Error: \n",
      " Accuracy: 17.9%\n",
      "Train Epoch: 17 \t(ε = 0.46, δ = 1e-06)\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 3.494393  [  133/30013]\n",
      "loss: 3.469729  [13837/30013]\n",
      "loss: 3.524508  [28140/30013]\n",
      "Train Error: \n",
      " Accuracy: 17.8%\n",
      "Train Epoch: 18 \t(ε = 0.48, δ = 1e-06)\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 3.429684  [  126/30013]\n",
      "loss: 3.535704  [11918/30013]\n",
      "loss: 3.358334  [30351/30013]\n",
      "Train Error: \n",
      " Accuracy: 19.2%\n",
      "Train Epoch: 19 \t(ε = 0.49, δ = 1e-06)\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 3.271441  [  136/30013]\n",
      "loss: 3.247946  [13938/30013]\n",
      "loss: 3.400170  [23718/30013]\n",
      "Train Error: \n",
      " Accuracy: 20.5%\n",
      "Train Epoch: 20 \t(ε = 0.51, δ = 1e-06)\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 3.375181  [  127/30013]\n",
      "loss: 3.257726  [12726/30013]\n",
      "loss: 3.163961  [22512/30013]\n",
      "Train Error: \n",
      " Accuracy: 21.6%\n",
      "Train Epoch: 21 \t(ε = 0.52, δ = 1e-06)\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 3.278402  [  130/30013]\n",
      "loss: 3.120147  [12423/30013]\n",
      "loss: 3.139349  [25929/30013]\n",
      "Train Error: \n",
      " Accuracy: 22.9%\n",
      "Train Epoch: 22 \t(ε = 0.53, δ = 1e-06)\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 3.115102  [  110/30013]\n",
      "loss: 3.206249  [13130/30013]\n",
      "loss: 3.152434  [26532/30013]\n",
      "Train Error: \n",
      " Accuracy: 23.9%\n",
      "Train Epoch: 23 \t(ε = 0.54, δ = 1e-06)\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 3.141935  [  129/30013]\n",
      "loss: 3.206712  [13231/30013]\n",
      "loss: 2.872111  [24924/30013]\n",
      "Train Error: \n",
      " Accuracy: 24.9%\n",
      "Train Epoch: 24 \t(ε = 0.56, δ = 1e-06)\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 3.131283  [  128/30013]\n",
      "loss: 3.057976  [10100/30013]\n",
      "loss: 2.860612  [27336/30013]\n",
      "Train Error: \n",
      " Accuracy: 26.7%\n",
      "Train Epoch: 25 \t(ε = 0.57, δ = 1e-06)\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 2.916464  [  140/30013]\n",
      "loss: 3.010609  [12726/30013]\n",
      "loss: 2.937815  [27135/30013]\n",
      "Train Error: \n",
      " Accuracy: 27.7%\n",
      "Train Epoch: 26 \t(ε = 0.58, δ = 1e-06)\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 3.110218  [  123/30013]\n",
      "loss: 2.828365  [13231/30013]\n",
      "loss: 2.799601  [27336/30013]\n",
      "Train Error: \n",
      " Accuracy: 29.1%\n",
      "Train Epoch: 27 \t(ε = 0.59, δ = 1e-06)\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 2.846548  [  132/30013]\n",
      "loss: 2.979443  [11615/30013]\n",
      "loss: 2.831432  [25929/30013]\n",
      "Train Error: \n",
      " Accuracy: 29.6%\n",
      "Train Epoch: 28 \t(ε = 0.60, δ = 1e-06)\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 2.785661  [  147/30013]\n",
      "loss: 2.722120  [13433/30013]\n",
      "loss: 2.792849  [22512/30013]\n",
      "Train Error: \n",
      " Accuracy: 31.0%\n",
      "Train Epoch: 29 \t(ε = 0.61, δ = 1e-06)\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 2.770232  [  121/30013]\n",
      "loss: 2.759023  [13130/30013]\n",
      "loss: 2.717859  [26130/30013]\n",
      "Train Error: \n",
      " Accuracy: 32.2%\n",
      "Train Epoch: 30 \t(ε = 0.62, δ = 1e-06)\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 2.776865  [  140/30013]\n",
      "loss: 2.867764  [13029/30013]\n",
      "loss: 2.623561  [28743/30013]\n",
      "Train Error: \n",
      " Accuracy: 33.6%\n",
      "Train Epoch: 31 \t(ε = 0.63, δ = 1e-06)\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 2.635132  [  154/30013]\n",
      "loss: 2.648531  [14140/30013]\n",
      "loss: 2.535631  [25326/30013]\n",
      "Train Error: \n",
      " Accuracy: 33.2%\n",
      "Train Epoch: 32 \t(ε = 0.65, δ = 1e-06)\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 2.644135  [  140/30013]\n",
      "loss: 2.651891  [13130/30013]\n",
      "loss: 2.485281  [21909/30013]\n",
      "Train Error: \n",
      " Accuracy: 34.0%\n",
      "Train Epoch: 33 \t(ε = 0.66, δ = 1e-06)\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 2.580845  [  128/30013]\n",
      "loss: 2.716586  [12423/30013]\n",
      "loss: 2.542403  [24924/30013]\n",
      "Train Error: \n",
      " Accuracy: 36.0%\n",
      "Train Epoch: 34 \t(ε = 0.67, δ = 1e-06)\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 2.495593  [  130/30013]\n",
      "loss: 2.408111  [12322/30013]\n",
      "loss: 2.412319  [26733/30013]\n",
      "Train Error: \n",
      " Accuracy: 36.2%\n",
      "Train Epoch: 35 \t(ε = 0.68, δ = 1e-06)\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 2.478218  [  125/30013]\n",
      "loss: 2.467340  [13332/30013]\n",
      "loss: 2.423480  [26532/30013]\n",
      "Train Error: \n",
      " Accuracy: 36.5%\n",
      "Train Epoch: 36 \t(ε = 0.69, δ = 1e-06)\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 2.571054  [  141/30013]\n",
      "loss: 2.544667  [10908/30013]\n",
      "loss: 2.332210  [25326/30013]\n",
      "Train Error: \n",
      " Accuracy: 36.7%\n",
      "Train Epoch: 37 \t(ε = 0.70, δ = 1e-06)\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 2.629201  [  122/30013]\n",
      "loss: 2.256293  [11817/30013]\n",
      "loss: 2.420249  [30552/30013]\n",
      "Train Error: \n",
      " Accuracy: 37.1%\n",
      "Train Epoch: 38 \t(ε = 0.71, δ = 1e-06)\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 2.407541  [  127/30013]\n",
      "loss: 2.268455  [14241/30013]\n",
      "loss: 2.470660  [28944/30013]\n",
      "Train Error: \n",
      " Accuracy: 37.5%\n",
      "Train Epoch: 39 \t(ε = 0.72, δ = 1e-06)\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 2.408664  [  123/30013]\n",
      "loss: 2.435674  [15049/30013]\n",
      "loss: 2.273563  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 39.4%\n",
      "Train Epoch: 40 \t(ε = 0.73, δ = 1e-06)\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 2.255115  [  133/30013]\n",
      "loss: 2.363426  [12928/30013]\n",
      "loss: 2.309083  [21909/30013]\n",
      "Train Error: \n",
      " Accuracy: 39.5%\n",
      "Train Epoch: 41 \t(ε = 0.73, δ = 1e-06)\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 2.385521  [  119/30013]\n",
      "loss: 2.261139  [11514/30013]\n",
      "loss: 2.237075  [25326/30013]\n",
      "Train Error: \n",
      " Accuracy: 39.3%\n",
      "Train Epoch: 42 \t(ε = 0.74, δ = 1e-06)\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 2.401173  [  115/30013]\n",
      "loss: 2.325492  [11110/30013]\n",
      "loss: 2.275852  [21909/30013]\n",
      "Train Error: \n",
      " Accuracy: 40.2%\n",
      "Train Epoch: 43 \t(ε = 0.75, δ = 1e-06)\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 2.385904  [  132/30013]\n",
      "loss: 2.265321  [14645/30013]\n",
      "loss: 2.297679  [26934/30013]\n",
      "Train Error: \n",
      " Accuracy: 40.8%\n",
      "Train Epoch: 44 \t(ε = 0.76, δ = 1e-06)\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 2.335922  [  105/30013]\n",
      "loss: 2.212072  [14241/30013]\n",
      "loss: 2.261707  [27135/30013]\n",
      "Train Error: \n",
      " Accuracy: 39.9%\n",
      "Train Epoch: 45 \t(ε = 0.77, δ = 1e-06)\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 2.167427  [  112/30013]\n",
      "loss: 2.204731  [14645/30013]\n",
      "loss: 2.159494  [24522/30013]\n",
      "Train Error: \n",
      " Accuracy: 41.4%\n",
      "Train Epoch: 46 \t(ε = 0.78, δ = 1e-06)\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 2.146416  [  116/30013]\n",
      "loss: 2.151931  [12322/30013]\n",
      "loss: 2.077678  [26733/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.1%\n",
      "Train Epoch: 47 \t(ε = 0.79, δ = 1e-06)\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 2.107337  [  133/30013]\n",
      "loss: 2.172006  [13433/30013]\n",
      "loss: 2.201766  [27738/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.1%\n",
      "Train Epoch: 48 \t(ε = 0.80, δ = 1e-06)\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 2.208639  [  141/30013]\n",
      "loss: 2.220456  [12524/30013]\n",
      "loss: 2.310167  [24723/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.3%\n",
      "Train Epoch: 49 \t(ε = 0.81, δ = 1e-06)\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 2.213126  [  111/30013]\n",
      "loss: 2.186463  [11918/30013]\n",
      "loss: 2.150469  [23316/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.4%\n",
      "Train Epoch: 50 \t(ε = 0.82, δ = 1e-06)\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 2.150780  [  143/30013]\n",
      "loss: 2.207640  [13433/30013]\n",
      "loss: 2.061360  [28341/30013]\n",
      "Train Error: \n",
      " Accuracy: 43.5%\n",
      "Train Epoch: 51 \t(ε = 0.82, δ = 1e-06)\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 2.111173  [  143/30013]\n",
      "loss: 2.093432  [12019/30013]\n",
      "loss: 2.135426  [24120/30013]\n",
      "Train Error: \n",
      " Accuracy: 43.4%\n",
      "Train Epoch: 52 \t(ε = 0.83, δ = 1e-06)\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 2.069716  [  147/30013]\n",
      "loss: 2.189952  [12726/30013]\n",
      "loss: 2.103096  [26934/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.0%\n",
      "Train Epoch: 53 \t(ε = 0.84, δ = 1e-06)\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 2.025249  [  115/30013]\n",
      "loss: 2.032254  [13635/30013]\n",
      "loss: 1.995738  [25326/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.9%\n",
      "Train Epoch: 54 \t(ε = 0.85, δ = 1e-06)\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 2.051454  [  115/30013]\n",
      "loss: 1.970696  [11918/30013]\n",
      "loss: 2.032652  [26130/30013]\n",
      "Train Error: \n",
      " Accuracy: 45.6%\n",
      "Train Epoch: 55 \t(ε = 0.86, δ = 1e-06)\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 1.980034  [  155/30013]\n",
      "loss: 2.025141  [10706/30013]\n",
      "loss: 2.108414  [25929/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.6%\n",
      "Train Epoch: 56 \t(ε = 0.87, δ = 1e-06)\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 1.910153  [  116/30013]\n",
      "loss: 1.951139  [13130/30013]\n",
      "loss: 1.938378  [26934/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.7%\n",
      "Train Epoch: 57 \t(ε = 0.87, δ = 1e-06)\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 2.020793  [  153/30013]\n",
      "loss: 2.033444  [13635/30013]\n",
      "loss: 2.027482  [22914/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.9%\n",
      "Train Epoch: 58 \t(ε = 0.88, δ = 1e-06)\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 1.941120  [  148/30013]\n",
      "loss: 1.941950  [14544/30013]\n",
      "loss: 1.913020  [26733/30013]\n",
      "Train Error: \n",
      " Accuracy: 45.1%\n",
      "Train Epoch: 59 \t(ε = 0.89, δ = 1e-06)\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 1.946186  [  114/30013]\n",
      "loss: 1.886423  [13231/30013]\n",
      "loss: 1.971663  [20703/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.8%\n",
      "Train Epoch: 60 \t(ε = 0.90, δ = 1e-06)\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 43.2%, Avg loss: 2.022270 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 44.3%, Avg loss: 1.998979 \n",
      "\n",
      " Error: \n",
      " Accuracy: 44.3%  \n",
      "\n",
      "AUC value is: 0.48986178420737947\n",
      "Accuracy is: 0.4904833333333333\n",
      " Error: \n",
      " Accuracy: 44.3%  \n",
      "\n",
      "AUC value is: 0.5\n",
      "Accuracy is: 0.49978333333333336\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.601892  [  131/30013]\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/ecpkn/.conda/envs/opacus/lib/python3.8/site-packages/opacus/privacy_engine.py:95: UserWarning: Secure RNG turned off. This is perfectly fine for experimentation as it allows for much faster training performance, but remember to turn it on and retrain one last time before production with ``secure_mode`` turned on.\n",
      "  warnings.warn(\n",
      "/home/ecpkn/.conda/envs/opacus/lib/python3.8/site-packages/torch/nn/modules/module.py:1352: UserWarning: Using a non-full backward hook when the forward contains multiple autograd Nodes is deprecated and will be removed in future versions. This hook will be missing some grad_input. Please use register_full_backward_hook to get the documented behavior.\n",
      "  warnings.warn(\"Using a non-full backward hook when the forward contains multiple autograd Nodes \"\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "loss: 4.576126  [13534/30013]\n",
      "loss: 4.546251  [22914/30013]\n",
      "Train Error: \n",
      " Accuracy: 2.5%\n",
      "Train Epoch: 1 \t(ε = 0.15, δ = 1e-06)\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 4.549745  [  152/30013]\n",
      "loss: 4.527291  [11615/30013]\n",
      "loss: 4.486506  [29346/30013]\n",
      "Train Error: \n",
      " Accuracy: 2.8%\n",
      "Train Epoch: 2 \t(ε = 0.21, δ = 1e-06)\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 4.513391  [  128/30013]\n",
      "loss: 4.422077  [13029/30013]\n",
      "loss: 4.472872  [23919/30013]\n",
      "Train Error: \n",
      " Accuracy: 2.8%\n",
      "Train Epoch: 3 \t(ε = 0.25, δ = 1e-06)\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 4.483537  [  119/30013]\n",
      "loss: 4.464777  [14847/30013]\n",
      "loss: 4.461714  [24120/30013]\n",
      "Train Error: \n",
      " Accuracy: 3.0%\n",
      "Train Epoch: 4 \t(ε = 0.29, δ = 1e-06)\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 4.449330  [  116/30013]\n",
      "loss: 4.419777  [12120/30013]\n",
      "loss: 4.363945  [28341/30013]\n",
      "Train Error: \n",
      " Accuracy: 3.2%\n",
      "Train Epoch: 5 \t(ε = 0.32, δ = 1e-06)\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 4.380258  [  123/30013]\n",
      "loss: 4.336188  [14544/30013]\n",
      "loss: 4.365045  [26733/30013]\n",
      "Train Error: \n",
      " Accuracy: 3.5%\n",
      "Train Epoch: 6 \t(ε = 0.35, δ = 1e-06)\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 4.311449  [  128/30013]\n",
      "loss: 4.301056  [11514/30013]\n",
      "loss: 4.331747  [23115/30013]\n",
      "Train Error: \n",
      " Accuracy: 4.7%\n",
      "Train Epoch: 7 \t(ε = 0.38, δ = 1e-06)\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 4.297672  [  137/30013]\n",
      "loss: 4.295655  [12120/30013]\n",
      "loss: 4.192678  [23718/30013]\n",
      "Train Error: \n",
      " Accuracy: 6.8%\n",
      "Train Epoch: 8 \t(ε = 0.41, δ = 1e-06)\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 4.210935  [  144/30013]\n",
      "loss: 4.211128  [11817/30013]\n",
      "loss: 4.119606  [21306/30013]\n",
      "Train Error: \n",
      " Accuracy: 7.9%\n",
      "Train Epoch: 9 \t(ε = 0.44, δ = 1e-06)\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 4.037663  [  136/30013]\n",
      "loss: 4.119329  [13433/30013]\n",
      "loss: 3.995182  [29547/30013]\n",
      "Train Error: \n",
      " Accuracy: 9.5%\n",
      "Train Epoch: 10 \t(ε = 0.46, δ = 1e-06)\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 3.987143  [  141/30013]\n",
      "loss: 3.921607  [14342/30013]\n",
      "loss: 3.892353  [23316/30013]\n",
      "Train Error: \n",
      " Accuracy: 10.8%\n",
      "Train Epoch: 11 \t(ε = 0.48, δ = 1e-06)\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 3.927246  [  144/30013]\n",
      "loss: 3.817866  [11918/30013]\n",
      "loss: 3.822050  [25125/30013]\n",
      "Train Error: \n",
      " Accuracy: 11.8%\n",
      "Train Epoch: 12 \t(ε = 0.51, δ = 1e-06)\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 3.730881  [  111/30013]\n",
      "loss: 3.799310  [13231/30013]\n",
      "loss: 3.620813  [25326/30013]\n",
      "Train Error: \n",
      " Accuracy: 13.4%\n",
      "Train Epoch: 13 \t(ε = 0.53, δ = 1e-06)\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 3.653106  [  136/30013]\n",
      "loss: 3.587357  [13433/30013]\n",
      "loss: 3.613160  [24723/30013]\n",
      "Train Error: \n",
      " Accuracy: 15.9%\n",
      "Train Epoch: 14 \t(ε = 0.55, δ = 1e-06)\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 3.509453  [  134/30013]\n",
      "loss: 3.561448  [13938/30013]\n",
      "loss: 3.430558  [23517/30013]\n",
      "Train Error: \n",
      " Accuracy: 17.4%\n",
      "Train Epoch: 15 \t(ε = 0.57, δ = 1e-06)\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 3.488276  [  138/30013]\n",
      "loss: 3.284139  [12019/30013]\n",
      "loss: 3.428448  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 18.3%\n",
      "Train Epoch: 16 \t(ε = 0.59, δ = 1e-06)\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 3.394204  [  137/30013]\n",
      "loss: 3.301222  [11413/30013]\n",
      "loss: 3.209799  [23919/30013]\n",
      "Train Error: \n",
      " Accuracy: 20.6%\n",
      "Train Epoch: 17 \t(ε = 0.60, δ = 1e-06)\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 3.371376  [  100/30013]\n",
      "loss: 3.341122  [12726/30013]\n",
      "loss: 3.089498  [26934/30013]\n",
      "Train Error: \n",
      " Accuracy: 23.7%\n",
      "Train Epoch: 18 \t(ε = 0.62, δ = 1e-06)\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 3.064596  [  118/30013]\n",
      "loss: 3.168261  [14140/30013]\n",
      "loss: 2.956059  [25326/30013]\n",
      "Train Error: \n",
      " Accuracy: 25.1%\n",
      "Train Epoch: 19 \t(ε = 0.64, δ = 1e-06)\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 3.109819  [  140/30013]\n",
      "loss: 2.998594  [12928/30013]\n",
      "loss: 2.992099  [24723/30013]\n",
      "Train Error: \n",
      " Accuracy: 26.2%\n",
      "Train Epoch: 20 \t(ε = 0.66, δ = 1e-06)\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 3.027256  [  129/30013]\n",
      "loss: 2.982432  [14645/30013]\n",
      "loss: 3.013047  [23919/30013]\n",
      "Train Error: \n",
      " Accuracy: 28.1%\n",
      "Train Epoch: 21 \t(ε = 0.67, δ = 1e-06)\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 2.883001  [  123/30013]\n",
      "loss: 2.942265  [14645/30013]\n",
      "loss: 2.996390  [20703/30013]\n",
      "Train Error: \n",
      " Accuracy: 30.2%\n",
      "Train Epoch: 22 \t(ε = 0.69, δ = 1e-06)\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 2.849895  [  140/30013]\n",
      "loss: 2.833234  [12120/30013]\n",
      "loss: 2.867293  [29748/30013]\n",
      "Train Error: \n",
      " Accuracy: 31.4%\n",
      "Train Epoch: 23 \t(ε = 0.71, δ = 1e-06)\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 2.666740  [  120/30013]\n",
      "loss: 2.759266  [12019/30013]\n",
      "loss: 2.646782  [27135/30013]\n",
      "Train Error: \n",
      " Accuracy: 32.8%\n",
      "Train Epoch: 24 \t(ε = 0.72, δ = 1e-06)\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 2.739655  [  142/30013]\n",
      "loss: 2.561789  [12322/30013]\n",
      "loss: 2.600752  [23919/30013]\n",
      "Train Error: \n",
      " Accuracy: 33.5%\n",
      "Train Epoch: 25 \t(ε = 0.74, δ = 1e-06)\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 2.787874  [  113/30013]\n",
      "loss: 2.649915  [15453/30013]\n",
      "loss: 2.707132  [21909/30013]\n",
      "Train Error: \n",
      " Accuracy: 35.3%\n",
      "Train Epoch: 26 \t(ε = 0.75, δ = 1e-06)\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 2.664801  [  115/30013]\n",
      "loss: 2.476717  [14039/30013]\n",
      "loss: 2.480180  [26130/30013]\n",
      "Train Error: \n",
      " Accuracy: 36.7%\n",
      "Train Epoch: 27 \t(ε = 0.77, δ = 1e-06)\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 2.649512  [  138/30013]\n",
      "loss: 2.604197  [14645/30013]\n",
      "loss: 2.426899  [29346/30013]\n",
      "Train Error: \n",
      " Accuracy: 37.5%\n",
      "Train Epoch: 28 \t(ε = 0.78, δ = 1e-06)\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 2.455147  [  135/30013]\n",
      "loss: 2.501501  [11716/30013]\n",
      "loss: 2.477983  [27336/30013]\n",
      "Train Error: \n",
      " Accuracy: 38.5%\n",
      "Train Epoch: 29 \t(ε = 0.80, δ = 1e-06)\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 2.495500  [  142/30013]\n",
      "loss: 2.387636  [13231/30013]\n",
      "loss: 2.370576  [24723/30013]\n",
      "Train Error: \n",
      " Accuracy: 39.2%\n",
      "Train Epoch: 30 \t(ε = 0.81, δ = 1e-06)\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 2.402230  [  134/30013]\n",
      "loss: 2.303981  [12827/30013]\n",
      "loss: 2.309063  [30351/30013]\n",
      "Train Error: \n",
      " Accuracy: 40.9%\n",
      "Train Epoch: 31 \t(ε = 0.83, δ = 1e-06)\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 2.459158  [  122/30013]\n",
      "loss: 2.402570  [11514/30013]\n",
      "loss: 2.281970  [29748/30013]\n",
      "Train Error: \n",
      " Accuracy: 40.9%\n",
      "Train Epoch: 32 \t(ε = 0.84, δ = 1e-06)\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 2.281117  [  120/30013]\n",
      "loss: 2.270575  [13029/30013]\n",
      "loss: 2.292682  [28944/30013]\n",
      "Train Error: \n",
      " Accuracy: 41.7%\n",
      "Train Epoch: 33 \t(ε = 0.85, δ = 1e-06)\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 2.291656  [  124/30013]\n",
      "loss: 2.219114  [13332/30013]\n",
      "loss: 2.160485  [22914/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.7%\n",
      "Train Epoch: 34 \t(ε = 0.87, δ = 1e-06)\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 2.226859  [  110/30013]\n",
      "loss: 2.136117  [14342/30013]\n",
      "loss: 2.116168  [27135/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.6%\n",
      "Train Epoch: 35 \t(ε = 0.88, δ = 1e-06)\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 2.246968  [  115/30013]\n",
      "loss: 2.168958  [11514/30013]\n",
      "loss: 2.225588  [24723/30013]\n",
      "Train Error: \n",
      " Accuracy: 43.8%\n",
      "Train Epoch: 36 \t(ε = 0.89, δ = 1e-06)\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 2.315457  [  133/30013]\n",
      "loss: 2.194574  [12423/30013]\n",
      "loss: 2.090262  [24321/30013]\n",
      "Train Error: \n",
      " Accuracy: 43.5%\n",
      "Train Epoch: 37 \t(ε = 0.91, δ = 1e-06)\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 2.010910  [  119/30013]\n",
      "loss: 2.159939  [11817/30013]\n",
      "loss: 2.268437  [24321/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.8%\n",
      "Train Epoch: 38 \t(ε = 0.92, δ = 1e-06)\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 2.110427  [  144/30013]\n",
      "loss: 2.076675  [12524/30013]\n",
      "loss: 2.110060  [25125/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.4%\n",
      "Train Epoch: 39 \t(ε = 0.93, δ = 1e-06)\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 2.035884  [  126/30013]\n",
      "loss: 1.994199  [13837/30013]\n",
      "loss: 2.124418  [23919/30013]\n",
      "Train Error: \n",
      " Accuracy: 46.1%\n",
      "Train Epoch: 40 \t(ε = 0.95, δ = 1e-06)\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 1.911550  [  117/30013]\n",
      "loss: 1.954117  [11312/30013]\n",
      "loss: 2.010134  [25326/30013]\n",
      "Train Error: \n",
      " Accuracy: 46.5%\n",
      "Train Epoch: 41 \t(ε = 0.96, δ = 1e-06)\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 2.059386  [  126/30013]\n",
      "loss: 2.082637  [11514/30013]\n",
      "loss: 2.141535  [28944/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.1%\n",
      "Train Epoch: 42 \t(ε = 0.97, δ = 1e-06)\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 2.023418  [  132/30013]\n",
      "loss: 1.867394  [12625/30013]\n",
      "loss: 1.986206  [25125/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.5%\n",
      "Train Epoch: 43 \t(ε = 0.98, δ = 1e-06)\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 1.991914  [  124/30013]\n",
      "loss: 1.978446  [14645/30013]\n",
      "loss: 2.015303  [24321/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.4%\n",
      "Train Epoch: 44 \t(ε = 0.99, δ = 1e-06)\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 1.978040  [  113/30013]\n",
      "loss: 1.881077  [12928/30013]\n",
      "loss: 2.015709  [25125/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.6%\n",
      "Train Epoch: 45 \t(ε = 1.01, δ = 1e-06)\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 1.854476  [  127/30013]\n",
      "loss: 1.885004  [14039/30013]\n",
      "loss: 1.832165  [23718/30013]\n",
      "Train Error: \n",
      " Accuracy: 48.2%\n",
      "Train Epoch: 46 \t(ε = 1.02, δ = 1e-06)\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 2.047408  [  126/30013]\n",
      "loss: 1.917341  [13635/30013]\n",
      "loss: 1.943773  [26331/30013]\n",
      "Train Error: \n",
      " Accuracy: 48.6%\n",
      "Train Epoch: 47 \t(ε = 1.03, δ = 1e-06)\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 1.874976  [  121/30013]\n",
      "loss: 1.919677  [12221/30013]\n",
      "loss: 1.957015  [21909/30013]\n",
      "Train Error: \n",
      " Accuracy: 48.7%\n",
      "Train Epoch: 48 \t(ε = 1.04, δ = 1e-06)\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 1.940155  [  131/30013]\n",
      "loss: 1.830310  [14948/30013]\n",
      "loss: 1.746329  [24723/30013]\n",
      "Train Error: \n",
      " Accuracy: 49.8%\n",
      "Train Epoch: 49 \t(ε = 1.05, δ = 1e-06)\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 1.860153  [  108/30013]\n",
      "loss: 1.908099  [12524/30013]\n",
      "loss: 1.828392  [28743/30013]\n",
      "Train Error: \n",
      " Accuracy: 50.4%\n",
      "Train Epoch: 50 \t(ε = 1.06, δ = 1e-06)\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 1.936086  [  119/30013]\n",
      "loss: 1.811162  [14746/30013]\n",
      "loss: 1.825504  [26130/30013]\n",
      "Train Error: \n",
      " Accuracy: 49.3%\n",
      "Train Epoch: 51 \t(ε = 1.07, δ = 1e-06)\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 1.874255  [  136/30013]\n",
      "loss: 1.776938  [14241/30013]\n",
      "loss: 1.781126  [30753/30013]\n",
      "Train Error: \n",
      " Accuracy: 50.1%\n",
      "Train Epoch: 52 \t(ε = 1.09, δ = 1e-06)\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 1.758593  [  105/30013]\n",
      "loss: 1.695280  [11918/30013]\n",
      "loss: 1.878438  [28944/30013]\n",
      "Train Error: \n",
      " Accuracy: 50.2%\n",
      "Train Epoch: 53 \t(ε = 1.10, δ = 1e-06)\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 1.884357  [  126/30013]\n",
      "loss: 1.728068  [13130/30013]\n",
      "loss: 1.835306  [26733/30013]\n",
      "Train Error: \n",
      " Accuracy: 50.9%\n",
      "Train Epoch: 54 \t(ε = 1.11, δ = 1e-06)\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 1.678507  [  156/30013]\n",
      "loss: 1.740620  [12423/30013]\n",
      "loss: 1.765021  [24924/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.8%\n",
      "Train Epoch: 55 \t(ε = 1.12, δ = 1e-06)\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 1.811905  [  136/30013]\n",
      "loss: 1.783348  [10504/30013]\n",
      "loss: 1.849826  [23919/30013]\n",
      "Train Error: \n",
      " Accuracy: 52.1%\n",
      "Train Epoch: 56 \t(ε = 1.13, δ = 1e-06)\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 1.739589  [  118/30013]\n",
      "loss: 1.803506  [15352/30013]\n",
      "loss: 1.830129  [27135/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.4%\n",
      "Train Epoch: 57 \t(ε = 1.14, δ = 1e-06)\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 1.729311  [  138/30013]\n",
      "loss: 1.749986  [13534/30013]\n",
      "loss: 1.779315  [26532/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.8%\n",
      "Train Epoch: 58 \t(ε = 1.15, δ = 1e-06)\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 1.774587  [  122/30013]\n",
      "loss: 1.676803  [11009/30013]\n",
      "loss: 1.820750  [25125/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.8%\n",
      "Train Epoch: 59 \t(ε = 1.16, δ = 1e-06)\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 1.776173  [  150/30013]\n",
      "loss: 1.740180  [11918/30013]\n",
      "loss: 1.666407  [27135/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.8%\n",
      "Train Epoch: 60 \t(ε = 1.17, δ = 1e-06)\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 49.3%, Avg loss: 1.756458 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 50.4%, Avg loss: 1.730508 \n",
      "\n",
      " Error: \n",
      " Accuracy: 50.4%  \n",
      "\n",
      "AUC value is: 0.49051047988474566\n",
      "Accuracy is: 0.49051666666666666\n",
      " Error: \n",
      " Accuracy: 50.4%  \n",
      "\n",
      "AUC value is: 0.5\n",
      "Accuracy is: 0.49978333333333336\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.609723  [  117/30013]\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/ecpkn/.conda/envs/opacus/lib/python3.8/site-packages/opacus/privacy_engine.py:95: UserWarning: Secure RNG turned off. This is perfectly fine for experimentation as it allows for much faster training performance, but remember to turn it on and retrain one last time before production with ``secure_mode`` turned on.\n",
      "  warnings.warn(\n",
      "/home/ecpkn/.conda/envs/opacus/lib/python3.8/site-packages/torch/nn/modules/module.py:1352: UserWarning: Using a non-full backward hook when the forward contains multiple autograd Nodes is deprecated and will be removed in future versions. This hook will be missing some grad_input. Please use register_full_backward_hook to get the documented behavior.\n",
      "  warnings.warn(\"Using a non-full backward hook when the forward contains multiple autograd Nodes \"\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "loss: 4.475441  [14443/30013]\n",
      "loss: 4.406865  [30150/30013]\n",
      "Train Error: \n",
      " Accuracy: 2.9%\n",
      "Train Epoch: 1 \t(ε = 12.07, δ = 1e-06)\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 4.357048  [  111/30013]\n",
      "loss: 4.212024  [13534/30013]\n",
      "loss: 4.065628  [25929/30013]\n",
      "Train Error: \n",
      " Accuracy: 6.8%\n",
      "Train Epoch: 2 \t(ε = 14.32, δ = 1e-06)\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 3.966015  [  142/30013]\n",
      "loss: 3.678284  [12423/30013]\n",
      "loss: 3.351640  [26934/30013]\n",
      "Train Error: \n",
      " Accuracy: 15.2%\n",
      "Train Epoch: 3 \t(ε = 16.06, δ = 1e-06)\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 3.178241  [  120/30013]\n",
      "loss: 3.110964  [13635/30013]\n",
      "loss: 2.726615  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 24.9%\n",
      "Train Epoch: 4 \t(ε = 17.54, δ = 1e-06)\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 2.847221  [  134/30013]\n",
      "loss: 2.692949  [12928/30013]\n",
      "loss: 2.441057  [24522/30013]\n",
      "Train Error: \n",
      " Accuracy: 34.1%\n",
      "Train Epoch: 5 \t(ε = 18.86, δ = 1e-06)\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 2.447402  [  121/30013]\n",
      "loss: 2.333676  [14241/30013]\n",
      "loss: 2.142146  [26532/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.3%\n",
      "Train Epoch: 6 \t(ε = 20.08, δ = 1e-06)\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 2.134546  [  133/30013]\n",
      "loss: 1.997457  [13938/30013]\n",
      "loss: 1.915885  [23919/30013]\n",
      "Train Error: \n",
      " Accuracy: 48.1%\n",
      "Train Epoch: 7 \t(ε = 21.21, δ = 1e-06)\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.938086  [  140/30013]\n",
      "loss: 1.798264  [13332/30013]\n",
      "loss: 1.735807  [26733/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.8%\n",
      "Train Epoch: 8 \t(ε = 22.28, δ = 1e-06)\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 1.816302  [  127/30013]\n",
      "loss: 1.585017  [11918/30013]\n",
      "loss: 1.584195  [25929/30013]\n",
      "Train Error: \n",
      " Accuracy: 55.7%\n",
      "Train Epoch: 9 \t(ε = 23.30, δ = 1e-06)\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 1.585257  [  132/30013]\n",
      "loss: 1.609652  [13029/30013]\n",
      "loss: 1.429070  [24924/30013]\n",
      "Train Error: \n",
      " Accuracy: 58.7%\n",
      "Train Epoch: 10 \t(ε = 24.27, δ = 1e-06)\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.464833  [  133/30013]\n",
      "loss: 1.520512  [12726/30013]\n",
      "loss: 1.571637  [23316/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.0%\n",
      "Train Epoch: 11 \t(ε = 25.21, δ = 1e-06)\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 1.418067  [  141/30013]\n",
      "loss: 1.262321  [12625/30013]\n",
      "loss: 1.232548  [26130/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.7%\n",
      "Train Epoch: 12 \t(ε = 26.11, δ = 1e-06)\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 1.506104  [  136/30013]\n",
      "loss: 1.252195  [14746/30013]\n",
      "loss: 1.147000  [24924/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.6%\n",
      "Train Epoch: 13 \t(ε = 26.99, δ = 1e-06)\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 1.200509  [  108/30013]\n",
      "loss: 1.127348  [13837/30013]\n",
      "loss: 1.179690  [26331/30013]\n",
      "Train Error: \n",
      " Accuracy: 63.8%\n",
      "Train Epoch: 14 \t(ε = 27.85, δ = 1e-06)\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 1.105892  [  128/30013]\n",
      "loss: 1.317083  [13534/30013]\n",
      "loss: 1.110666  [24120/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.0%\n",
      "Train Epoch: 15 \t(ε = 28.68, δ = 1e-06)\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 1.205970  [  138/30013]\n",
      "loss: 1.147031  [13837/30013]\n",
      "loss: 0.995436  [26130/30013]\n",
      "Train Error: \n",
      " Accuracy: 66.4%\n",
      "Train Epoch: 16 \t(ε = 29.49, δ = 1e-06)\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.986004  [  141/30013]\n",
      "loss: 1.236302  [11413/30013]\n",
      "loss: 1.076918  [28341/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.9%\n",
      "Train Epoch: 17 \t(ε = 30.29, δ = 1e-06)\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 1.043106  [  117/30013]\n",
      "loss: 1.062350  [14544/30013]\n",
      "loss: 1.086561  [27939/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.1%\n",
      "Train Epoch: 18 \t(ε = 31.06, δ = 1e-06)\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 1.024544  [  136/30013]\n",
      "loss: 1.100553  [12322/30013]\n",
      "loss: 0.912826  [23718/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.2%\n",
      "Train Epoch: 19 \t(ε = 31.83, δ = 1e-06)\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 1.006302  [  136/30013]\n",
      "loss: 1.085717  [15352/30013]\n",
      "loss: 0.919743  [27738/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.8%\n",
      "Train Epoch: 20 \t(ε = 32.58, δ = 1e-06)\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 1.068752  [  135/30013]\n",
      "loss: 0.972990  [13130/30013]\n",
      "loss: 1.058019  [22512/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.8%\n",
      "Train Epoch: 21 \t(ε = 33.32, δ = 1e-06)\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.910299  [  112/30013]\n",
      "loss: 0.883251  [10302/30013]\n",
      "loss: 0.973645  [27336/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.3%\n",
      "Train Epoch: 22 \t(ε = 34.04, δ = 1e-06)\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.978731  [  131/30013]\n",
      "loss: 0.826490  [14544/30013]\n",
      "loss: 0.931696  [28341/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.8%\n",
      "Train Epoch: 23 \t(ε = 34.76, δ = 1e-06)\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.899658  [  113/30013]\n",
      "loss: 0.886969  [11817/30013]\n",
      "loss: 0.880793  [24321/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.4%\n",
      "Train Epoch: 24 \t(ε = 35.46, δ = 1e-06)\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 1.011427  [  113/30013]\n",
      "loss: 0.947454  [12625/30013]\n",
      "loss: 0.962012  [26733/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.1%\n",
      "Train Epoch: 25 \t(ε = 36.16, δ = 1e-06)\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.909023  [  141/30013]\n",
      "loss: 0.816532  [13837/30013]\n",
      "loss: 0.777656  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.0%\n",
      "Train Epoch: 26 \t(ε = 36.84, δ = 1e-06)\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.749427  [  129/30013]\n",
      "loss: 0.930955  [14241/30013]\n",
      "loss: 0.763281  [26331/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.3%\n",
      "Train Epoch: 27 \t(ε = 37.52, δ = 1e-06)\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.804524  [  122/30013]\n",
      "loss: 0.728175  [13332/30013]\n",
      "loss: 0.843476  [22512/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.5%\n",
      "Train Epoch: 28 \t(ε = 38.19, δ = 1e-06)\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.866240  [  127/30013]\n",
      "loss: 0.779840  [12524/30013]\n",
      "loss: 0.751480  [27939/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.5%\n",
      "Train Epoch: 29 \t(ε = 38.86, δ = 1e-06)\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.879314  [  141/30013]\n",
      "loss: 0.810646  [15554/30013]\n",
      "loss: 0.714387  [22713/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.9%\n",
      "Train Epoch: 30 \t(ε = 39.51, δ = 1e-06)\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.848295  [  129/30013]\n",
      "loss: 0.841005  [15150/30013]\n",
      "loss: 0.855602  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.8%\n",
      "Train Epoch: 31 \t(ε = 40.16, δ = 1e-06)\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.887129  [  136/30013]\n",
      "loss: 0.703423  [14342/30013]\n",
      "loss: 0.851317  [25527/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.4%\n",
      "Train Epoch: 32 \t(ε = 40.80, δ = 1e-06)\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.782071  [  132/30013]\n",
      "loss: 0.813565  [14039/30013]\n",
      "loss: 0.819578  [24924/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.2%\n",
      "Train Epoch: 33 \t(ε = 41.44, δ = 1e-06)\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.847092  [  140/30013]\n",
      "loss: 0.674771  [12726/30013]\n",
      "loss: 0.731768  [25326/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.0%\n",
      "Train Epoch: 34 \t(ε = 42.07, δ = 1e-06)\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.907773  [  120/30013]\n",
      "loss: 0.844010  [11514/30013]\n",
      "loss: 0.709132  [23115/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.6%\n",
      "Train Epoch: 35 \t(ε = 42.69, δ = 1e-06)\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.663595  [  114/30013]\n",
      "loss: 0.777518  [14746/30013]\n",
      "loss: 0.766493  [26934/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.7%\n",
      "Train Epoch: 36 \t(ε = 43.31, δ = 1e-06)\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.646825  [  122/30013]\n",
      "loss: 0.770102  [14443/30013]\n",
      "loss: 0.746955  [23316/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.2%\n",
      "Train Epoch: 37 \t(ε = 43.92, δ = 1e-06)\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.814986  [  121/30013]\n",
      "loss: 0.632919  [14039/30013]\n",
      "loss: 0.745404  [25125/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.3%\n",
      "Train Epoch: 38 \t(ε = 44.53, δ = 1e-06)\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.782050  [  131/30013]\n",
      "loss: 0.633809  [13130/30013]\n",
      "loss: 0.561965  [23115/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.3%\n",
      "Train Epoch: 39 \t(ε = 45.14, δ = 1e-06)\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.785718  [  140/30013]\n",
      "loss: 0.705443  [12928/30013]\n",
      "loss: 0.773757  [23718/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.4%\n",
      "Train Epoch: 40 \t(ε = 45.74, δ = 1e-06)\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.713456  [  124/30013]\n",
      "loss: 0.801734  [14241/30013]\n",
      "loss: 0.638608  [25326/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.6%\n",
      "Train Epoch: 41 \t(ε = 46.33, δ = 1e-06)\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.629047  [  121/30013]\n",
      "loss: 0.655985  [12524/30013]\n",
      "loss: 0.701255  [28542/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.8%\n",
      "Train Epoch: 42 \t(ε = 46.92, δ = 1e-06)\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.633159  [  135/30013]\n",
      "loss: 0.806692  [13130/30013]\n",
      "loss: 0.731381  [23718/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.8%\n",
      "Train Epoch: 43 \t(ε = 47.51, δ = 1e-06)\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.729745  [  141/30013]\n",
      "loss: 0.698590  [11716/30013]\n",
      "loss: 0.677484  [23919/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.4%\n",
      "Train Epoch: 44 \t(ε = 48.09, δ = 1e-06)\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.693374  [  131/30013]\n",
      "loss: 0.600578  [12625/30013]\n",
      "loss: 0.716797  [22713/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.0%\n",
      "Train Epoch: 45 \t(ε = 48.67, δ = 1e-06)\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.678642  [  111/30013]\n",
      "loss: 0.789534  [14241/30013]\n",
      "loss: 0.658879  [29346/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.0%\n",
      "Train Epoch: 46 \t(ε = 49.25, δ = 1e-06)\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.781973  [  117/30013]\n",
      "loss: 0.689189  [14645/30013]\n",
      "loss: 0.712788  [27939/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.5%\n",
      "Train Epoch: 47 \t(ε = 49.82, δ = 1e-06)\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.562405  [  122/30013]\n",
      "loss: 0.552025  [10706/30013]\n",
      "loss: 0.666214  [23718/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.5%\n",
      "Train Epoch: 48 \t(ε = 50.39, δ = 1e-06)\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.523882  [  129/30013]\n",
      "loss: 0.750632  [12120/30013]\n",
      "loss: 0.575853  [27738/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.7%\n",
      "Train Epoch: 49 \t(ε = 50.96, δ = 1e-06)\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.682828  [  119/30013]\n",
      "loss: 0.664996  [12524/30013]\n",
      "loss: 0.502985  [27336/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.4%\n",
      "Train Epoch: 50 \t(ε = 51.52, δ = 1e-06)\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.603009  [  135/30013]\n",
      "loss: 0.677727  [13736/30013]\n",
      "loss: 0.708973  [25527/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.9%\n",
      "Train Epoch: 51 \t(ε = 52.08, δ = 1e-06)\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.717388  [  120/30013]\n",
      "loss: 0.593028  [10807/30013]\n",
      "loss: 0.429367  [29145/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.6%\n",
      "Train Epoch: 52 \t(ε = 52.63, δ = 1e-06)\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.642912  [  118/30013]\n",
      "loss: 0.541039  [13837/30013]\n",
      "loss: 0.627860  [29145/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.4%\n",
      "Train Epoch: 53 \t(ε = 53.19, δ = 1e-06)\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.481296  [  132/30013]\n",
      "loss: 0.475512  [13534/30013]\n",
      "loss: 0.622222  [27939/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.0%\n",
      "Train Epoch: 54 \t(ε = 53.74, δ = 1e-06)\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.623088  [  149/30013]\n",
      "loss: 0.666741  [13433/30013]\n",
      "loss: 0.668281  [25527/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.1%\n",
      "Train Epoch: 55 \t(ε = 54.28, δ = 1e-06)\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.605013  [  114/30013]\n",
      "loss: 0.616871  [11413/30013]\n",
      "loss: 0.626631  [24522/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.4%\n",
      "Train Epoch: 56 \t(ε = 54.83, δ = 1e-06)\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.523891  [  118/30013]\n",
      "loss: 0.644831  [10706/30013]\n",
      "loss: 0.534803  [29949/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.4%\n",
      "Train Epoch: 57 \t(ε = 55.37, δ = 1e-06)\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.451230  [  149/30013]\n",
      "loss: 0.642923  [11918/30013]\n",
      "loss: 0.548128  [28542/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.5%\n",
      "Train Epoch: 58 \t(ε = 55.91, δ = 1e-06)\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.625186  [  114/30013]\n",
      "loss: 0.754910  [12120/30013]\n",
      "loss: 0.711328  [24321/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.9%\n",
      "Train Epoch: 59 \t(ε = 56.45, δ = 1e-06)\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.523546  [  119/30013]\n",
      "loss: 0.513205  [13332/30013]\n",
      "loss: 0.662290  [28140/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.3%\n",
      "Train Epoch: 60 \t(ε = 56.98, δ = 1e-06)\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 72.3%, Avg loss: 0.766149 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 75.9%, Avg loss: 0.672965 \n",
      "\n",
      " Error: \n",
      " Accuracy: 75.9%  \n",
      "\n",
      "AUC value is: 0.5069068085191674\n",
      "Accuracy is: 0.49056666666666665\n",
      " Error: \n",
      " Accuracy: 75.9%  \n",
      "\n",
      "AUC value is: 0.507612970873991\n",
      "Accuracy is: 0.5\n"
     ]
    }
   ],
   "source": [
    "for sigma in sigma_list:\n",
    "    x = X_data[train_keep[tar_model]]\n",
    "    y = Y_data[train_keep[tar_model]]\n",
    "    # 训练dp模型\n",
    "    train_data = CustomDataset(x, y, model_transform)\n",
    "    train_dataloader = DataLoader(train_data, batch_size=batch_size)\n",
    "    DPTargetModel = globals()['create_{}_model'.format(model)](600, 100)\n",
    "    DPTargetModel.to(device)\n",
    "    loss_fn = nn.CrossEntropyLoss()\n",
    "    \n",
    "    if enable_dp:\n",
    "        privacy_engine = PrivacyEngine(secure_mode=False)\n",
    "        optimizer = torch.optim.Adam(DPTargetModel.parameters(), lr=LEARNING_RATE)\n",
    "        RefereDPTargetModelnceModel, optimizer, train_dataloader = privacy_engine.make_private(\n",
    "            module=DPTargetModel,\n",
    "            optimizer=optimizer,\n",
    "            data_loader=train_dataloader,\n",
    "            noise_multiplier=sigma,\n",
    "            max_grad_norm=max_grad_norm,\n",
    "        )\n",
    "    else:\n",
    "        optimizer = torch.optim.Adam(DPTargetModel.parameters(), lr=LEARNING_RATE)\n",
    "    for t in range(60):\n",
    "        print(f\"Epoch {t+1}\\n-------------------------------\")\n",
    "        if enable_dp:\n",
    "            train_DP(train_dataloader, DPTargetModel, loss_fn, optimizer, device, privacy_engine, delta, t+1)\n",
    "        else:\n",
    "            train(train_dataloader, DPTargetModel, loss_fn, optimizer, device)\n",
    "    print(\"Done!\") \n",
    "\n",
    "    epsilon = privacy_engine.accountant.get_epsilon(delta=delta)\n",
    "    epsilon_list.append(epsilon)\n",
    "    \n",
    "    success = evaluate(test_dataloader, DPTargetModel, loss_fn, device)\n",
    "    dp_test_acc.append(success)\n",
    "\n",
    "    # 基线攻击\n",
    "    loss_fn = nn.CrossEntropyLoss()\n",
    "    pred_result = base_attack(all_dataloader, DPTargetModel, loss_fn, device)\n",
    "\n",
    "    accuracy = metrics.accuracy_score(train_keep[tar_model], pred_result)\n",
    "    dp_base_attack_acc.append(accuracy)\n",
    "\n",
    "    pred_clip = pred_result[pri_risk_rank[:attack_num]]\n",
    "    mem_clip = train_keep[tar_model][pri_risk_rank[:attack_num]]\n",
    "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    dp_part_base_attack_acc.append(accuracy)\n",
    "\n",
    "    # loss_fn = nn.CrossEntropyLoss()\n",
    "    # pred_result = base_attack(all_dataloader2, CompareModel, loss_fn, device)\n",
    "    # pred_clip = pred_result[pri_risk_rank[:attack_num]]\n",
    "    # mem_clip = train_keep[0][pri_risk_rank[:attack_num]]\n",
    "    # accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    # print(accuracy)\n",
    "\n",
    "    # LIRA攻击\n",
    "    _, score = get_score_from_model(all_dataloader, DPTargetModel, device)\n",
    "    pred_result = LIRA_attack(train_keep, score_all, score, train_keep[tar_model])\n",
    "    accuracy = evaluate_ROC(pred_result, train_keep[tar_model], threshold=0)\n",
    "    dp_LIRA_attack_acc.append(accuracy)\n",
    "    \n",
    "    pred_clip = pred_result[pri_risk_rank[:attack_num]]\n",
    "    mem_clip = train_keep[tar_model][pri_risk_rank[:attack_num]]\n",
    "    pred_clip = pred_clip > 0\n",
    "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    dp_part_LIRA_attack_acc.append(accuracy)\n",
    "    \n",
    "    # _, score = get_score_from_model(all_dataloader2, CompareModel, device)\n",
    "    # pred_result = LIRA_attack(train_keep, score_all, score, train_keep[0])\n",
    "    # evaluate_ROC(pred_result, train_keep[0], threshold=0)\n",
    "    # pred_clip = pred_result[pri_risk_rank[:attack_num]]\n",
    "    # mem_clip = train_keep[0][pri_risk_rank[:attack_num]]\n",
    "    # pred_clip = pred_clip > 0\n",
    "    # accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    # print(accuracy)\n",
    "\n",
    "    # 影子模型攻击\n",
    "    # 提取数据集在模型上的置信度输出\n",
    "    targetX, _ = get_model_pred(all_dataloader, DPTargetModel, device)\n",
    "    targetX = targetX.detach().cpu().numpy()\n",
    "    targetX = targetX.astype(np.float32)\n",
    "    \n",
    "    top_k = 3\n",
    "    if top_k:\n",
    "        # 仅使用概率向量的前3个值\n",
    "        targetX, _ = get_top_k_conf(top_k, targetX, targetX)\n",
    "\n",
    "    shadow_attack_data = CustomDataset(targetX, train_keep[0], attack_transform)\n",
    "    shadow_attack_dataloader = DataLoader(shadow_attack_data, batch_size=batch_size, shuffle=False)\n",
    "    attack_test_scores, attack_test_mem = get_attack_pred(shadow_attack_dataloader, attack_model, device)\n",
    "    attack_test_scores, attack_test_mem = attack_test_scores.detach().cpu().numpy(), attack_test_mem.detach().cpu().numpy()\n",
    "      \n",
    "    accuracy = evaluate_ROC(attack_test_scores, attack_test_mem)\n",
    "    dp_shadow_attack_acc.append(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "88ef96b7-a4c8-4ec9-8b2b-a59dc2bd1647",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "id": "b6078f50-b802-453e-9099-a4446ada65b3",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.4324, 0.49265, 0.72265]"
      ]
     },
     "execution_count": 34,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dp_test_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "id": "cd0279ab-0fe0-4b5f-a183-27694ee6bda5",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.4904833333333333, 0.49051666666666666, 0.49056666666666665]"
      ]
     },
     "execution_count": 35,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dp_LIRA_attack_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "id": "5c26fde3-a1e4-49ef-b802-19d647cce5ca",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.5126666666666667, 0.5149166666666667, 0.5391166666666667]"
      ]
     },
     "execution_count": 36,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dp_base_attack_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "id": "e9e9c6d2-7ab6-496c-96bb-765a912f4851",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.49978333333333336, 0.49978333333333336, 0.5]"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dp_shadow_attack_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "id": "f1f7f1ce-3446-43be-9fa8-b65c0df12b20",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.5105, 0.5115, 0.5135]"
      ]
     },
     "execution_count": 38,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dp_part_LIRA_attack_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "id": "58a95aba-6c99-4a62-b629-f91a38f8c48f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.52, 0.512, 0.57]"
      ]
     },
     "execution_count": 39,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dp_part_base_attack_acc "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "id": "45af02fe-6d08-4fd1-b3e4-0b3e200591d0",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.8980682933174924, 1.1711845255079905, 56.98008858671793]"
      ]
     },
     "execution_count": 40,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "epsilon_list"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "fec25b3b-216d-4dee-88fa-c281e678c294",
   "metadata": {},
   "source": [
     "### DP-PCA implementation"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "2938b89e-f0f7-49b1-ac3d-94a44492171c",
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.preprocessing import Normalizer\n",
    "class dp_PCA(object):\n",
    "\n",
    "    def __init__(self, n_components, eps=None, delta=None, complete_dp=False):\n",
    "        # complete_dp为True时，直接在X上加噪音\n",
    "        # 其为False时，在选择空间上加噪音\n",
    "        self.n_components = n_components\n",
    "        self.mat_feature = None\n",
    "        self.complete_dp = complete_dp\n",
    "        self.noise_mat = None\n",
    "        if eps and eps > 0 and delta and delta > 0:\n",
    "            self.dp = True\n",
    "            self.eps = eps\n",
    "            self.delta = delta\n",
    "        else:\n",
    "            self.dp = False\n",
    "            self.eps = None\n",
    "            self.delta = None\n",
    "\n",
    "    def fit(self, X):\n",
    "        n_samples, n_features = X.shape\n",
    "        X_norm = self.norm(X)\n",
    "        if self.dp and self.complete_dp:\n",
    "            scale = (np.sqrt(2 * np.log(1.25 / self.delta)) / self.eps) ** 2\n",
    "            self.noise_mat = np.random.normal(0, scale, (n_samples, n_features))\n",
    "            X_norm += self.noise_mat\n",
    "        XTX = np.dot(np.transpose(X_norm), X_norm)\n",
    "        # 根据是否实现dp来选择是否引入噪音\n",
    "        if self.dp and not self.complete_dp:\n",
    "            scale = (np.sqrt(2 * np.log(1.25 / self.delta)) / self.eps) ** 2\n",
    "            noise_mat = np.random.normal(0, scale, (n_features, n_features))\n",
    "            # noise_mat 为对称矩阵\n",
    "            for i in range(1, n_features):\n",
    "                for j in range(0, i):\n",
    "                    noise_mat[i][j] = noise_mat[j][i]\n",
    "            XTX += noise_mat\n",
    "        # 计算特征值和特征向量\n",
    "        eigen_val, eigen_vec = np.linalg.eig(XTX)\n",
    "        # 按特征值大小排序\n",
    "        eigen_pairs = [(np.abs(eigen_val[i]), eigen_vec[:, i]) for i in range(n_features)]\n",
    "        eigen_pairs.sort(reverse=True)\n",
    "        self.mat_feature = np.array([pairs[1] for pairs in eigen_pairs[:self.n_components]])\n",
    "        return 0\n",
    "\n",
    "    def transform(self, X, noise_at_X=False):\n",
    "        if type(self.mat_feature) == type(None):\n",
    "            print('缺少拟合数据，应先进行fit')\n",
    "            return -1\n",
    "        X_norm = self.norm(X)\n",
    "        if noise_at_X and self.noise_mat is not None:\n",
    "            X_norm += self.noise_mat\n",
    "        return np.dot(X_norm, np.transpose(self.mat_feature))\n",
    "\n",
    "    def fit_transform(self, X):\n",
    "        self.fit(X)\n",
    "        return self.transform(X, noise_at_X=self.complete_dp)\n",
    "\n",
    "    def norm(self, X):\n",
    "        transformer = Normalizer()\n",
    "        X = transformer.transform(X)\n",
    "        mean = np.mean(X, axis=0)\n",
    "        return X - mean\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "12ae1084-3de2-4812-96ee-4a5346e56f57",
   "metadata": {},
   "outputs": [],
   "source": [
    "tar_model = 0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "b27c434d-fa3a-4521-8359-27bdfa885a64",
   "metadata": {},
   "outputs": [],
   "source": [
    "# x = X_data[mem_label]\n",
    "# y = Y_data[mem_label]\n",
    "# X_train = mPCA.transform(x)\n",
    "# x = X_train\n",
    "# y = y\n",
    "# train_data = CustomDataset(x, y, model_transform)\n",
    "# train_dataloader = DataLoader(train_data, batch_size=batch_size)\n",
    "# CompareModel = globals()['create_{}_model'.format(model)](600, 100)\n",
    "# CompareModel.to(device)\n",
    "# loss_fn = nn.CrossEntropyLoss()\n",
    "# optimizer = torch.optim.Adam(CompareModel.parameters(), lr=LEARNING_RATE)\n",
    "# for t in range(epochs):\n",
    "#     print(f\"Epoch {t+1}\\n-------------------------------\")\n",
    "#     train(train_dataloader, CompareModel, loss_fn, optimizer, device)\n",
    "# print(\"Done!\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "d62529f4-4333-4d8b-8d97-7e909876de5a",
   "metadata": {},
   "outputs": [],
   "source": [
    "dataframe = pd.read_csv('../datasets/purchase100.txt', sep=',', encoding='utf-8', header=None)\n",
    "X_tmp = dataframe.iloc[:, range(600)].values\n",
    "Y_tmp = np.array([i for i in dataframe.loc[:, 600]])\n",
    "x_test_data = X_tmp[90000:110000]\n",
    "y_test_data = Y_tmp[90000:110000]\n",
    "\n",
    "# x_test_data= mPCA.transform(x_test_data)\n",
    "# test_data = CustomDataset(x_test_data, y_test_data, model_transform)\n",
    "# test_dataloader = DataLoader(test_data, batch_size=batch_size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "4bf0ec5a-a4d4-484f-baaa-146e12d6d1fc",
   "metadata": {},
   "outputs": [],
   "source": [
    "# X_data2= mPCA.transform(X_data.copy())\n",
    "# all_data2 = CustomDataset(X_data2, Y_data, model_transform)\n",
    "# all_dataloader2 = DataLoader(all_data2, batch_size=batch_size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "21a2625e-40ca-4269-be3a-d1097af7ce0b",
   "metadata": {},
   "outputs": [],
   "source": [
    "epsilon_list = [0.5,1,2,4,6,8,10,15,20,25,30,35,40]\n",
    "attack_num = 2000\n",
    "dp_test_acc = []\n",
    "dp_LIRA_attack_acc = []\n",
    "dp_base_attack_acc = []\n",
    "dp_shadow_attack_acc = []\n",
    "dp_part_LIRA_attack_acc = []\n",
    "dp_part_base_attack_acc = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "18924814-6cbe-4d38-8a54-db8b87499e42",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.620443  [  128/30013]\n",
      "loss: 4.563607  [12928/30013]\n",
      "loss: 4.500571  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 1.7%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 4.090552  [  128/30013]\n",
      "loss: 4.284691  [12928/30013]\n",
      "loss: 4.283488  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 4.7%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 3.906082  [  128/30013]\n",
      "loss: 4.073536  [12928/30013]\n",
      "loss: 4.071514  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 7.4%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 3.715731  [  128/30013]\n",
      "loss: 3.922182  [12928/30013]\n",
      "loss: 3.894833  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 10.4%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 3.503021  [  128/30013]\n",
      "loss: 3.696451  [12928/30013]\n",
      "loss: 3.742172  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 13.8%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 3.272240  [  128/30013]\n",
      "loss: 3.468630  [12928/30013]\n",
      "loss: 3.516555  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 17.2%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 3.067216  [  128/30013]\n",
      "loss: 3.109591  [12928/30013]\n",
      "loss: 3.384223  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 21.4%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 2.879926  [  128/30013]\n",
      "loss: 3.035913  [12928/30013]\n",
      "loss: 3.198412  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 25.8%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 2.651019  [  128/30013]\n",
      "loss: 2.819841  [12928/30013]\n",
      "loss: 2.877664  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 30.1%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 2.419024  [  128/30013]\n",
      "loss: 2.696533  [12928/30013]\n",
      "loss: 2.704583  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 34.6%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 2.341597  [  128/30013]\n",
      "loss: 2.410293  [12928/30013]\n",
      "loss: 2.392837  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 39.5%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 2.133775  [  128/30013]\n",
      "loss: 2.020801  [12928/30013]\n",
      "loss: 2.245798  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.4%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 1.885975  [  128/30013]\n",
      "loss: 2.033985  [12928/30013]\n",
      "loss: 2.027135  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 49.2%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 1.720473  [  128/30013]\n",
      "loss: 1.937235  [12928/30013]\n",
      "loss: 1.700730  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 54.5%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 1.459581  [  128/30013]\n",
      "loss: 1.654812  [12928/30013]\n",
      "loss: 1.464528  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.4%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 1.310258  [  128/30013]\n",
      "loss: 1.496635  [12928/30013]\n",
      "loss: 1.412627  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 64.5%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 1.080173  [  128/30013]\n",
      "loss: 1.192746  [12928/30013]\n",
      "loss: 1.237883  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.4%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.933483  [  128/30013]\n",
      "loss: 1.001470  [12928/30013]\n",
      "loss: 1.029281  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.2%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.809434  [  128/30013]\n",
      "loss: 0.986607  [12928/30013]\n",
      "loss: 1.027961  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.3%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.700490  [  128/30013]\n",
      "loss: 0.727188  [12928/30013]\n",
      "loss: 0.755246  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.1%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.598409  [  128/30013]\n",
      "loss: 0.543242  [12928/30013]\n",
      "loss: 0.622515  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.7%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.598816  [  128/30013]\n",
      "loss: 0.535029  [12928/30013]\n",
      "loss: 0.485826  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 87.9%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.557278  [  128/30013]\n",
      "loss: 0.460308  [12928/30013]\n",
      "loss: 0.391952  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.4%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.391671  [  128/30013]\n",
      "loss: 0.416626  [12928/30013]\n",
      "loss: 0.444380  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.7%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.405554  [  128/30013]\n",
      "loss: 0.330814  [12928/30013]\n",
      "loss: 0.356430  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.9%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.434799  [  128/30013]\n",
      "loss: 0.325407  [12928/30013]\n",
      "loss: 0.322724  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.8%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.392991  [  128/30013]\n",
      "loss: 0.332617  [12928/30013]\n",
      "loss: 0.447435  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.9%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.410076  [  128/30013]\n",
      "loss: 0.420452  [12928/30013]\n",
      "loss: 0.426457  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 87.7%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.630728  [  128/30013]\n",
      "loss: 0.559432  [12928/30013]\n",
      "loss: 0.653431  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.4%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.675759  [  128/30013]\n",
      "loss: 0.619296  [12928/30013]\n",
      "loss: 0.576715  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.7%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.586690  [  128/30013]\n",
      "loss: 0.575275  [12928/30013]\n",
      "loss: 0.650939  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.6%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.531598  [  128/30013]\n",
      "loss: 0.490499  [12928/30013]\n",
      "loss: 0.518893  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.3%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.497577  [  128/30013]\n",
      "loss: 0.502487  [12928/30013]\n",
      "loss: 0.337916  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.3%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.415792  [  128/30013]\n",
      "loss: 0.307691  [12928/30013]\n",
      "loss: 0.253509  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.3%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.262992  [  128/30013]\n",
      "loss: 0.253051  [12928/30013]\n",
      "loss: 0.208095  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.6%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.203370  [  128/30013]\n",
      "loss: 0.157839  [12928/30013]\n",
      "loss: 0.195652  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.2%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.196332  [  128/30013]\n",
      "loss: 0.151932  [12928/30013]\n",
      "loss: 0.141035  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.145013  [  128/30013]\n",
      "loss: 0.149195  [12928/30013]\n",
      "loss: 0.094790  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.1%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.115799  [  128/30013]\n",
      "loss: 0.073768  [12928/30013]\n",
      "loss: 0.125544  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.0%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.171837  [  128/30013]\n",
      "loss: 0.191584  [12928/30013]\n",
      "loss: 0.175043  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.8%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.158130  [  128/30013]\n",
      "loss: 0.366841  [12928/30013]\n",
      "loss: 0.410647  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.537915  [  128/30013]\n",
      "loss: 1.117263  [12928/30013]\n",
      "loss: 1.392470  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 1.257123  [  128/30013]\n",
      "loss: 1.040149  [12928/30013]\n",
      "loss: 0.923611  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.8%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.945671  [  128/30013]\n",
      "loss: 0.809269  [12928/30013]\n",
      "loss: 0.511816  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.1%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.635878  [  128/30013]\n",
      "loss: 0.542402  [12928/30013]\n",
      "loss: 0.393099  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.4%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.362502  [  128/30013]\n",
      "loss: 0.304371  [12928/30013]\n",
      "loss: 0.231351  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.9%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.141533  [  128/30013]\n",
      "loss: 0.152208  [12928/30013]\n",
      "loss: 0.140481  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.2%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.140019  [  128/30013]\n",
      "loss: 0.118753  [12928/30013]\n",
      "loss: 0.101439  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.059877  [  128/30013]\n",
      "loss: 0.030261  [12928/30013]\n",
      "loss: 0.016895  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.014764  [  128/30013]\n",
      "loss: 0.010946  [12928/30013]\n",
      "loss: 0.007036  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.006447  [  128/30013]\n",
      "loss: 0.005321  [12928/30013]\n",
      "loss: 0.004500  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.004762  [  128/30013]\n",
      "loss: 0.004152  [12928/30013]\n",
      "loss: 0.003750  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.003933  [  128/30013]\n",
      "loss: 0.003560  [12928/30013]\n",
      "loss: 0.003236  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.003450  [  128/30013]\n",
      "loss: 0.003129  [12928/30013]\n",
      "loss: 0.002792  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.003057  [  128/30013]\n",
      "loss: 0.002782  [12928/30013]\n",
      "loss: 0.002465  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.002750  [  128/30013]\n",
      "loss: 0.002495  [12928/30013]\n",
      "loss: 0.002207  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.002481  [  128/30013]\n",
      "loss: 0.002268  [12928/30013]\n",
      "loss: 0.001973  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.002231  [  128/30013]\n",
      "loss: 0.002074  [12928/30013]\n",
      "loss: 0.001788  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.002009  [  128/30013]\n",
      "loss: 0.001898  [12928/30013]\n",
      "loss: 0.001627  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.001837  [  128/30013]\n",
      "loss: 0.001735  [12928/30013]\n",
      "loss: 0.001472  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.001667  [  128/30013]\n",
      "loss: 0.001587  [12928/30013]\n",
      "loss: 0.001345  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.001517  [  128/30013]\n",
      "loss: 0.001443  [12928/30013]\n",
      "loss: 0.001241  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.001387  [  128/30013]\n",
      "loss: 0.001314  [12928/30013]\n",
      "loss: 0.001139  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.001275  [  128/30013]\n",
      "loss: 0.001195  [12928/30013]\n",
      "loss: 0.001039  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.001168  [  128/30013]\n",
      "loss: 0.001096  [12928/30013]\n",
      "loss: 0.000954  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.001068  [  128/30013]\n",
      "loss: 0.001004  [12928/30013]\n",
      "loss: 0.000880  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000970  [  128/30013]\n",
      "loss: 0.000913  [12928/30013]\n",
      "loss: 0.000807  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000879  [  128/30013]\n",
      "loss: 0.000824  [12928/30013]\n",
      "loss: 0.000735  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000794  [  128/30013]\n",
      "loss: 0.000744  [12928/30013]\n",
      "loss: 0.000670  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000725  [  128/30013]\n",
      "loss: 0.000670  [12928/30013]\n",
      "loss: 0.000612  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000660  [  128/30013]\n",
      "loss: 0.000607  [12928/30013]\n",
      "loss: 0.000558  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000601  [  128/30013]\n",
      "loss: 0.000549  [12928/30013]\n",
      "loss: 0.000508  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000546  [  128/30013]\n",
      "loss: 0.000498  [12928/30013]\n",
      "loss: 0.000460  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000492  [  128/30013]\n",
      "loss: 0.000450  [12928/30013]\n",
      "loss: 0.000417  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000444  [  128/30013]\n",
      "loss: 0.000405  [12928/30013]\n",
      "loss: 0.000376  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000402  [  128/30013]\n",
      "loss: 0.000365  [12928/30013]\n",
      "loss: 0.000339  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000364  [  128/30013]\n",
      "loss: 0.000330  [12928/30013]\n",
      "loss: 0.000306  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000328  [  128/30013]\n",
      "loss: 0.000298  [12928/30013]\n",
      "loss: 0.000278  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000297  [  128/30013]\n",
      "loss: 0.000270  [12928/30013]\n",
      "loss: 0.000252  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000268  [  128/30013]\n",
      "loss: 0.000244  [12928/30013]\n",
      "loss: 0.000228  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000242  [  128/30013]\n",
      "loss: 0.000220  [12928/30013]\n",
      "loss: 0.000205  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000219  [  128/30013]\n",
      "loss: 0.000199  [12928/30013]\n",
      "loss: 0.000184  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000197  [  128/30013]\n",
      "loss: 0.000178  [12928/30013]\n",
      "loss: 0.000166  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000178  [  128/30013]\n",
      "loss: 0.000160  [12928/30013]\n",
      "loss: 0.000150  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000160  [  128/30013]\n",
      "loss: 0.000143  [12928/30013]\n",
      "loss: 0.000135  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000144  [  128/30013]\n",
      "loss: 0.000128  [12928/30013]\n",
      "loss: 0.000121  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000129  [  128/30013]\n",
      "loss: 0.000115  [12928/30013]\n",
      "loss: 0.000109  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000116  [  128/30013]\n",
      "loss: 0.000103  [12928/30013]\n",
      "loss: 0.000098  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000104  [  128/30013]\n",
      "loss: 0.000092  [12928/30013]\n",
      "loss: 0.000087  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000093  [  128/30013]\n",
      "loss: 0.000083  [12928/30013]\n",
      "loss: 0.000078  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000083  [  128/30013]\n",
      "loss: 0.000074  [12928/30013]\n",
      "loss: 0.000069  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000074  [  128/30013]\n",
      "loss: 0.000066  [12928/30013]\n",
      "loss: 0.000062  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000066  [  128/30013]\n",
      "loss: 0.000059  [12928/30013]\n",
      "loss: 0.000055  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000059  [  128/30013]\n",
      "loss: 0.000053  [12928/30013]\n",
      "loss: 0.000049  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000052  [  128/30013]\n",
      "loss: 0.000047  [12928/30013]\n",
      "loss: 0.000043  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000047  [  128/30013]\n",
      "loss: 0.000042  [12928/30013]\n",
      "loss: 0.000038  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000041  [  128/30013]\n",
      "loss: 0.000037  [12928/30013]\n",
      "loss: 0.000034  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000037  [  128/30013]\n",
      "loss: 0.000033  [12928/30013]\n",
      "loss: 0.000031  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000033  [  128/30013]\n",
      "loss: 0.000030  [12928/30013]\n",
      "loss: 0.000027  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000029  [  128/30013]\n",
      "loss: 0.000026  [12928/30013]\n",
      "loss: 0.000024  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 2.6%, Avg loss: 16.263693 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 2.8%, Avg loss: 16.256116 \n",
      "\n",
      " Error: \n",
      " Accuracy: 2.8%  \n",
      "\n",
      "AUC value is: 0.4903706776362717\n",
      "Accuracy is: 0.4905\n",
      " Error: \n",
      " Accuracy: 2.8%  \n",
      "\n",
      "AUC value is: 0.4998244010781376\n",
      "Accuracy is: 0.49978333333333336\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.612126  [  128/30013]\n",
      "loss: 4.573461  [12928/30013]\n",
      "loss: 4.576478  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 1.9%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 4.019527  [  128/30013]\n",
      "loss: 4.123963  [12928/30013]\n",
      "loss: 4.312266  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 6.6%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 3.711920  [  128/30013]\n",
      "loss: 3.827510  [12928/30013]\n",
      "loss: 3.873335  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 12.2%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 3.303763  [  128/30013]\n",
      "loss: 3.429787  [12928/30013]\n",
      "loss: 3.327711  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 20.0%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 2.852991  [  128/30013]\n",
      "loss: 2.889963  [12928/30013]\n",
      "loss: 2.831817  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 30.5%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 2.294800  [  128/30013]\n",
      "loss: 2.384160  [12928/30013]\n",
      "loss: 2.316913  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 43.2%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.828077  [  128/30013]\n",
      "loss: 1.845508  [12928/30013]\n",
      "loss: 1.762218  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 56.0%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.322715  [  128/30013]\n",
      "loss: 1.320450  [12928/30013]\n",
      "loss: 1.219709  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.7%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.844684  [  128/30013]\n",
      "loss: 0.845248  [12928/30013]\n",
      "loss: 0.631210  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.7%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.491036  [  128/30013]\n",
      "loss: 0.493516  [12928/30013]\n",
      "loss: 0.282171  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.5%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.215227  [  128/30013]\n",
      "loss: 0.148253  [12928/30013]\n",
      "loss: 0.083471  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.042237  [  128/30013]\n",
      "loss: 0.023392  [12928/30013]\n",
      "loss: 0.016532  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.012863  [  128/30013]\n",
      "loss: 0.011545  [12928/30013]\n",
      "loss: 0.011056  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.009062  [  128/30013]\n",
      "loss: 0.008716  [12928/30013]\n",
      "loss: 0.008533  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.007058  [  128/30013]\n",
      "loss: 0.006978  [12928/30013]\n",
      "loss: 0.006910  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.005714  [  128/30013]\n",
      "loss: 0.005756  [12928/30013]\n",
      "loss: 0.005745  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.004735  [  128/30013]\n",
      "loss: 0.004834  [12928/30013]\n",
      "loss: 0.004862  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.003992  [  128/30013]\n",
      "loss: 0.004109  [12928/30013]\n",
      "loss: 0.004176  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.003406  [  128/30013]\n",
      "loss: 0.003530  [12928/30013]\n",
      "loss: 0.003609  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.002931  [  128/30013]\n",
      "loss: 0.003059  [12928/30013]\n",
      "loss: 0.003129  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.002538  [  128/30013]\n",
      "loss: 0.002667  [12928/30013]\n",
      "loss: 0.002722  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.002209  [  128/30013]\n",
      "loss: 0.002335  [12928/30013]\n",
      "loss: 0.002381  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.001930  [  128/30013]\n",
      "loss: 0.002051  [12928/30013]\n",
      "loss: 0.002087  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.001693  [  128/30013]\n",
      "loss: 0.001807  [12928/30013]\n",
      "loss: 0.001833  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.001489  [  128/30013]\n",
      "loss: 0.001595  [12928/30013]\n",
      "loss: 0.001614  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.001313  [  128/30013]\n",
      "loss: 0.001411  [12928/30013]\n",
      "loss: 0.001424  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.001160  [  128/30013]\n",
      "loss: 0.001249  [12928/30013]\n",
      "loss: 0.001259  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.001025  [  128/30013]\n",
      "loss: 0.001107  [12928/30013]\n",
      "loss: 0.001114  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.000906  [  128/30013]\n",
      "loss: 0.000982  [12928/30013]\n",
      "loss: 0.000987  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.000801  [  128/30013]\n",
      "loss: 0.000871  [12928/30013]\n",
      "loss: 0.000875  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.000708  [  128/30013]\n",
      "loss: 0.000773  [12928/30013]\n",
      "loss: 0.000776  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.000626  [  128/30013]\n",
      "loss: 0.000685  [12928/30013]\n",
      "loss: 0.000689  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.000554  [  128/30013]\n",
      "loss: 0.000608  [12928/30013]\n",
      "loss: 0.000611  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.000490  [  128/30013]\n",
      "loss: 0.000539  [12928/30013]\n",
      "loss: 0.000542  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.000434  [  128/30013]\n",
      "loss: 0.000478  [12928/30013]\n",
      "loss: 0.000480  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.000384  [  128/30013]\n",
      "loss: 0.000424  [12928/30013]\n",
      "loss: 0.000425  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.000341  [  128/30013]\n",
      "loss: 0.000376  [12928/30013]\n",
      "loss: 0.000377  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.000302  [  128/30013]\n",
      "loss: 0.000333  [12928/30013]\n",
      "loss: 0.000334  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.000267  [  128/30013]\n",
      "loss: 0.000296  [12928/30013]\n",
      "loss: 0.000296  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.000237  [  128/30013]\n",
      "loss: 0.000262  [12928/30013]\n",
      "loss: 0.000263  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.000210  [  128/30013]\n",
      "loss: 0.000232  [12928/30013]\n",
      "loss: 0.000233  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.000185  [  128/30013]\n",
      "loss: 0.000206  [12928/30013]\n",
      "loss: 0.000207  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.000164  [  128/30013]\n",
      "loss: 0.000182  [12928/30013]\n",
      "loss: 0.000183  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.000145  [  128/30013]\n",
      "loss: 0.000161  [12928/30013]\n",
      "loss: 0.000162  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.000128  [  128/30013]\n",
      "loss: 0.000143  [12928/30013]\n",
      "loss: 0.000143  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.000114  [  128/30013]\n",
      "loss: 0.000127  [12928/30013]\n",
      "loss: 0.000127  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.000101  [  128/30013]\n",
      "loss: 0.000113  [12928/30013]\n",
      "loss: 0.000112  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.000089  [  128/30013]\n",
      "loss: 0.000100  [12928/30013]\n",
      "loss: 0.000099  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.000079  [  128/30013]\n",
      "loss: 0.000088  [12928/30013]\n",
      "loss: 0.000088  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.000070  [  128/30013]\n",
      "loss: 0.000078  [12928/30013]\n",
      "loss: 0.000078  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.000062  [  128/30013]\n",
      "loss: 0.000070  [12928/30013]\n",
      "loss: 0.000069  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000054  [  128/30013]\n",
      "loss: 0.000062  [12928/30013]\n",
      "loss: 0.000061  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.000048  [  128/30013]\n",
      "loss: 0.000055  [12928/30013]\n",
      "loss: 0.000054  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.000042  [  128/30013]\n",
      "loss: 0.000049  [12928/30013]\n",
      "loss: 0.000048  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.000037  [  128/30013]\n",
      "loss: 0.000043  [12928/30013]\n",
      "loss: 0.000042  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.000033  [  128/30013]\n",
      "loss: 0.000038  [12928/30013]\n",
      "loss: 0.000038  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.000029  [  128/30013]\n",
      "loss: 0.000034  [12928/30013]\n",
      "loss: 0.000033  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000026  [  128/30013]\n",
      "loss: 0.000030  [12928/30013]\n",
      "loss: 0.000029  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000023  [  128/30013]\n",
      "loss: 0.000027  [12928/30013]\n",
      "loss: 0.000026  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000020  [  128/30013]\n",
      "loss: 0.000024  [12928/30013]\n",
      "loss: 0.000023  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000018  [  128/30013]\n",
      "loss: 0.000021  [12928/30013]\n",
      "loss: 0.000020  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000016  [  128/30013]\n",
      "loss: 0.000019  [12928/30013]\n",
      "loss: 0.000018  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000014  [  128/30013]\n",
      "loss: 0.000017  [12928/30013]\n",
      "loss: 0.000016  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000012  [  128/30013]\n",
      "loss: 0.000015  [12928/30013]\n",
      "loss: 0.000014  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000011  [  128/30013]\n",
      "loss: 0.000013  [12928/30013]\n",
      "loss: 0.000012  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000010  [  128/30013]\n",
      "loss: 0.000012  [12928/30013]\n",
      "loss: 0.000011  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000009  [  128/30013]\n",
      "loss: 0.000010  [12928/30013]\n",
      "loss: 0.000010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000008  [  128/30013]\n",
      "loss: 0.000009  [12928/30013]\n",
      "loss: 0.000009  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000007  [  128/30013]\n",
      "loss: 0.000008  [12928/30013]\n",
      "loss: 0.000008  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000006  [  128/30013]\n",
      "loss: 0.000007  [12928/30013]\n",
      "loss: 0.000007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000006  [12928/30013]\n",
      "loss: 0.000006  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000006  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000005  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 2.6%, Avg loss: 12.195436 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 2.8%, Avg loss: 12.165902 \n",
      "\n",
      " Error: \n",
      " Accuracy: 2.8%  \n",
      "\n",
      "AUC value is: 0.4900580120220045\n",
      "Accuracy is: 0.4904833333333333\n",
      " Error: \n",
      " Accuracy: 2.8%  \n",
      "\n",
      "AUC value is: 0.5000816439042198\n",
      "Accuracy is: 0.49978333333333336\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.609512  [  128/30013]\n",
      "loss: 4.631711  [12928/30013]\n",
      "loss: 4.533552  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 1.6%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 3.934364  [  128/30013]\n",
      "loss: 4.136251  [12928/30013]\n",
      "loss: 4.148732  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 8.1%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 3.311096  [  128/30013]\n",
      "loss: 3.445337  [12928/30013]\n",
      "loss: 3.374678  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 20.4%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 2.336396  [  128/30013]\n",
      "loss: 2.476311  [12928/30013]\n",
      "loss: 2.214420  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.2%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 1.132892  [  128/30013]\n",
      "loss: 1.283110  [12928/30013]\n",
      "loss: 0.890869  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.1%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.367616  [  128/30013]\n",
      "loss: 0.320230  [12928/30013]\n",
      "loss: 0.174652  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.0%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.067566  [  128/30013]\n",
      "loss: 0.040177  [12928/30013]\n",
      "loss: 0.026599  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.016831  [  128/30013]\n",
      "loss: 0.016922  [12928/30013]\n",
      "loss: 0.014751  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.010308  [  128/30013]\n",
      "loss: 0.011689  [12928/30013]\n",
      "loss: 0.010675  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.007652  [  128/30013]\n",
      "loss: 0.008835  [12928/30013]\n",
      "loss: 0.008187  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.005978  [  128/30013]\n",
      "loss: 0.006953  [12928/30013]\n",
      "loss: 0.006491  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.004807  [  128/30013]\n",
      "loss: 0.005609  [12928/30013]\n",
      "loss: 0.005261  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.003939  [  128/30013]\n",
      "loss: 0.004604  [12928/30013]\n",
      "loss: 0.004332  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.003272  [  128/30013]\n",
      "loss: 0.003827  [12928/30013]\n",
      "loss: 0.003610  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.002746  [  128/30013]\n",
      "loss: 0.003212  [12928/30013]\n",
      "loss: 0.003036  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.002323  [  128/30013]\n",
      "loss: 0.002716  [12928/30013]\n",
      "loss: 0.002571  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.001978  [  128/30013]\n",
      "loss: 0.002312  [12928/30013]\n",
      "loss: 0.002191  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.001693  [  128/30013]\n",
      "loss: 0.001977  [12928/30013]\n",
      "loss: 0.001876  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.001455  [  128/30013]\n",
      "loss: 0.001699  [12928/30013]\n",
      "loss: 0.001613  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.001256  [  128/30013]\n",
      "loss: 0.001465  [12928/30013]\n",
      "loss: 0.001392  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.001087  [  128/30013]\n",
      "loss: 0.001267  [12928/30013]\n",
      "loss: 0.001205  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.000943  [  128/30013]\n",
      "loss: 0.001098  [12928/30013]\n",
      "loss: 0.001045  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.000820  [  128/30013]\n",
      "loss: 0.000954  [12928/30013]\n",
      "loss: 0.000908  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.000714  [  128/30013]\n",
      "loss: 0.000831  [12928/30013]\n",
      "loss: 0.000791  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.000623  [  128/30013]\n",
      "loss: 0.000724  [12928/30013]\n",
      "loss: 0.000690  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.000544  [  128/30013]\n",
      "loss: 0.000633  [12928/30013]\n",
      "loss: 0.000603  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.000476  [  128/30013]\n",
      "loss: 0.000553  [12928/30013]\n",
      "loss: 0.000527  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.000417  [  128/30013]\n",
      "loss: 0.000484  [12928/30013]\n",
      "loss: 0.000461  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.000366  [  128/30013]\n",
      "loss: 0.000424  [12928/30013]\n",
      "loss: 0.000404  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.000321  [  128/30013]\n",
      "loss: 0.000372  [12928/30013]\n",
      "loss: 0.000354  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.000282  [  128/30013]\n",
      "loss: 0.000326  [12928/30013]\n",
      "loss: 0.000311  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.000248  [  128/30013]\n",
      "loss: 0.000286  [12928/30013]\n",
      "loss: 0.000273  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.000218  [  128/30013]\n",
      "loss: 0.000252  [12928/30013]\n",
      "loss: 0.000240  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.000191  [  128/30013]\n",
      "loss: 0.000221  [12928/30013]\n",
      "loss: 0.000211  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.000168  [  128/30013]\n",
      "loss: 0.000194  [12928/30013]\n",
      "loss: 0.000185  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.000148  [  128/30013]\n",
      "loss: 0.000171  [12928/30013]\n",
      "loss: 0.000163  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.000131  [  128/30013]\n",
      "loss: 0.000150  [12928/30013]\n",
      "loss: 0.000143  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.000115  [  128/30013]\n",
      "loss: 0.000132  [12928/30013]\n",
      "loss: 0.000126  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.000101  [  128/30013]\n",
      "loss: 0.000117  [12928/30013]\n",
      "loss: 0.000111  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.000089  [  128/30013]\n",
      "loss: 0.000103  [12928/30013]\n",
      "loss: 0.000098  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.000079  [  128/30013]\n",
      "loss: 0.000090  [12928/30013]\n",
      "loss: 0.000086  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.000070  [  128/30013]\n",
      "loss: 0.000080  [12928/30013]\n",
      "loss: 0.000076  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.000061  [  128/30013]\n",
      "loss: 0.000070  [12928/30013]\n",
      "loss: 0.000067  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.000054  [  128/30013]\n",
      "loss: 0.000062  [12928/30013]\n",
      "loss: 0.000059  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.000048  [  128/30013]\n",
      "loss: 0.000054  [12928/30013]\n",
      "loss: 0.000052  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.000042  [  128/30013]\n",
      "loss: 0.000048  [12928/30013]\n",
      "loss: 0.000046  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.000037  [  128/30013]\n",
      "loss: 0.000042  [12928/30013]\n",
      "loss: 0.000040  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.000033  [  128/30013]\n",
      "loss: 0.000037  [12928/30013]\n",
      "loss: 0.000036  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.000029  [  128/30013]\n",
      "loss: 0.000033  [12928/30013]\n",
      "loss: 0.000031  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.000026  [  128/30013]\n",
      "loss: 0.000029  [12928/30013]\n",
      "loss: 0.000028  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.000023  [  128/30013]\n",
      "loss: 0.000026  [12928/30013]\n",
      "loss: 0.000024  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000020  [  128/30013]\n",
      "loss: 0.000023  [12928/30013]\n",
      "loss: 0.000022  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.000018  [  128/30013]\n",
      "loss: 0.000020  [12928/30013]\n",
      "loss: 0.000019  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.000016  [  128/30013]\n",
      "loss: 0.000018  [12928/30013]\n",
      "loss: 0.000017  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.000014  [  128/30013]\n",
      "loss: 0.000015  [12928/30013]\n",
      "loss: 0.000015  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.000012  [  128/30013]\n",
      "loss: 0.000014  [12928/30013]\n",
      "loss: 0.000013  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.000011  [  128/30013]\n",
      "loss: 0.000012  [12928/30013]\n",
      "loss: 0.000012  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000010  [  128/30013]\n",
      "loss: 0.000011  [12928/30013]\n",
      "loss: 0.000010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000009  [  128/30013]\n",
      "loss: 0.000009  [12928/30013]\n",
      "loss: 0.000009  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000008  [  128/30013]\n",
      "loss: 0.000008  [12928/30013]\n",
      "loss: 0.000008  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000007  [  128/30013]\n",
      "loss: 0.000007  [12928/30013]\n",
      "loss: 0.000007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000006  [  128/30013]\n",
      "loss: 0.000006  [12928/30013]\n",
      "loss: 0.000006  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000006  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000005  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 1.5%, Avg loss: 9.719001 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 1.6%, Avg loss: 9.670010 \n",
      "\n",
      " Error: \n",
      " Accuracy: 1.6%  \n",
      "\n",
      "AUC value is: 0.4900539175767912\n",
      "Accuracy is: 0.4904833333333333\n",
      " Error: \n",
      " Accuracy: 1.6%  \n",
      "\n",
      "AUC value is: 0.5\n",
      "Accuracy is: 0.49978333333333336\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.585384  [  128/30013]\n",
      "loss: 4.549411  [12928/30013]\n",
      "loss: 4.533925  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 2.0%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 4.087087  [  128/30013]\n",
      "loss: 4.238257  [12928/30013]\n",
      "loss: 4.324571  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 5.0%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 3.896960  [  128/30013]\n",
      "loss: 3.932145  [12928/30013]\n",
      "loss: 4.048576  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 8.9%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 3.575307  [  128/30013]\n",
      "loss: 3.554669  [12928/30013]\n",
      "loss: 3.678161  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 15.0%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 3.122325  [  128/30013]\n",
      "loss: 3.055411  [12928/30013]\n",
      "loss: 3.155499  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 25.5%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 2.523309  [  128/30013]\n",
      "loss: 2.402601  [12928/30013]\n",
      "loss: 2.424721  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 41.5%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.771040  [  128/30013]\n",
      "loss: 1.591917  [12928/30013]\n",
      "loss: 1.477735  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 64.3%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.952769  [  128/30013]\n",
      "loss: 0.766517  [12928/30013]\n",
      "loss: 0.563627  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 87.6%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.324416  [  128/30013]\n",
      "loss: 0.238517  [12928/30013]\n",
      "loss: 0.137312  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.099589  [  128/30013]\n",
      "loss: 0.065893  [12928/30013]\n",
      "loss: 0.049644  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.037866  [  128/30013]\n",
      "loss: 0.032613  [12928/30013]\n",
      "loss: 0.028128  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.022981  [  128/30013]\n",
      "loss: 0.021542  [12928/30013]\n",
      "loss: 0.019420  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.016427  [  128/30013]\n",
      "loss: 0.015722  [12928/30013]\n",
      "loss: 0.014447  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.012465  [  128/30013]\n",
      "loss: 0.012042  [12928/30013]\n",
      "loss: 0.011183  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.009790  [  128/30013]\n",
      "loss: 0.009505  [12928/30013]\n",
      "loss: 0.008887  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.007870  [  128/30013]\n",
      "loss: 0.007661  [12928/30013]\n",
      "loss: 0.007196  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.006433  [  128/30013]\n",
      "loss: 0.006270  [12928/30013]\n",
      "loss: 0.005909  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.005325  [  128/30013]\n",
      "loss: 0.005192  [12928/30013]\n",
      "loss: 0.004905  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.004451  [  128/30013]\n",
      "loss: 0.004339  [12928/30013]\n",
      "loss: 0.004108  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.003750  [  128/30013]\n",
      "loss: 0.003653  [12928/30013]\n",
      "loss: 0.003465  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.003180  [  128/30013]\n",
      "loss: 0.003094  [12928/30013]\n",
      "loss: 0.002939  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.002710  [  128/30013]\n",
      "loss: 0.002634  [12928/30013]\n",
      "loss: 0.002505  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.002320  [  128/30013]\n",
      "loss: 0.002252  [12928/30013]\n",
      "loss: 0.002145  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.001993  [  128/30013]\n",
      "loss: 0.001932  [12928/30013]\n",
      "loss: 0.001842  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.001718  [  128/30013]\n",
      "loss: 0.001662  [12928/30013]\n",
      "loss: 0.001587  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.001484  [  128/30013]\n",
      "loss: 0.001434  [12928/30013]\n",
      "loss: 0.001371  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.001285  [  128/30013]\n",
      "loss: 0.001240  [12928/30013]\n",
      "loss: 0.001186  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.001115  [  128/30013]\n",
      "loss: 0.001075  [12928/30013]\n",
      "loss: 0.001029  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.000969  [  128/30013]\n",
      "loss: 0.000933  [12928/30013]\n",
      "loss: 0.000894  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.000844  [  128/30013]\n",
      "loss: 0.000811  [12928/30013]\n",
      "loss: 0.000778  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.000735  [  128/30013]\n",
      "loss: 0.000706  [12928/30013]\n",
      "loss: 0.000678  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.000641  [  128/30013]\n",
      "loss: 0.000615  [12928/30013]\n",
      "loss: 0.000591  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.000560  [  128/30013]\n",
      "loss: 0.000537  [12928/30013]\n",
      "loss: 0.000516  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.000490  [  128/30013]\n",
      "loss: 0.000469  [12928/30013]\n",
      "loss: 0.000451  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.000428  [  128/30013]\n",
      "loss: 0.000410  [12928/30013]\n",
      "loss: 0.000395  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.000375  [  128/30013]\n",
      "loss: 0.000358  [12928/30013]\n",
      "loss: 0.000345  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.000328  [  128/30013]\n",
      "loss: 0.000314  [12928/30013]\n",
      "loss: 0.000302  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.000288  [  128/30013]\n",
      "loss: 0.000275  [12928/30013]\n",
      "loss: 0.000265  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.000252  [  128/30013]\n",
      "loss: 0.000241  [12928/30013]\n",
      "loss: 0.000232  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.000221  [  128/30013]\n",
      "loss: 0.000211  [12928/30013]\n",
      "loss: 0.000204  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.000194  [  128/30013]\n",
      "loss: 0.000185  [12928/30013]\n",
      "loss: 0.000179  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.000171  [  128/30013]\n",
      "loss: 0.000163  [12928/30013]\n",
      "loss: 0.000157  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.000150  [  128/30013]\n",
      "loss: 0.000143  [12928/30013]\n",
      "loss: 0.000138  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.000132  [  128/30013]\n",
      "loss: 0.000125  [12928/30013]\n",
      "loss: 0.000121  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.000116  [  128/30013]\n",
      "loss: 0.000110  [12928/30013]\n",
      "loss: 0.000107  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.000102  [  128/30013]\n",
      "loss: 0.000097  [12928/30013]\n",
      "loss: 0.000094  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.000089  [  128/30013]\n",
      "loss: 0.000085  [12928/30013]\n",
      "loss: 0.000082  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.000079  [  128/30013]\n",
      "loss: 0.000075  [12928/30013]\n",
      "loss: 0.000072  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.000069  [  128/30013]\n",
      "loss: 0.000066  [12928/30013]\n",
      "loss: 0.000064  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.000061  [  128/30013]\n",
      "loss: 0.000058  [12928/30013]\n",
      "loss: 0.000056  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.000054  [  128/30013]\n",
      "loss: 0.000051  [12928/30013]\n",
      "loss: 0.000049  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000047  [  128/30013]\n",
      "loss: 0.000045  [12928/30013]\n",
      "loss: 0.000043  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.000041  [  128/30013]\n",
      "loss: 0.000040  [12928/30013]\n",
      "loss: 0.000038  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.000037  [  128/30013]\n",
      "loss: 0.000035  [12928/30013]\n",
      "loss: 0.000034  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.000032  [  128/30013]\n",
      "loss: 0.000031  [12928/30013]\n",
      "loss: 0.000030  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.000028  [  128/30013]\n",
      "loss: 0.000027  [12928/30013]\n",
      "loss: 0.000026  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.000025  [  128/30013]\n",
      "loss: 0.000024  [12928/30013]\n",
      "loss: 0.000023  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000022  [  128/30013]\n",
      "loss: 0.000021  [12928/30013]\n",
      "loss: 0.000020  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000019  [  128/30013]\n",
      "loss: 0.000019  [12928/30013]\n",
      "loss: 0.000018  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000017  [  128/30013]\n",
      "loss: 0.000016  [12928/30013]\n",
      "loss: 0.000016  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000015  [  128/30013]\n",
      "loss: 0.000014  [12928/30013]\n",
      "loss: 0.000014  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000013  [  128/30013]\n",
      "loss: 0.000013  [12928/30013]\n",
      "loss: 0.000012  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000012  [  128/30013]\n",
      "loss: 0.000011  [12928/30013]\n",
      "loss: 0.000011  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000010  [  128/30013]\n",
      "loss: 0.000010  [12928/30013]\n",
      "loss: 0.000010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000009  [  128/30013]\n",
      "loss: 0.000009  [12928/30013]\n",
      "loss: 0.000008  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000008  [  128/30013]\n",
      "loss: 0.000008  [12928/30013]\n",
      "loss: 0.000007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000007  [  128/30013]\n",
      "loss: 0.000007  [12928/30013]\n",
      "loss: 0.000007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000006  [  128/30013]\n",
      "loss: 0.000006  [12928/30013]\n",
      "loss: 0.000006  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000006  [  128/30013]\n",
      "loss: 0.000005  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000005  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 2.6%, Avg loss: 11.846585 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 2.8%, Avg loss: 11.837485 \n",
      "\n",
      " Error: \n",
      " Accuracy: 2.8%  \n",
      "\n",
      "AUC value is: 0.48994639311215604\n",
      "Accuracy is: 0.4904833333333333\n",
      " Error: \n",
      " Accuracy: 2.8%  \n",
      "\n",
      "AUC value is: 0.5\n",
      "Accuracy is: 0.49978333333333336\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.612017  [  128/30013]\n",
      "loss: 4.483974  [12928/30013]\n",
      "loss: 4.525020  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 2.3%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 4.219968  [  128/30013]\n",
      "loss: 4.268377  [12928/30013]\n",
      "loss: 4.404608  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 4.3%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 4.124568  [  128/30013]\n",
      "loss: 4.110240  [12928/30013]\n",
      "loss: 4.248617  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 6.1%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 3.993886  [  128/30013]\n",
      "loss: 3.971663  [12928/30013]\n",
      "loss: 4.075251  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 8.4%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 3.827397  [  128/30013]\n",
      "loss: 3.816361  [12928/30013]\n",
      "loss: 3.887770  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 11.5%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 3.621889  [  128/30013]\n",
      "loss: 3.630561  [12928/30013]\n",
      "loss: 3.671462  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 15.4%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 3.373343  [  128/30013]\n",
      "loss: 3.407577  [12928/30013]\n",
      "loss: 3.415113  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 20.5%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 3.079694  [  128/30013]\n",
      "loss: 3.144445  [12928/30013]\n",
      "loss: 3.108670  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 27.5%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 2.739597  [  128/30013]\n",
      "loss: 2.827847  [12928/30013]\n",
      "loss: 2.751484  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 35.6%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 2.359484  [  128/30013]\n",
      "loss: 2.454454  [12928/30013]\n",
      "loss: 2.350828  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 45.6%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.949619  [  128/30013]\n",
      "loss: 2.036117  [12928/30013]\n",
      "loss: 1.919491  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 56.8%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 1.532154  [  128/30013]\n",
      "loss: 1.590063  [12928/30013]\n",
      "loss: 1.474853  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.0%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 1.135454  [  128/30013]\n",
      "loss: 1.144915  [12928/30013]\n",
      "loss: 1.045263  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.6%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.775049  [  128/30013]\n",
      "loss: 0.748282  [12928/30013]\n",
      "loss: 0.667054  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.3%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.476025  [  128/30013]\n",
      "loss: 0.441691  [12928/30013]\n",
      "loss: 0.381941  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.6%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.264546  [  128/30013]\n",
      "loss: 0.237328  [12928/30013]\n",
      "loss: 0.208660  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.147150  [  128/30013]\n",
      "loss: 0.130477  [12928/30013]\n",
      "loss: 0.117548  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.089436  [  128/30013]\n",
      "loss: 0.081550  [12928/30013]\n",
      "loss: 0.073442  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.059601  [  128/30013]\n",
      "loss: 0.056008  [12928/30013]\n",
      "loss: 0.051290  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.042728  [  128/30013]\n",
      "loss: 0.040915  [12928/30013]\n",
      "loss: 0.038077  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.032145  [  128/30013]\n",
      "loss: 0.031172  [12928/30013]\n",
      "loss: 0.029306  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.024977  [  128/30013]\n",
      "loss: 0.024426  [12928/30013]\n",
      "loss: 0.023122  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.019856  [  128/30013]\n",
      "loss: 0.019528  [12928/30013]\n",
      "loss: 0.018576  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.016055  [  128/30013]\n",
      "loss: 0.015849  [12928/30013]\n",
      "loss: 0.015131  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.013150  [  128/30013]\n",
      "loss: 0.013015  [12928/30013]\n",
      "loss: 0.012459  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.010881  [  128/30013]\n",
      "loss: 0.010789  [12928/30013]\n",
      "loss: 0.010349  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.009078  [  128/30013]\n",
      "loss: 0.009011  [12928/30013]\n",
      "loss: 0.008657  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.007624  [  128/30013]\n",
      "loss: 0.007574  [12928/30013]\n",
      "loss: 0.007285  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.006438  [  128/30013]\n",
      "loss: 0.006400  [12928/30013]\n",
      "loss: 0.006160  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.005461  [  128/30013]\n",
      "loss: 0.005431  [12928/30013]\n",
      "loss: 0.005231  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.004651  [  128/30013]\n",
      "loss: 0.004627  [12928/30013]\n",
      "loss: 0.004458  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.003975  [  128/30013]\n",
      "loss: 0.003954  [12928/30013]\n",
      "loss: 0.003811  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.003406  [  128/30013]\n",
      "loss: 0.003389  [12928/30013]\n",
      "loss: 0.003267  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.002926  [  128/30013]\n",
      "loss: 0.002912  [12928/30013]\n",
      "loss: 0.002807  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.002519  [  128/30013]\n",
      "loss: 0.002507  [12928/30013]\n",
      "loss: 0.002417  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.002173  [  128/30013]\n",
      "loss: 0.002162  [12928/30013]\n",
      "loss: 0.002085  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.001877  [  128/30013]\n",
      "loss: 0.001868  [12928/30013]\n",
      "loss: 0.001801  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.001624  [  128/30013]\n",
      "loss: 0.001617  [12928/30013]\n",
      "loss: 0.001558  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.001408  [  128/30013]\n",
      "loss: 0.001401  [12928/30013]\n",
      "loss: 0.001350  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.001221  [  128/30013]\n",
      "loss: 0.001215  [12928/30013]\n",
      "loss: 0.001171  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.001060  [  128/30013]\n",
      "loss: 0.001055  [12928/30013]\n",
      "loss: 0.001017  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.000922  [  128/30013]\n",
      "loss: 0.000917  [12928/30013]\n",
      "loss: 0.000884  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.000802  [  128/30013]\n",
      "loss: 0.000797  [12928/30013]\n",
      "loss: 0.000769  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.000698  [  128/30013]\n",
      "loss: 0.000694  [12928/30013]\n",
      "loss: 0.000669  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.000608  [  128/30013]\n",
      "loss: 0.000604  [12928/30013]\n",
      "loss: 0.000583  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.000530  [  128/30013]\n",
      "loss: 0.000527  [12928/30013]\n",
      "loss: 0.000508  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.000463  [  128/30013]\n",
      "loss: 0.000459  [12928/30013]\n",
      "loss: 0.000443  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.000404  [  128/30013]\n",
      "loss: 0.000401  [12928/30013]\n",
      "loss: 0.000387  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.000353  [  128/30013]\n",
      "loss: 0.000350  [12928/30013]\n",
      "loss: 0.000338  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.000308  [  128/30013]\n",
      "loss: 0.000306  [12928/30013]\n",
      "loss: 0.000295  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.000270  [  128/30013]\n",
      "loss: 0.000267  [12928/30013]\n",
      "loss: 0.000258  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000236  [  128/30013]\n",
      "loss: 0.000233  [12928/30013]\n",
      "loss: 0.000225  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.000206  [  128/30013]\n",
      "loss: 0.000204  [12928/30013]\n",
      "loss: 0.000197  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.000181  [  128/30013]\n",
      "loss: 0.000178  [12928/30013]\n",
      "loss: 0.000172  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.000158  [  128/30013]\n",
      "loss: 0.000156  [12928/30013]\n",
      "loss: 0.000151  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.000139  [  128/30013]\n",
      "loss: 0.000137  [12928/30013]\n",
      "loss: 0.000132  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.000121  [  128/30013]\n",
      "loss: 0.000120  [12928/30013]\n",
      "loss: 0.000115  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000106  [  128/30013]\n",
      "loss: 0.000105  [12928/30013]\n",
      "loss: 0.000101  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000093  [  128/30013]\n",
      "loss: 0.000092  [12928/30013]\n",
      "loss: 0.000088  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000082  [  128/30013]\n",
      "loss: 0.000080  [12928/30013]\n",
      "loss: 0.000077  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000072  [  128/30013]\n",
      "loss: 0.000070  [12928/30013]\n",
      "loss: 0.000068  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000063  [  128/30013]\n",
      "loss: 0.000062  [12928/30013]\n",
      "loss: 0.000060  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000055  [  128/30013]\n",
      "loss: 0.000054  [12928/30013]\n",
      "loss: 0.000052  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000049  [  128/30013]\n",
      "loss: 0.000048  [12928/30013]\n",
      "loss: 0.000046  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000043  [  128/30013]\n",
      "loss: 0.000042  [12928/30013]\n",
      "loss: 0.000040  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000037  [  128/30013]\n",
      "loss: 0.000037  [12928/30013]\n",
      "loss: 0.000035  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000033  [  128/30013]\n",
      "loss: 0.000032  [12928/30013]\n",
      "loss: 0.000031  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000029  [  128/30013]\n",
      "loss: 0.000028  [12928/30013]\n",
      "loss: 0.000027  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000025  [  128/30013]\n",
      "loss: 0.000025  [12928/30013]\n",
      "loss: 0.000024  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000022  [  128/30013]\n",
      "loss: 0.000022  [12928/30013]\n",
      "loss: 0.000021  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000020  [  128/30013]\n",
      "loss: 0.000019  [12928/30013]\n",
      "loss: 0.000018  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000017  [  128/30013]\n",
      "loss: 0.000017  [12928/30013]\n",
      "loss: 0.000016  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000015  [  128/30013]\n",
      "loss: 0.000015  [12928/30013]\n",
      "loss: 0.000014  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000013  [  128/30013]\n",
      "loss: 0.000013  [12928/30013]\n",
      "loss: 0.000012  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000012  [  128/30013]\n",
      "loss: 0.000011  [12928/30013]\n",
      "loss: 0.000011  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000010  [  128/30013]\n",
      "loss: 0.000010  [12928/30013]\n",
      "loss: 0.000010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000009  [  128/30013]\n",
      "loss: 0.000009  [12928/30013]\n",
      "loss: 0.000008  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000008  [  128/30013]\n",
      "loss: 0.000008  [12928/30013]\n",
      "loss: 0.000007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000007  [  128/30013]\n",
      "loss: 0.000007  [12928/30013]\n",
      "loss: 0.000007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000006  [  128/30013]\n",
      "loss: 0.000006  [12928/30013]\n",
      "loss: 0.000006  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000005  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000005  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000001  [  128/30013]\n",
      "loss: 0.000001  [12928/30013]\n",
      "loss: 0.000001  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000000  [  128/30013]\n",
      "loss: 0.000000  [12928/30013]\n",
      "loss: 0.000000  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 3.1%, Avg loss: 11.227942 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 3.2%, Avg loss: 11.198406 \n",
      "\n",
      " Error: \n",
      " Accuracy: 3.2%  \n",
      "\n",
      "AUC value is: 0.4906392876867107\n",
      "Accuracy is: 0.4904833333333333\n",
      " Error: \n",
      " Accuracy: 3.2%  \n",
      "\n",
      "AUC value is: 0.5004330656368757\n",
      "Accuracy is: 0.49978333333333336\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.611558  [  128/30013]\n",
      "loss: 4.532756  [12928/30013]\n",
      "loss: 4.485042  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 2.4%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 4.247937  [  128/30013]\n",
      "loss: 4.331451  [12928/30013]\n",
      "loss: 4.386824  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 4.6%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 4.147511  [  128/30013]\n",
      "loss: 4.185678  [12928/30013]\n",
      "loss: 4.255376  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 6.3%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 4.038897  [  128/30013]\n",
      "loss: 4.042597  [12928/30013]\n",
      "loss: 4.129270  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 8.1%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 3.917339  [  128/30013]\n",
      "loss: 3.901729  [12928/30013]\n",
      "loss: 3.996742  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 10.2%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 3.781704  [  128/30013]\n",
      "loss: 3.762050  [12928/30013]\n",
      "loss: 3.857545  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 12.5%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 3.633597  [  128/30013]\n",
      "loss: 3.617002  [12928/30013]\n",
      "loss: 3.712403  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 15.3%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 3.480807  [  128/30013]\n",
      "loss: 3.459168  [12928/30013]\n",
      "loss: 3.554995  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 18.6%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 3.319289  [  128/30013]\n",
      "loss: 3.283685  [12928/30013]\n",
      "loss: 3.381765  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 22.5%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 3.140575  [  128/30013]\n",
      "loss: 3.088507  [12928/30013]\n",
      "loss: 3.191096  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 27.0%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 2.942194  [  128/30013]\n",
      "loss: 2.872534  [12928/30013]\n",
      "loss: 2.979396  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 32.1%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 2.725240  [  128/30013]\n",
      "loss: 2.633154  [12928/30013]\n",
      "loss: 2.744178  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 38.0%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 2.486730  [  128/30013]\n",
      "loss: 2.366604  [12928/30013]\n",
      "loss: 2.485141  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.6%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 2.221310  [  128/30013]\n",
      "loss: 2.079909  [12928/30013]\n",
      "loss: 2.206441  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 52.0%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 1.931725  [  128/30013]\n",
      "loss: 1.782480  [12928/30013]\n",
      "loss: 1.912627  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.1%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 1.629747  [  128/30013]\n",
      "loss: 1.480262  [12928/30013]\n",
      "loss: 1.606643  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.6%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 1.329680  [  128/30013]\n",
      "loss: 1.188433  [12928/30013]\n",
      "loss: 1.297753  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.7%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 1.046500  [  128/30013]\n",
      "loss: 0.920723  [12928/30013]\n",
      "loss: 1.001640  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.5%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.790696  [  128/30013]\n",
      "loss: 0.685728  [12928/30013]\n",
      "loss: 0.735915  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.9%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.569172  [  128/30013]\n",
      "loss: 0.492193  [12928/30013]\n",
      "loss: 0.512499  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.5%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.390087  [  128/30013]\n",
      "loss: 0.342795  [12928/30013]\n",
      "loss: 0.340508  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.2%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.258224  [  128/30013]\n",
      "loss: 0.233567  [12928/30013]\n",
      "loss: 0.223239  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.171828  [  128/30013]\n",
      "loss: 0.159494  [12928/30013]\n",
      "loss: 0.149700  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.119053  [  128/30013]\n",
      "loss: 0.112087  [12928/30013]\n",
      "loss: 0.105170  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.086129  [  128/30013]\n",
      "loss: 0.081707  [12928/30013]\n",
      "loss: 0.077495  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.064884  [  128/30013]\n",
      "loss: 0.061764  [12928/30013]\n",
      "loss: 0.059250  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.050439  [  128/30013]\n",
      "loss: 0.048103  [12928/30013]\n",
      "loss: 0.046562  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.040120  [  128/30013]\n",
      "loss: 0.038319  [12928/30013]\n",
      "loss: 0.037340  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.032471  [  128/30013]\n",
      "loss: 0.031047  [12928/30013]\n",
      "loss: 0.030409  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.026637  [  128/30013]\n",
      "loss: 0.025485  [12928/30013]\n",
      "loss: 0.025063  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.022086  [  128/30013]\n",
      "loss: 0.021136  [12928/30013]\n",
      "loss: 0.020855  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.018468  [  128/30013]\n",
      "loss: 0.017673  [12928/30013]\n",
      "loss: 0.017487  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.015550  [  128/30013]\n",
      "loss: 0.014878  [12928/30013]\n",
      "loss: 0.014756  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.013168  [  128/30013]\n",
      "loss: 0.012594  [12928/30013]\n",
      "loss: 0.012516  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.011202  [  128/30013]\n",
      "loss: 0.010710  [12928/30013]\n",
      "loss: 0.010662  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.009568  [  128/30013]\n",
      "loss: 0.009142  [12928/30013]\n",
      "loss: 0.009115  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.008199  [  128/30013]\n",
      "loss: 0.007829  [12928/30013]\n",
      "loss: 0.007817  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.007045  [  128/30013]\n",
      "loss: 0.006724  [12928/30013]\n",
      "loss: 0.006721  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.006068  [  128/30013]\n",
      "loss: 0.005788  [12928/30013]\n",
      "loss: 0.005791  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.005238  [  128/30013]\n",
      "loss: 0.004993  [12928/30013]\n",
      "loss: 0.004999  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.004529  [  128/30013]\n",
      "loss: 0.004314  [12928/30013]\n",
      "loss: 0.004323  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.003922  [  128/30013]\n",
      "loss: 0.003734  [12928/30013]\n",
      "loss: 0.003743  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.003402  [  128/30013]\n",
      "loss: 0.003236  [12928/30013]\n",
      "loss: 0.003245  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.002954  [  128/30013]\n",
      "loss: 0.002808  [12928/30013]\n",
      "loss: 0.002816  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.002567  [  128/30013]\n",
      "loss: 0.002439  [12928/30013]\n",
      "loss: 0.002447  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.002234  [  128/30013]\n",
      "loss: 0.002121  [12928/30013]\n",
      "loss: 0.002128  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.001945  [  128/30013]\n",
      "loss: 0.001846  [12928/30013]\n",
      "loss: 0.001852  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.001695  [  128/30013]\n",
      "loss: 0.001608  [12928/30013]\n",
      "loss: 0.001613  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.001478  [  128/30013]\n",
      "loss: 0.001401  [12928/30013]\n",
      "loss: 0.001406  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.001290  [  128/30013]\n",
      "loss: 0.001222  [12928/30013]\n",
      "loss: 0.001226  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.001126  [  128/30013]\n",
      "loss: 0.001067  [12928/30013]\n",
      "loss: 0.001070  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.000984  [  128/30013]\n",
      "loss: 0.000931  [12928/30013]\n",
      "loss: 0.000934  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.000860  [  128/30013]\n",
      "loss: 0.000814  [12928/30013]\n",
      "loss: 0.000816  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.000752  [  128/30013]\n",
      "loss: 0.000711  [12928/30013]\n",
      "loss: 0.000713  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.000658  [  128/30013]\n",
      "loss: 0.000622  [12928/30013]\n",
      "loss: 0.000623  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.000575  [  128/30013]\n",
      "loss: 0.000544  [12928/30013]\n",
      "loss: 0.000545  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.000504  [  128/30013]\n",
      "loss: 0.000476  [12928/30013]\n",
      "loss: 0.000477  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.000441  [  128/30013]\n",
      "loss: 0.000417  [12928/30013]\n",
      "loss: 0.000417  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.000386  [  128/30013]\n",
      "loss: 0.000365  [12928/30013]\n",
      "loss: 0.000365  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.000338  [  128/30013]\n",
      "loss: 0.000320  [12928/30013]\n",
      "loss: 0.000320  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000296  [  128/30013]\n",
      "loss: 0.000280  [12928/30013]\n",
      "loss: 0.000280  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000260  [  128/30013]\n",
      "loss: 0.000245  [12928/30013]\n",
      "loss: 0.000246  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000228  [  128/30013]\n",
      "loss: 0.000215  [12928/30013]\n",
      "loss: 0.000216  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000199  [  128/30013]\n",
      "loss: 0.000189  [12928/30013]\n",
      "loss: 0.000189  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000175  [  128/30013]\n",
      "loss: 0.000165  [12928/30013]\n",
      "loss: 0.000166  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000153  [  128/30013]\n",
      "loss: 0.000145  [12928/30013]\n",
      "loss: 0.000145  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000135  [  128/30013]\n",
      "loss: 0.000127  [12928/30013]\n",
      "loss: 0.000128  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000118  [  128/30013]\n",
      "loss: 0.000112  [12928/30013]\n",
      "loss: 0.000112  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000104  [  128/30013]\n",
      "loss: 0.000098  [12928/30013]\n",
      "loss: 0.000098  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000091  [  128/30013]\n",
      "loss: 0.000086  [12928/30013]\n",
      "loss: 0.000086  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000080  [  128/30013]\n",
      "loss: 0.000076  [12928/30013]\n",
      "loss: 0.000076  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000070  [  128/30013]\n",
      "loss: 0.000067  [12928/30013]\n",
      "loss: 0.000067  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000061  [  128/30013]\n",
      "loss: 0.000058  [12928/30013]\n",
      "loss: 0.000059  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000054  [  128/30013]\n",
      "loss: 0.000051  [12928/30013]\n",
      "loss: 0.000051  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000047  [  128/30013]\n",
      "loss: 0.000045  [12928/30013]\n",
      "loss: 0.000045  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000042  [  128/30013]\n",
      "loss: 0.000040  [12928/30013]\n",
      "loss: 0.000040  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000037  [  128/30013]\n",
      "loss: 0.000035  [12928/30013]\n",
      "loss: 0.000035  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000032  [  128/30013]\n",
      "loss: 0.000031  [12928/30013]\n",
      "loss: 0.000031  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000028  [  128/30013]\n",
      "loss: 0.000027  [12928/30013]\n",
      "loss: 0.000027  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000025  [  128/30013]\n",
      "loss: 0.000024  [12928/30013]\n",
      "loss: 0.000024  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000022  [  128/30013]\n",
      "loss: 0.000021  [12928/30013]\n",
      "loss: 0.000021  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000019  [  128/30013]\n",
      "loss: 0.000018  [12928/30013]\n",
      "loss: 0.000018  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000017  [  128/30013]\n",
      "loss: 0.000016  [12928/30013]\n",
      "loss: 0.000016  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000015  [  128/30013]\n",
      "loss: 0.000014  [12928/30013]\n",
      "loss: 0.000014  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000013  [  128/30013]\n",
      "loss: 0.000013  [12928/30013]\n",
      "loss: 0.000013  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000011  [  128/30013]\n",
      "loss: 0.000011  [12928/30013]\n",
      "loss: 0.000011  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000010  [  128/30013]\n",
      "loss: 0.000010  [12928/30013]\n",
      "loss: 0.000010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000009  [  128/30013]\n",
      "loss: 0.000009  [12928/30013]\n",
      "loss: 0.000009  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000008  [  128/30013]\n",
      "loss: 0.000008  [12928/30013]\n",
      "loss: 0.000008  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000007  [  128/30013]\n",
      "loss: 0.000007  [12928/30013]\n",
      "loss: 0.000007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000006  [  128/30013]\n",
      "loss: 0.000006  [12928/30013]\n",
      "loss: 0.000006  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000005  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000005  [  128/30013]\n",
      "loss: 0.000005  [12928/30013]\n",
      "loss: 0.000005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000004  [  128/30013]\n",
      "loss: 0.000004  [12928/30013]\n",
      "loss: 0.000004  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000003  [  128/30013]\n",
      "loss: 0.000003  [12928/30013]\n",
      "loss: 0.000003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000002  [  128/30013]\n",
      "loss: 0.000002  [12928/30013]\n",
      "loss: 0.000002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 6.1%, Avg loss: 8.212510 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 6.4%, Avg loss: 8.101015 \n",
      "\n",
      " Error: \n",
      " Accuracy: 6.4%  \n",
      "\n",
      "AUC value is: 0.49072977103703475\n",
      "Accuracy is: 0.49051666666666666\n",
      " Error: \n",
      " Accuracy: 6.4%  \n",
      "\n",
      "AUC value is: 0.4994627148991098\n",
      "Accuracy is: 0.49978333333333336\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.607079  [  128/30013]\n",
      "loss: 4.459784  [12928/30013]\n",
      "loss: 4.357311  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 3.1%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 4.140593  [  128/30013]\n",
      "loss: 4.156826  [12928/30013]\n",
      "loss: 4.274594  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 5.5%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 3.895622  [  128/30013]\n",
      "loss: 4.016322  [12928/30013]\n",
      "loss: 4.148347  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 7.5%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 3.731396  [  128/30013]\n",
      "loss: 3.896566  [12928/30013]\n",
      "loss: 4.024113  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 9.1%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 3.614724  [  128/30013]\n",
      "loss: 3.782290  [12928/30013]\n",
      "loss: 3.917027  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 10.8%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 3.495706  [  128/30013]\n",
      "loss: 3.664375  [12928/30013]\n",
      "loss: 3.812935  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 12.7%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 3.374599  [  128/30013]\n",
      "loss: 3.538613  [12928/30013]\n",
      "loss: 3.701596  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 14.7%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 3.252784  [  128/30013]\n",
      "loss: 3.404613  [12928/30013]\n",
      "loss: 3.580524  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 17.0%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 3.124543  [  128/30013]\n",
      "loss: 3.264950  [12928/30013]\n",
      "loss: 3.450084  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 19.5%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 2.988744  [  128/30013]\n",
      "loss: 3.121930  [12928/30013]\n",
      "loss: 3.309120  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 22.3%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 2.847578  [  128/30013]\n",
      "loss: 2.975627  [12928/30013]\n",
      "loss: 3.156636  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 25.2%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 2.700567  [  128/30013]\n",
      "loss: 2.823777  [12928/30013]\n",
      "loss: 2.993363  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 28.8%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 2.545177  [  128/30013]\n",
      "loss: 2.664827  [12928/30013]\n",
      "loss: 2.822151  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 33.0%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 2.378752  [  128/30013]\n",
      "loss: 2.497307  [12928/30013]\n",
      "loss: 2.643322  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 37.5%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 2.200693  [  128/30013]\n",
      "loss: 2.319044  [12928/30013]\n",
      "loss: 2.453620  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.3%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 2.012292  [  128/30013]\n",
      "loss: 2.131350  [12928/30013]\n",
      "loss: 2.253072  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.6%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 1.816950  [  128/30013]\n",
      "loss: 1.939628  [12928/30013]\n",
      "loss: 2.047421  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 53.4%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 1.619293  [  128/30013]\n",
      "loss: 1.749292  [12928/30013]\n",
      "loss: 1.840770  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.5%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 1.423876  [  128/30013]\n",
      "loss: 1.562987  [12928/30013]\n",
      "loss: 1.631836  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.7%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 1.235053  [  128/30013]\n",
      "loss: 1.380659  [12928/30013]\n",
      "loss: 1.419680  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.8%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 1.057132  [  128/30013]\n",
      "loss: 1.200649  [12928/30013]\n",
      "loss: 1.207036  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.7%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.891574  [  128/30013]\n",
      "loss: 1.022682  [12928/30013]\n",
      "loss: 1.000401  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.4%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.737865  [  128/30013]\n",
      "loss: 0.849921  [12928/30013]\n",
      "loss: 0.808149  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.5%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.597464  [  128/30013]\n",
      "loss: 0.687960  [12928/30013]\n",
      "loss: 0.638347  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.7%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.473466  [  128/30013]\n",
      "loss: 0.542549  [12928/30013]\n",
      "loss: 0.495308  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.9%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.368024  [  128/30013]\n",
      "loss: 0.415627  [12928/30013]\n",
      "loss: 0.378799  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.9%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.281693  [  128/30013]\n",
      "loss: 0.310587  [12928/30013]\n",
      "loss: 0.286218  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.213869  [  128/30013]\n",
      "loss: 0.231417  [12928/30013]\n",
      "loss: 0.214554  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.161651  [  128/30013]\n",
      "loss: 0.173920  [12928/30013]\n",
      "loss: 0.161809  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.122679  [  128/30013]\n",
      "loss: 0.131385  [12928/30013]\n",
      "loss: 0.123827  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.094523  [  128/30013]\n",
      "loss: 0.099966  [12928/30013]\n",
      "loss: 0.096080  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.074132  [  128/30013]\n",
      "loss: 0.077071  [12928/30013]\n",
      "loss: 0.075135  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.059126  [  128/30013]\n",
      "loss: 0.060668  [12928/30013]\n",
      "loss: 0.059135  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.047921  [  128/30013]\n",
      "loss: 0.048969  [12928/30013]\n",
      "loss: 0.047524  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.039367  [  128/30013]\n",
      "loss: 0.040245  [12928/30013]\n",
      "loss: 0.039129  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.032686  [  128/30013]\n",
      "loss: 0.033467  [12928/30013]\n",
      "loss: 0.032577  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.027367  [  128/30013]\n",
      "loss: 0.028073  [12928/30013]\n",
      "loss: 0.027339  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.023069  [  128/30013]\n",
      "loss: 0.023708  [12928/30013]\n",
      "loss: 0.023089  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.019556  [  128/30013]\n",
      "loss: 0.020131  [12928/30013]\n",
      "loss: 0.019600  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.016656  [  128/30013]\n",
      "loss: 0.017171  [12928/30013]\n",
      "loss: 0.016708  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.014244  [  128/30013]\n",
      "loss: 0.014700  [12928/30013]\n",
      "loss: 0.014292  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.012223  [  128/30013]\n",
      "loss: 0.012625  [12928/30013]\n",
      "loss: 0.012262  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.010520  [  128/30013]\n",
      "loss: 0.010872  [12928/30013]\n",
      "loss: 0.010545  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.009078  [  128/30013]\n",
      "loss: 0.009383  [12928/30013]\n",
      "loss: 0.009088  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.007851  [  128/30013]\n",
      "loss: 0.008115  [12928/30013]\n",
      "loss: 0.007845  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.006804  [  128/30013]\n",
      "loss: 0.007030  [12928/30013]\n",
      "loss: 0.006782  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.005906  [  128/30013]\n",
      "loss: 0.006099  [12928/30013]\n",
      "loss: 0.005872  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.005134  [  128/30013]\n",
      "loss: 0.005299  [12928/30013]\n",
      "loss: 0.005090  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.004469  [  128/30013]\n",
      "loss: 0.004608  [12928/30013]\n",
      "loss: 0.004418  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.003894  [  128/30013]\n",
      "loss: 0.004012  [12928/30013]\n",
      "loss: 0.003840  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.003396  [  128/30013]\n",
      "loss: 0.003496  [12928/30013]\n",
      "loss: 0.003341  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.002965  [  128/30013]\n",
      "loss: 0.003049  [12928/30013]\n",
      "loss: 0.002910  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.002590  [  128/30013]\n",
      "loss: 0.002661  [12928/30013]\n",
      "loss: 0.002536  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.002265  [  128/30013]\n",
      "loss: 0.002324  [12928/30013]\n",
      "loss: 0.002213  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.001981  [  128/30013]\n",
      "loss: 0.002031  [12928/30013]\n",
      "loss: 0.001932  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.001734  [  128/30013]\n",
      "loss: 0.001776  [12928/30013]\n",
      "loss: 0.001688  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.001519  [  128/30013]\n",
      "loss: 0.001553  [12928/30013]\n",
      "loss: 0.001475  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.001330  [  128/30013]\n",
      "loss: 0.001360  [12928/30013]\n",
      "loss: 0.001290  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.001166  [  128/30013]\n",
      "loss: 0.001190  [12928/30013]\n",
      "loss: 0.001129  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.001022  [  128/30013]\n",
      "loss: 0.001043  [12928/30013]\n",
      "loss: 0.000988  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.000896  [  128/30013]\n",
      "loss: 0.000914  [12928/30013]\n",
      "loss: 0.000865  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.000786  [  128/30013]\n",
      "loss: 0.000801  [12928/30013]\n",
      "loss: 0.000757  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.000690  [  128/30013]\n",
      "loss: 0.000702  [12928/30013]\n",
      "loss: 0.000663  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.000605  [  128/30013]\n",
      "loss: 0.000616  [12928/30013]\n",
      "loss: 0.000581  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.000531  [  128/30013]\n",
      "loss: 0.000541  [12928/30013]\n",
      "loss: 0.000509  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.000467  [  128/30013]\n",
      "loss: 0.000475  [12928/30013]\n",
      "loss: 0.000447  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.000410  [  128/30013]\n",
      "loss: 0.000417  [12928/30013]\n",
      "loss: 0.000392  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000360  [  128/30013]\n",
      "loss: 0.000366  [12928/30013]\n",
      "loss: 0.000344  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000316  [  128/30013]\n",
      "loss: 0.000322  [12928/30013]\n",
      "loss: 0.000301  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000278  [  128/30013]\n",
      "loss: 0.000283  [12928/30013]\n",
      "loss: 0.000265  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000244  [  128/30013]\n",
      "loss: 0.000248  [12928/30013]\n",
      "loss: 0.000232  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000214  [  128/30013]\n",
      "loss: 0.000218  [12928/30013]\n",
      "loss: 0.000204  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000188  [  128/30013]\n",
      "loss: 0.000192  [12928/30013]\n",
      "loss: 0.000179  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000166  [  128/30013]\n",
      "loss: 0.000169  [12928/30013]\n",
      "loss: 0.000157  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000146  [  128/30013]\n",
      "loss: 0.000149  [12928/30013]\n",
      "loss: 0.000138  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000128  [  128/30013]\n",
      "loss: 0.000131  [12928/30013]\n",
      "loss: 0.000122  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000113  [  128/30013]\n",
      "loss: 0.000115  [12928/30013]\n",
      "loss: 0.000107  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000099  [  128/30013]\n",
      "loss: 0.000101  [12928/30013]\n",
      "loss: 0.000094  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000087  [  128/30013]\n",
      "loss: 0.000089  [12928/30013]\n",
      "loss: 0.000083  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000077  [  128/30013]\n",
      "loss: 0.000078  [12928/30013]\n",
      "loss: 0.000073  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000068  [  128/30013]\n",
      "loss: 0.000069  [12928/30013]\n",
      "loss: 0.000064  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000059  [  128/30013]\n",
      "loss: 0.000061  [12928/30013]\n",
      "loss: 0.000056  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000052  [  128/30013]\n",
      "loss: 0.000054  [12928/30013]\n",
      "loss: 0.000050  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000046  [  128/30013]\n",
      "loss: 0.000047  [12928/30013]\n",
      "loss: 0.000044  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000041  [  128/30013]\n",
      "loss: 0.000042  [12928/30013]\n",
      "loss: 0.000038  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000036  [  128/30013]\n",
      "loss: 0.000037  [12928/30013]\n",
      "loss: 0.000034  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000031  [  128/30013]\n",
      "loss: 0.000032  [12928/30013]\n",
      "loss: 0.000030  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000028  [  128/30013]\n",
      "loss: 0.000028  [12928/30013]\n",
      "loss: 0.000026  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000024  [  128/30013]\n",
      "loss: 0.000025  [12928/30013]\n",
      "loss: 0.000023  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000022  [  128/30013]\n",
      "loss: 0.000022  [12928/30013]\n",
      "loss: 0.000020  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000019  [  128/30013]\n",
      "loss: 0.000019  [12928/30013]\n",
      "loss: 0.000018  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000017  [  128/30013]\n",
      "loss: 0.000017  [12928/30013]\n",
      "loss: 0.000016  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000015  [  128/30013]\n",
      "loss: 0.000015  [12928/30013]\n",
      "loss: 0.000014  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000013  [  128/30013]\n",
      "loss: 0.000013  [12928/30013]\n",
      "loss: 0.000012  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000011  [  128/30013]\n",
      "loss: 0.000012  [12928/30013]\n",
      "loss: 0.000011  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000010  [  128/30013]\n",
      "loss: 0.000010  [12928/30013]\n",
      "loss: 0.000010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000009  [  128/30013]\n",
      "loss: 0.000009  [12928/30013]\n",
      "loss: 0.000008  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000008  [  128/30013]\n",
      "loss: 0.000008  [12928/30013]\n",
      "loss: 0.000007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000007  [  128/30013]\n",
      "loss: 0.000007  [12928/30013]\n",
      "loss: 0.000007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000006  [  128/30013]\n",
      "loss: 0.000006  [12928/30013]\n",
      "loss: 0.000006  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 11.5%, Avg loss: 6.757771 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 11.5%, Avg loss: 6.699120 \n",
      "\n",
      " Error: \n",
      " Accuracy: 11.5%  \n",
      "\n",
      "AUC value is: 0.4930738103660821\n",
      "Accuracy is: 0.49056666666666665\n",
      " Error: \n",
      " Accuracy: 11.5%  \n",
      "\n",
      "AUC value is: 0.5004050984094017\n",
      "Accuracy is: 0.4997666666666667\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.606715  [  128/30013]\n",
      "loss: 4.088202  [12928/30013]\n",
      "loss: 3.717566  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 5.8%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 3.819006  [  128/30013]\n",
      "loss: 3.613777  [12928/30013]\n",
      "loss: 3.539797  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 11.0%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 3.719579  [  128/30013]\n",
      "loss: 3.505133  [12928/30013]\n",
      "loss: 3.468089  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 12.7%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 3.611294  [  128/30013]\n",
      "loss: 3.453977  [12928/30013]\n",
      "loss: 3.393725  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 14.5%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 3.503774  [  128/30013]\n",
      "loss: 3.381705  [12928/30013]\n",
      "loss: 3.314325  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 16.2%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 3.399627  [  128/30013]\n",
      "loss: 3.296530  [12928/30013]\n",
      "loss: 3.231878  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 17.9%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 3.309002  [  128/30013]\n",
      "loss: 3.209932  [12928/30013]\n",
      "loss: 3.150856  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 19.5%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 3.219246  [  128/30013]\n",
      "loss: 3.130173  [12928/30013]\n",
      "loss: 3.067401  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 21.1%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 3.129367  [  128/30013]\n",
      "loss: 3.057964  [12928/30013]\n",
      "loss: 2.979252  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 22.7%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 3.040526  [  128/30013]\n",
      "loss: 2.986201  [12928/30013]\n",
      "loss: 2.888010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 24.6%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 2.952782  [  128/30013]\n",
      "loss: 2.907955  [12928/30013]\n",
      "loss: 2.797176  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 26.5%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 2.865600  [  128/30013]\n",
      "loss: 2.823278  [12928/30013]\n",
      "loss: 2.707857  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 28.6%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 2.779077  [  128/30013]\n",
      "loss: 2.734828  [12928/30013]\n",
      "loss: 2.620028  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 30.7%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 2.691732  [  128/30013]\n",
      "loss: 2.645420  [12928/30013]\n",
      "loss: 2.534698  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 32.8%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 2.600787  [  128/30013]\n",
      "loss: 2.557172  [12928/30013]\n",
      "loss: 2.451999  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 35.1%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 2.503483  [  128/30013]\n",
      "loss: 2.470407  [12928/30013]\n",
      "loss: 2.370782  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 37.3%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 2.399383  [  128/30013]\n",
      "loss: 2.382678  [12928/30013]\n",
      "loss: 2.289308  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 39.8%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 2.291137  [  128/30013]\n",
      "loss: 2.290408  [12928/30013]\n",
      "loss: 2.205585  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.6%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 2.182653  [  128/30013]\n",
      "loss: 2.191925  [12928/30013]\n",
      "loss: 2.117859  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 45.2%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 2.075557  [  128/30013]\n",
      "loss: 2.088325  [12928/30013]\n",
      "loss: 2.025040  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 48.2%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 1.968484  [  128/30013]\n",
      "loss: 1.981660  [12928/30013]\n",
      "loss: 1.927423  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.3%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 1.859605  [  128/30013]\n",
      "loss: 1.873638  [12928/30013]\n",
      "loss: 1.826425  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 54.5%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 1.748570  [  128/30013]\n",
      "loss: 1.765122  [12928/30013]\n",
      "loss: 1.722730  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 57.9%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 1.636568  [  128/30013]\n",
      "loss: 1.656039  [12928/30013]\n",
      "loss: 1.616099  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.3%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 1.524871  [  128/30013]\n",
      "loss: 1.545425  [12928/30013]\n",
      "loss: 1.506231  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.0%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 1.414154  [  128/30013]\n",
      "loss: 1.432131  [12928/30013]\n",
      "loss: 1.393257  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.8%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 1.304524  [  128/30013]\n",
      "loss: 1.316407  [12928/30013]\n",
      "loss: 1.278315  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.3%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 1.196051  [  128/30013]\n",
      "loss: 1.200368  [12928/30013]\n",
      "loss: 1.163388  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.9%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 1.089477  [  128/30013]\n",
      "loss: 1.086188  [12928/30013]\n",
      "loss: 1.049415  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.5%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.985391  [  128/30013]\n",
      "loss: 0.974381  [12928/30013]\n",
      "loss: 0.936480  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.0%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.883751  [  128/30013]\n",
      "loss: 0.865167  [12928/30013]\n",
      "loss: 0.825330  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 86.2%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.784270  [  128/30013]\n",
      "loss: 0.759818  [12928/30013]\n",
      "loss: 0.717771  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.1%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.687764  [  128/30013]\n",
      "loss: 0.659492  [12928/30013]\n",
      "loss: 0.616479  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.7%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.597342  [  128/30013]\n",
      "loss: 0.565109  [12928/30013]\n",
      "loss: 0.523078  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.8%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.514721  [  128/30013]\n",
      "loss: 0.478036  [12928/30013]\n",
      "loss: 0.437856  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.5%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.439424  [  128/30013]\n",
      "loss: 0.399844  [12928/30013]\n",
      "loss: 0.362020  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.9%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.371766  [  128/30013]\n",
      "loss: 0.331674  [12928/30013]\n",
      "loss: 0.297864  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.312466  [  128/30013]\n",
      "loss: 0.273933  [12928/30013]\n",
      "loss: 0.245772  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.261772  [  128/30013]\n",
      "loss: 0.225497  [12928/30013]\n",
      "loss: 0.203470  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.219371  [  128/30013]\n",
      "loss: 0.185034  [12928/30013]\n",
      "loss: 0.168790  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.183824  [  128/30013]\n",
      "loss: 0.151546  [12928/30013]\n",
      "loss: 0.139924  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.152950  [  128/30013]\n",
      "loss: 0.124012  [12928/30013]\n",
      "loss: 0.115900  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.125170  [  128/30013]\n",
      "loss: 0.101828  [12928/30013]\n",
      "loss: 0.096088  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.101210  [  128/30013]\n",
      "loss: 0.084301  [12928/30013]\n",
      "loss: 0.079889  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.082663  [  128/30013]\n",
      "loss: 0.070362  [12928/30013]\n",
      "loss: 0.066737  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.068163  [  128/30013]\n",
      "loss: 0.059227  [12928/30013]\n",
      "loss: 0.056116  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.056730  [  128/30013]\n",
      "loss: 0.050158  [12928/30013]\n",
      "loss: 0.047514  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.047631  [  128/30013]\n",
      "loss: 0.042681  [12928/30013]\n",
      "loss: 0.040433  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.040266  [  128/30013]\n",
      "loss: 0.036484  [12928/30013]\n",
      "loss: 0.034539  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.034240  [  128/30013]\n",
      "loss: 0.031341  [12928/30013]\n",
      "loss: 0.029613  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.029250  [  128/30013]\n",
      "loss: 0.027034  [12928/30013]\n",
      "loss: 0.025489  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.025080  [  128/30013]\n",
      "loss: 0.023390  [12928/30013]\n",
      "loss: 0.022009  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.021578  [  128/30013]\n",
      "loss: 0.020277  [12928/30013]\n",
      "loss: 0.019062  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.018621  [  128/30013]\n",
      "loss: 0.017607  [12928/30013]\n",
      "loss: 0.016553  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.016109  [  128/30013]\n",
      "loss: 0.015309  [12928/30013]\n",
      "loss: 0.014403  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.013964  [  128/30013]\n",
      "loss: 0.013327  [12928/30013]\n",
      "loss: 0.012554  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.012130  [  128/30013]\n",
      "loss: 0.011614  [12928/30013]\n",
      "loss: 0.010957  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.010561  [  128/30013]\n",
      "loss: 0.010130  [12928/30013]\n",
      "loss: 0.009576  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.009216  [  128/30013]\n",
      "loss: 0.008841  [12928/30013]\n",
      "loss: 0.008377  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.008055  [  128/30013]\n",
      "loss: 0.007721  [12928/30013]\n",
      "loss: 0.007335  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.007049  [  128/30013]\n",
      "loss: 0.006747  [12928/30013]\n",
      "loss: 0.006427  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.006174  [  128/30013]\n",
      "loss: 0.005898  [12928/30013]\n",
      "loss: 0.005636  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.005411  [  128/30013]\n",
      "loss: 0.005159  [12928/30013]\n",
      "loss: 0.004945  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.004746  [  128/30013]\n",
      "loss: 0.004516  [12928/30013]\n",
      "loss: 0.004342  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.004164  [  128/30013]\n",
      "loss: 0.003956  [12928/30013]\n",
      "loss: 0.003814  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.003656  [  128/30013]\n",
      "loss: 0.003468  [12928/30013]\n",
      "loss: 0.003353  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.003210  [  128/30013]\n",
      "loss: 0.003043  [12928/30013]\n",
      "loss: 0.002948  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.002821  [  128/30013]\n",
      "loss: 0.002672  [12928/30013]\n",
      "loss: 0.002595  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.002480  [  128/30013]\n",
      "loss: 0.002348  [12928/30013]\n",
      "loss: 0.002284  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.002181  [  128/30013]\n",
      "loss: 0.002065  [12928/30013]\n",
      "loss: 0.002012  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.001919  [  128/30013]\n",
      "loss: 0.001816  [12928/30013]\n",
      "loss: 0.001773  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.001689  [  128/30013]\n",
      "loss: 0.001599  [12928/30013]\n",
      "loss: 0.001563  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.001488  [  128/30013]\n",
      "loss: 0.001407  [12928/30013]\n",
      "loss: 0.001379  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.001311  [  128/30013]\n",
      "loss: 0.001239  [12928/30013]\n",
      "loss: 0.001216  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.001155  [  128/30013]\n",
      "loss: 0.001091  [12928/30013]\n",
      "loss: 0.001073  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.001018  [  128/30013]\n",
      "loss: 0.000961  [12928/30013]\n",
      "loss: 0.000948  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000898  [  128/30013]\n",
      "loss: 0.000847  [12928/30013]\n",
      "loss: 0.000836  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000792  [  128/30013]\n",
      "loss: 0.000747  [12928/30013]\n",
      "loss: 0.000739  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000698  [  128/30013]\n",
      "loss: 0.000658  [12928/30013]\n",
      "loss: 0.000652  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000616  [  128/30013]\n",
      "loss: 0.000580  [12928/30013]\n",
      "loss: 0.000576  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000544  [  128/30013]\n",
      "loss: 0.000512  [12928/30013]\n",
      "loss: 0.000509  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000480  [  128/30013]\n",
      "loss: 0.000451  [12928/30013]\n",
      "loss: 0.000449  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000424  [  128/30013]\n",
      "loss: 0.000398  [12928/30013]\n",
      "loss: 0.000397  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000375  [  128/30013]\n",
      "loss: 0.000351  [12928/30013]\n",
      "loss: 0.000351  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000331  [  128/30013]\n",
      "loss: 0.000310  [12928/30013]\n",
      "loss: 0.000310  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000292  [  128/30013]\n",
      "loss: 0.000274  [12928/30013]\n",
      "loss: 0.000274  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000258  [  128/30013]\n",
      "loss: 0.000242  [12928/30013]\n",
      "loss: 0.000242  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000228  [  128/30013]\n",
      "loss: 0.000213  [12928/30013]\n",
      "loss: 0.000214  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000202  [  128/30013]\n",
      "loss: 0.000188  [12928/30013]\n",
      "loss: 0.000190  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000179  [  128/30013]\n",
      "loss: 0.000166  [12928/30013]\n",
      "loss: 0.000168  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000158  [  128/30013]\n",
      "loss: 0.000147  [12928/30013]\n",
      "loss: 0.000148  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000140  [  128/30013]\n",
      "loss: 0.000130  [12928/30013]\n",
      "loss: 0.000131  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000124  [  128/30013]\n",
      "loss: 0.000115  [12928/30013]\n",
      "loss: 0.000116  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000109  [  128/30013]\n",
      "loss: 0.000101  [12928/30013]\n",
      "loss: 0.000103  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000097  [  128/30013]\n",
      "loss: 0.000090  [12928/30013]\n",
      "loss: 0.000091  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000086  [  128/30013]\n",
      "loss: 0.000079  [12928/30013]\n",
      "loss: 0.000080  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000076  [  128/30013]\n",
      "loss: 0.000070  [12928/30013]\n",
      "loss: 0.000071  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000067  [  128/30013]\n",
      "loss: 0.000062  [12928/30013]\n",
      "loss: 0.000063  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000059  [  128/30013]\n",
      "loss: 0.000055  [12928/30013]\n",
      "loss: 0.000056  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000053  [  128/30013]\n",
      "loss: 0.000048  [12928/30013]\n",
      "loss: 0.000049  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 34.7%, Avg loss: 3.440421 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 36.3%, Avg loss: 3.302321 \n",
      "\n",
      " Error: \n",
      " Accuracy: 36.3%  \n",
      "\n",
      "AUC value is: 0.4990029737016695\n",
      "Accuracy is: 0.4909833333333333\n",
      " Error: \n",
      " Accuracy: 36.3%  \n",
      "\n",
      "AUC value is: 0.5067154173721173\n",
      "Accuracy is: 0.5007833333333334\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.605300  [  128/30013]\n",
      "loss: 3.579598  [12928/30013]\n",
      "loss: 3.059865  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 10.1%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 2.998333  [  128/30013]\n",
      "loss: 2.762079  [12928/30013]\n",
      "loss: 2.727778  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 21.5%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 2.863598  [  128/30013]\n",
      "loss: 2.610004  [12928/30013]\n",
      "loss: 2.634688  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 24.3%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 2.788186  [  128/30013]\n",
      "loss: 2.526335  [12928/30013]\n",
      "loss: 2.551325  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 26.0%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 2.723092  [  128/30013]\n",
      "loss: 2.467448  [12928/30013]\n",
      "loss: 2.459105  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 27.6%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 2.647521  [  128/30013]\n",
      "loss: 2.413133  [12928/30013]\n",
      "loss: 2.386446  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 29.2%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 2.580351  [  128/30013]\n",
      "loss: 2.381475  [12928/30013]\n",
      "loss: 2.344212  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 30.3%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 2.530812  [  128/30013]\n",
      "loss: 2.354972  [12928/30013]\n",
      "loss: 2.308770  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 31.4%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 2.483146  [  128/30013]\n",
      "loss: 2.327030  [12928/30013]\n",
      "loss: 2.273808  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 32.6%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 2.432600  [  128/30013]\n",
      "loss: 2.291239  [12928/30013]\n",
      "loss: 2.237026  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 33.7%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 2.381764  [  128/30013]\n",
      "loss: 2.245194  [12928/30013]\n",
      "loss: 2.197579  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 34.9%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 2.334262  [  128/30013]\n",
      "loss: 2.192116  [12928/30013]\n",
      "loss: 2.154519  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 36.4%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 2.289514  [  128/30013]\n",
      "loss: 2.135034  [12928/30013]\n",
      "loss: 2.111339  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 37.8%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 2.245737  [  128/30013]\n",
      "loss: 2.076926  [12928/30013]\n",
      "loss: 2.070068  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 39.1%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 2.202128  [  128/30013]\n",
      "loss: 2.021585  [12928/30013]\n",
      "loss: 2.029520  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 40.5%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 2.157513  [  128/30013]\n",
      "loss: 1.969859  [12928/30013]\n",
      "loss: 1.986747  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 41.9%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 2.111706  [  128/30013]\n",
      "loss: 1.920075  [12928/30013]\n",
      "loss: 1.940376  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 43.4%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 2.064070  [  128/30013]\n",
      "loss: 1.869489  [12928/30013]\n",
      "loss: 1.892467  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 45.1%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 2.013356  [  128/30013]\n",
      "loss: 1.815725  [12928/30013]\n",
      "loss: 1.843927  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 46.7%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 1.958745  [  128/30013]\n",
      "loss: 1.758271  [12928/30013]\n",
      "loss: 1.793877  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 48.6%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 1.899796  [  128/30013]\n",
      "loss: 1.698251  [12928/30013]\n",
      "loss: 1.741404  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 50.3%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 1.837161  [  128/30013]\n",
      "loss: 1.636445  [12928/30013]\n",
      "loss: 1.685694  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 52.2%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 1.772639  [  128/30013]\n",
      "loss: 1.573133  [12928/30013]\n",
      "loss: 1.625857  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 54.2%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 1.706949  [  128/30013]\n",
      "loss: 1.508947  [12928/30013]\n",
      "loss: 1.562502  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 56.2%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 1.639723  [  128/30013]\n",
      "loss: 1.444025  [12928/30013]\n",
      "loss: 1.497222  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 58.3%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 1.571061  [  128/30013]\n",
      "loss: 1.378475  [12928/30013]\n",
      "loss: 1.431111  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.3%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 1.501360  [  128/30013]\n",
      "loss: 1.312875  [12928/30013]\n",
      "loss: 1.364767  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.6%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 1.431166  [  128/30013]\n",
      "loss: 1.247941  [12928/30013]\n",
      "loss: 1.298665  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 64.6%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 1.360909  [  128/30013]\n",
      "loss: 1.184141  [12928/30013]\n",
      "loss: 1.232837  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.1%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 1.290304  [  128/30013]\n",
      "loss: 1.121108  [12928/30013]\n",
      "loss: 1.166563  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.4%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 1.218463  [  128/30013]\n",
      "loss: 1.057970  [12928/30013]\n",
      "loss: 1.098943  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.8%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 1.144690  [  128/30013]\n",
      "loss: 0.994883  [12928/30013]\n",
      "loss: 1.029651  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.2%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 1.069164  [  128/30013]\n",
      "loss: 0.932835  [12928/30013]\n",
      "loss: 0.959054  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.7%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.992540  [  128/30013]\n",
      "loss: 0.872605  [12928/30013]\n",
      "loss: 0.887852  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.1%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.915315  [  128/30013]\n",
      "loss: 0.814098  [12928/30013]\n",
      "loss: 0.817081  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.6%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.838025  [  128/30013]\n",
      "loss: 0.756891  [12928/30013]\n",
      "loss: 0.748185  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.9%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.761887  [  128/30013]\n",
      "loss: 0.700899  [12928/30013]\n",
      "loss: 0.682873  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 86.2%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.688128  [  128/30013]\n",
      "loss: 0.645759  [12928/30013]\n",
      "loss: 0.621977  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.3%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.617551  [  128/30013]\n",
      "loss: 0.590720  [12928/30013]\n",
      "loss: 0.565154  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.1%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.551153  [  128/30013]\n",
      "loss: 0.535587  [12928/30013]\n",
      "loss: 0.511675  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.9%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.489744  [  128/30013]\n",
      "loss: 0.481129  [12928/30013]\n",
      "loss: 0.461218  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.4%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.433660  [  128/30013]\n",
      "loss: 0.428042  [12928/30013]\n",
      "loss: 0.413674  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.8%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.382432  [  128/30013]\n",
      "loss: 0.377421  [12928/30013]\n",
      "loss: 0.368886  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.335618  [  128/30013]\n",
      "loss: 0.330703  [12928/30013]\n",
      "loss: 0.326657  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.9%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.293054  [  128/30013]\n",
      "loss: 0.288676  [12928/30013]\n",
      "loss: 0.286843  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.6%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.254484  [  128/30013]\n",
      "loss: 0.250753  [12928/30013]\n",
      "loss: 0.249510  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.2%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.219701  [  128/30013]\n",
      "loss: 0.215901  [12928/30013]\n",
      "loss: 0.214945  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.188870  [  128/30013]\n",
      "loss: 0.184618  [12928/30013]\n",
      "loss: 0.183494  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.161962  [  128/30013]\n",
      "loss: 0.157902  [12928/30013]\n",
      "loss: 0.155381  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.138434  [  128/30013]\n",
      "loss: 0.135140  [12928/30013]\n",
      "loss: 0.131057  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.118634  [  128/30013]\n",
      "loss: 0.115406  [12928/30013]\n",
      "loss: 0.111674  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.102054  [  128/30013]\n",
      "loss: 0.098103  [12928/30013]\n",
      "loss: 0.095754  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.087636  [  128/30013]\n",
      "loss: 0.083292  [12928/30013]\n",
      "loss: 0.082226  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.074854  [  128/30013]\n",
      "loss: 0.071149  [12928/30013]\n",
      "loss: 0.070827  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.063954  [  128/30013]\n",
      "loss: 0.060938  [12928/30013]\n",
      "loss: 0.061135  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.054634  [  128/30013]\n",
      "loss: 0.052300  [12928/30013]\n",
      "loss: 0.052777  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.046875  [  128/30013]\n",
      "loss: 0.044995  [12928/30013]\n",
      "loss: 0.045590  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.040292  [  128/30013]\n",
      "loss: 0.038716  [12928/30013]\n",
      "loss: 0.039375  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.034731  [  128/30013]\n",
      "loss: 0.033310  [12928/30013]\n",
      "loss: 0.034049  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.030040  [  128/30013]\n",
      "loss: 0.028675  [12928/30013]\n",
      "loss: 0.029481  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.026008  [  128/30013]\n",
      "loss: 0.024811  [12928/30013]\n",
      "loss: 0.025556  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.022584  [  128/30013]\n",
      "loss: 0.021599  [12928/30013]\n",
      "loss: 0.022218  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.019568  [  128/30013]\n",
      "loss: 0.018753  [12928/30013]\n",
      "loss: 0.019359  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.016972  [  128/30013]\n",
      "loss: 0.016324  [12928/30013]\n",
      "loss: 0.016880  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.014778  [  128/30013]\n",
      "loss: 0.014239  [12928/30013]\n",
      "loss: 0.014750  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.012892  [  128/30013]\n",
      "loss: 0.012456  [12928/30013]\n",
      "loss: 0.012877  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.011266  [  128/30013]\n",
      "loss: 0.010907  [12928/30013]\n",
      "loss: 0.011277  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.009858  [  128/30013]\n",
      "loss: 0.009572  [12928/30013]\n",
      "loss: 0.009886  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.008636  [  128/30013]\n",
      "loss: 0.008415  [12928/30013]\n",
      "loss: 0.008674  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.007573  [  128/30013]\n",
      "loss: 0.007411  [12928/30013]\n",
      "loss: 0.007615  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.006648  [  128/30013]\n",
      "loss: 0.006536  [12928/30013]\n",
      "loss: 0.006689  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.005841  [  128/30013]\n",
      "loss: 0.005769  [12928/30013]\n",
      "loss: 0.005877  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.005137  [  128/30013]\n",
      "loss: 0.005096  [12928/30013]\n",
      "loss: 0.005166  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.004522  [  128/30013]\n",
      "loss: 0.004505  [12928/30013]\n",
      "loss: 0.004543  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.003986  [  128/30013]\n",
      "loss: 0.003986  [12928/30013]\n",
      "loss: 0.003998  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.003516  [  128/30013]\n",
      "loss: 0.003528  [12928/30013]\n",
      "loss: 0.003520  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.003104  [  128/30013]\n",
      "loss: 0.003124  [12928/30013]\n",
      "loss: 0.003102  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.002743  [  128/30013]\n",
      "loss: 0.002766  [12928/30013]\n",
      "loss: 0.002734  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.002425  [  128/30013]\n",
      "loss: 0.002449  [12928/30013]\n",
      "loss: 0.002410  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.002145  [  128/30013]\n",
      "loss: 0.002168  [12928/30013]\n",
      "loss: 0.002125  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.001898  [  128/30013]\n",
      "loss: 0.001918  [12928/30013]\n",
      "loss: 0.001874  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.001678  [  128/30013]\n",
      "loss: 0.001698  [12928/30013]\n",
      "loss: 0.001653  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.001484  [  128/30013]\n",
      "loss: 0.001502  [12928/30013]\n",
      "loss: 0.001459  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.001311  [  128/30013]\n",
      "loss: 0.001330  [12928/30013]\n",
      "loss: 0.001287  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.001157  [  128/30013]\n",
      "loss: 0.001177  [12928/30013]\n",
      "loss: 0.001136  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.001022  [  128/30013]\n",
      "loss: 0.001042  [12928/30013]\n",
      "loss: 0.001003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000904  [  128/30013]\n",
      "loss: 0.000922  [12928/30013]\n",
      "loss: 0.000885  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000800  [  128/30013]\n",
      "loss: 0.000816  [12928/30013]\n",
      "loss: 0.000781  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000708  [  128/30013]\n",
      "loss: 0.000722  [12928/30013]\n",
      "loss: 0.000689  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000627  [  128/30013]\n",
      "loss: 0.000639  [12928/30013]\n",
      "loss: 0.000608  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000555  [  128/30013]\n",
      "loss: 0.000566  [12928/30013]\n",
      "loss: 0.000537  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000492  [  128/30013]\n",
      "loss: 0.000500  [12928/30013]\n",
      "loss: 0.000473  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000435  [  128/30013]\n",
      "loss: 0.000442  [12928/30013]\n",
      "loss: 0.000418  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000385  [  128/30013]\n",
      "loss: 0.000391  [12928/30013]\n",
      "loss: 0.000369  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000341  [  128/30013]\n",
      "loss: 0.000346  [12928/30013]\n",
      "loss: 0.000325  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000301  [  128/30013]\n",
      "loss: 0.000305  [12928/30013]\n",
      "loss: 0.000287  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000267  [  128/30013]\n",
      "loss: 0.000269  [12928/30013]\n",
      "loss: 0.000254  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000236  [  128/30013]\n",
      "loss: 0.000237  [12928/30013]\n",
      "loss: 0.000224  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000209  [  128/30013]\n",
      "loss: 0.000209  [12928/30013]\n",
      "loss: 0.000198  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000185  [  128/30013]\n",
      "loss: 0.000185  [12928/30013]\n",
      "loss: 0.000175  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 52.3%, Avg loss: 2.224513 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 55.7%, Avg loss: 1.955905 \n",
      "\n",
      " Error: \n",
      " Accuracy: 55.7%  \n",
      "\n",
      "AUC value is: 0.5115175093849544\n",
      "Accuracy is: 0.4921\n",
      " Error: \n",
      " Accuracy: 55.7%  \n",
      "\n",
      "AUC value is: 0.5188415263157977\n",
      "Accuracy is: 0.5024166666666666\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.604047  [  128/30013]\n",
      "loss: 3.250818  [12928/30013]\n",
      "loss: 2.488869  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 15.3%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 2.405768  [  128/30013]\n",
      "loss: 2.057195  [12928/30013]\n",
      "loss: 2.153283  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 33.4%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 2.199198  [  128/30013]\n",
      "loss: 1.898462  [12928/30013]\n",
      "loss: 2.037943  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 37.0%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 2.110551  [  128/30013]\n",
      "loss: 1.799621  [12928/30013]\n",
      "loss: 1.979038  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 39.4%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 2.051437  [  128/30013]\n",
      "loss: 1.731118  [12928/30013]\n",
      "loss: 1.923591  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 41.0%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 2.005229  [  128/30013]\n",
      "loss: 1.687242  [12928/30013]\n",
      "loss: 1.886435  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.2%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.966149  [  128/30013]\n",
      "loss: 1.653164  [12928/30013]\n",
      "loss: 1.853085  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 43.2%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.928303  [  128/30013]\n",
      "loss: 1.617547  [12928/30013]\n",
      "loss: 1.815840  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 44.2%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 1.886624  [  128/30013]\n",
      "loss: 1.579635  [12928/30013]\n",
      "loss: 1.772448  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 45.2%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 1.837803  [  128/30013]\n",
      "loss: 1.545448  [12928/30013]\n",
      "loss: 1.732415  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 46.3%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.790958  [  128/30013]\n",
      "loss: 1.515103  [12928/30013]\n",
      "loss: 1.703703  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.5%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 1.752889  [  128/30013]\n",
      "loss: 1.487046  [12928/30013]\n",
      "loss: 1.676043  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 48.5%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 1.719428  [  128/30013]\n",
      "loss: 1.459806  [12928/30013]\n",
      "loss: 1.646459  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 49.4%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 1.686075  [  128/30013]\n",
      "loss: 1.434279  [12928/30013]\n",
      "loss: 1.615125  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 50.3%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 1.651803  [  128/30013]\n",
      "loss: 1.411020  [12928/30013]\n",
      "loss: 1.582912  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.2%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 1.616477  [  128/30013]\n",
      "loss: 1.388901  [12928/30013]\n",
      "loss: 1.550312  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 52.0%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 1.581209  [  128/30013]\n",
      "loss: 1.366593  [12928/30013]\n",
      "loss: 1.517838  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 52.9%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 1.546747  [  128/30013]\n",
      "loss: 1.343917  [12928/30013]\n",
      "loss: 1.485531  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 53.9%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 1.512774  [  128/30013]\n",
      "loss: 1.320696  [12928/30013]\n",
      "loss: 1.452882  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 54.9%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 1.478536  [  128/30013]\n",
      "loss: 1.296468  [12928/30013]\n",
      "loss: 1.419173  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 55.9%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 1.443288  [  128/30013]\n",
      "loss: 1.271015  [12928/30013]\n",
      "loss: 1.384545  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 56.9%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 1.406796  [  128/30013]\n",
      "loss: 1.243793  [12928/30013]\n",
      "loss: 1.348975  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 58.1%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 1.369358  [  128/30013]\n",
      "loss: 1.214544  [12928/30013]\n",
      "loss: 1.312676  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.2%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 1.331664  [  128/30013]\n",
      "loss: 1.183956  [12928/30013]\n",
      "loss: 1.276183  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.5%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 1.293904  [  128/30013]\n",
      "loss: 1.153124  [12928/30013]\n",
      "loss: 1.239398  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.8%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 1.255332  [  128/30013]\n",
      "loss: 1.122640  [12928/30013]\n",
      "loss: 1.202149  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 63.1%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 1.214970  [  128/30013]\n",
      "loss: 1.092238  [12928/30013]\n",
      "loss: 1.163994  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 64.5%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 1.172474  [  128/30013]\n",
      "loss: 1.060850  [12928/30013]\n",
      "loss: 1.124090  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.7%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 1.127768  [  128/30013]\n",
      "loss: 1.027376  [12928/30013]\n",
      "loss: 1.081822  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.1%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 1.080816  [  128/30013]\n",
      "loss: 0.991620  [12928/30013]\n",
      "loss: 1.037240  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.5%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 1.032567  [  128/30013]\n",
      "loss: 0.953929  [12928/30013]\n",
      "loss: 0.990956  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.1%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.984244  [  128/30013]\n",
      "loss: 0.914587  [12928/30013]\n",
      "loss: 0.943547  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.6%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.936075  [  128/30013]\n",
      "loss: 0.874214  [12928/30013]\n",
      "loss: 0.895641  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.2%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.887942  [  128/30013]\n",
      "loss: 0.833573  [12928/30013]\n",
      "loss: 0.847969  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.9%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.840250  [  128/30013]\n",
      "loss: 0.792868  [12928/30013]\n",
      "loss: 0.800826  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.6%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.793267  [  128/30013]\n",
      "loss: 0.751955  [12928/30013]\n",
      "loss: 0.754067  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.2%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.746871  [  128/30013]\n",
      "loss: 0.710683  [12928/30013]\n",
      "loss: 0.707753  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.0%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.700765  [  128/30013]\n",
      "loss: 0.669006  [12928/30013]\n",
      "loss: 0.662356  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.6%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.655012  [  128/30013]\n",
      "loss: 0.627292  [12928/30013]\n",
      "loss: 0.618188  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.4%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.610479  [  128/30013]\n",
      "loss: 0.586202  [12928/30013]\n",
      "loss: 0.575103  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.9%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.568126  [  128/30013]\n",
      "loss: 0.546219  [12928/30013]\n",
      "loss: 0.532946  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 86.4%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.528283  [  128/30013]\n",
      "loss: 0.507366  [12928/30013]\n",
      "loss: 0.491519  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.1%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.490695  [  128/30013]\n",
      "loss: 0.469325  [12928/30013]\n",
      "loss: 0.451010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.5%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.454626  [  128/30013]\n",
      "loss: 0.431687  [12928/30013]\n",
      "loss: 0.412049  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.419239  [  128/30013]\n",
      "loss: 0.394743  [12928/30013]\n",
      "loss: 0.375338  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.2%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.384669  [  128/30013]\n",
      "loss: 0.359204  [12928/30013]\n",
      "loss: 0.341640  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.3%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.351365  [  128/30013]\n",
      "loss: 0.325412  [12928/30013]\n",
      "loss: 0.311219  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.3%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.318887  [  128/30013]\n",
      "loss: 0.293284  [12928/30013]\n",
      "loss: 0.283345  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.2%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.287220  [  128/30013]\n",
      "loss: 0.262808  [12928/30013]\n",
      "loss: 0.257670  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.1%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.256628  [  128/30013]\n",
      "loss: 0.234037  [12928/30013]\n",
      "loss: 0.234208  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.8%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.227725  [  128/30013]\n",
      "loss: 0.207492  [12928/30013]\n",
      "loss: 0.212642  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.4%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.201204  [  128/30013]\n",
      "loss: 0.183852  [12928/30013]\n",
      "loss: 0.192671  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.177109  [  128/30013]\n",
      "loss: 0.162838  [12928/30013]\n",
      "loss: 0.174278  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.4%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.155855  [  128/30013]\n",
      "loss: 0.143780  [12928/30013]\n",
      "loss: 0.157153  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.137190  [  128/30013]\n",
      "loss: 0.126470  [12928/30013]\n",
      "loss: 0.141851  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.120410  [  128/30013]\n",
      "loss: 0.110996  [12928/30013]\n",
      "loss: 0.127668  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.105012  [  128/30013]\n",
      "loss: 0.097253  [12928/30013]\n",
      "loss: 0.113970  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.091107  [  128/30013]\n",
      "loss: 0.085109  [12928/30013]\n",
      "loss: 0.101032  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.078940  [  128/30013]\n",
      "loss: 0.074733  [12928/30013]\n",
      "loss: 0.089291  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.068359  [  128/30013]\n",
      "loss: 0.065664  [12928/30013]\n",
      "loss: 0.078944  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.059022  [  128/30013]\n",
      "loss: 0.057591  [12928/30013]\n",
      "loss: 0.069289  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.051261  [  128/30013]\n",
      "loss: 0.050394  [12928/30013]\n",
      "loss: 0.060536  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.044978  [  128/30013]\n",
      "loss: 0.044259  [12928/30013]\n",
      "loss: 0.052321  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.039362  [  128/30013]\n",
      "loss: 0.038829  [12928/30013]\n",
      "loss: 0.045472  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.034510  [  128/30013]\n",
      "loss: 0.034052  [12928/30013]\n",
      "loss: 0.039650  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.030228  [  128/30013]\n",
      "loss: 0.029804  [12928/30013]\n",
      "loss: 0.034543  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.026419  [  128/30013]\n",
      "loss: 0.026151  [12928/30013]\n",
      "loss: 0.030088  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.023071  [  128/30013]\n",
      "loss: 0.022885  [12928/30013]\n",
      "loss: 0.026223  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.020219  [  128/30013]\n",
      "loss: 0.020026  [12928/30013]\n",
      "loss: 0.022886  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.017721  [  128/30013]\n",
      "loss: 0.017532  [12928/30013]\n",
      "loss: 0.019961  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.015571  [  128/30013]\n",
      "loss: 0.015348  [12928/30013]\n",
      "loss: 0.017405  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.013692  [  128/30013]\n",
      "loss: 0.013428  [12928/30013]\n",
      "loss: 0.015187  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.012049  [  128/30013]\n",
      "loss: 0.011765  [12928/30013]\n",
      "loss: 0.013256  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.010600  [  128/30013]\n",
      "loss: 0.010337  [12928/30013]\n",
      "loss: 0.011570  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.009316  [  128/30013]\n",
      "loss: 0.009102  [12928/30013]\n",
      "loss: 0.010084  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.008176  [  128/30013]\n",
      "loss: 0.007994  [12928/30013]\n",
      "loss: 0.008769  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.007185  [  128/30013]\n",
      "loss: 0.007032  [12928/30013]\n",
      "loss: 0.007628  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.006319  [  128/30013]\n",
      "loss: 0.006190  [12928/30013]\n",
      "loss: 0.006665  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.005560  [  128/30013]\n",
      "loss: 0.005454  [12928/30013]\n",
      "loss: 0.005852  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.004896  [  128/30013]\n",
      "loss: 0.004811  [12928/30013]\n",
      "loss: 0.005151  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.004316  [  128/30013]\n",
      "loss: 0.004247  [12928/30013]\n",
      "loss: 0.004538  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.003810  [  128/30013]\n",
      "loss: 0.003751  [12928/30013]\n",
      "loss: 0.004002  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.003367  [  128/30013]\n",
      "loss: 0.003314  [12928/30013]\n",
      "loss: 0.003533  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.002977  [  128/30013]\n",
      "loss: 0.002928  [12928/30013]\n",
      "loss: 0.003121  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.002634  [  128/30013]\n",
      "loss: 0.002587  [12928/30013]\n",
      "loss: 0.002758  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.002332  [  128/30013]\n",
      "loss: 0.002287  [12928/30013]\n",
      "loss: 0.002438  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.002065  [  128/30013]\n",
      "loss: 0.002022  [12928/30013]\n",
      "loss: 0.002154  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.001830  [  128/30013]\n",
      "loss: 0.001789  [12928/30013]\n",
      "loss: 0.001903  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.001622  [  128/30013]\n",
      "loss: 0.001583  [12928/30013]\n",
      "loss: 0.001682  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.001438  [  128/30013]\n",
      "loss: 0.001401  [12928/30013]\n",
      "loss: 0.001485  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.001274  [  128/30013]\n",
      "loss: 0.001240  [12928/30013]\n",
      "loss: 0.001311  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.001129  [  128/30013]\n",
      "loss: 0.001098  [12928/30013]\n",
      "loss: 0.001157  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.001001  [  128/30013]\n",
      "loss: 0.000972  [12928/30013]\n",
      "loss: 0.001022  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000886  [  128/30013]\n",
      "loss: 0.000860  [12928/30013]\n",
      "loss: 0.000901  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000785  [  128/30013]\n",
      "loss: 0.000761  [12928/30013]\n",
      "loss: 0.000794  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000694  [  128/30013]\n",
      "loss: 0.000674  [12928/30013]\n",
      "loss: 0.000699  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000614  [  128/30013]\n",
      "loss: 0.000596  [12928/30013]\n",
      "loss: 0.000615  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000543  [  128/30013]\n",
      "loss: 0.000528  [12928/30013]\n",
      "loss: 0.000541  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000480  [  128/30013]\n",
      "loss: 0.000468  [12928/30013]\n",
      "loss: 0.000475  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000425  [  128/30013]\n",
      "loss: 0.000414  [12928/30013]\n",
      "loss: 0.000418  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 65.1%, Avg loss: 1.470190 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 70.0%, Avg loss: 1.187227 \n",
      "\n",
      " Error: \n",
      " Accuracy: 70.0%  \n",
      "\n",
      "AUC value is: 0.5361201240047788\n",
      "Accuracy is: 0.4986833333333333\n",
      " Error: \n",
      " Accuracy: 70.0%  \n",
      "\n",
      "AUC value is: 0.5428552252694814\n",
      "Accuracy is: 0.50955\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.607439  [  128/30013]\n",
      "loss: 3.027809  [12928/30013]\n",
      "loss: 2.192272  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 19.4%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 2.066484  [  128/30013]\n",
      "loss: 1.617725  [12928/30013]\n",
      "loss: 1.687994  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 43.6%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 1.739932  [  128/30013]\n",
      "loss: 1.454542  [12928/30013]\n",
      "loss: 1.574774  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 48.0%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 1.620395  [  128/30013]\n",
      "loss: 1.381238  [12928/30013]\n",
      "loss: 1.519882  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 50.6%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 1.524092  [  128/30013]\n",
      "loss: 1.333522  [12928/30013]\n",
      "loss: 1.463823  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 52.6%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 1.450045  [  128/30013]\n",
      "loss: 1.284469  [12928/30013]\n",
      "loss: 1.413733  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 54.3%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.402388  [  128/30013]\n",
      "loss: 1.241293  [12928/30013]\n",
      "loss: 1.378152  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 55.5%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.359665  [  128/30013]\n",
      "loss: 1.204633  [12928/30013]\n",
      "loss: 1.346777  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 56.7%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 1.312080  [  128/30013]\n",
      "loss: 1.165839  [12928/30013]\n",
      "loss: 1.306608  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 57.9%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 1.259078  [  128/30013]\n",
      "loss: 1.124141  [12928/30013]\n",
      "loss: 1.263416  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.1%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.213338  [  128/30013]\n",
      "loss: 1.084609  [12928/30013]\n",
      "loss: 1.227467  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.9%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 1.183039  [  128/30013]\n",
      "loss: 1.054383  [12928/30013]\n",
      "loss: 1.193021  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.8%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 1.160206  [  128/30013]\n",
      "loss: 1.030767  [12928/30013]\n",
      "loss: 1.161567  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.6%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 1.137996  [  128/30013]\n",
      "loss: 1.011171  [12928/30013]\n",
      "loss: 1.133941  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.4%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 1.114289  [  128/30013]\n",
      "loss: 0.991930  [12928/30013]\n",
      "loss: 1.108861  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 63.2%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 1.089916  [  128/30013]\n",
      "loss: 0.970918  [12928/30013]\n",
      "loss: 1.085768  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 64.0%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 1.065465  [  128/30013]\n",
      "loss: 0.948232  [12928/30013]\n",
      "loss: 1.063986  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 64.7%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 1.040618  [  128/30013]\n",
      "loss: 0.924150  [12928/30013]\n",
      "loss: 1.042505  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.5%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 1.015154  [  128/30013]\n",
      "loss: 0.899373  [12928/30013]\n",
      "loss: 1.020923  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 66.4%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.988901  [  128/30013]\n",
      "loss: 0.874874  [12928/30013]\n",
      "loss: 0.999246  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.2%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.962035  [  128/30013]\n",
      "loss: 0.851103  [12928/30013]\n",
      "loss: 0.976874  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.1%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.934829  [  128/30013]\n",
      "loss: 0.827901  [12928/30013]\n",
      "loss: 0.953136  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.0%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.907205  [  128/30013]\n",
      "loss: 0.804865  [12928/30013]\n",
      "loss: 0.927681  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.9%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.879082  [  128/30013]\n",
      "loss: 0.781580  [12928/30013]\n",
      "loss: 0.901067  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.9%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.850348  [  128/30013]\n",
      "loss: 0.757833  [12928/30013]\n",
      "loss: 0.874034  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.8%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.820915  [  128/30013]\n",
      "loss: 0.733864  [12928/30013]\n",
      "loss: 0.846646  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.8%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.791179  [  128/30013]\n",
      "loss: 0.710246  [12928/30013]\n",
      "loss: 0.818680  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.8%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.761696  [  128/30013]\n",
      "loss: 0.687347  [12928/30013]\n",
      "loss: 0.790123  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.9%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.732723  [  128/30013]\n",
      "loss: 0.665174  [12928/30013]\n",
      "loss: 0.761347  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.9%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.704130  [  128/30013]\n",
      "loss: 0.643424  [12928/30013]\n",
      "loss: 0.732916  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.0%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.675419  [  128/30013]\n",
      "loss: 0.621710  [12928/30013]\n",
      "loss: 0.705155  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.1%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.646115  [  128/30013]\n",
      "loss: 0.599921  [12928/30013]\n",
      "loss: 0.678027  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.3%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.616003  [  128/30013]\n",
      "loss: 0.578192  [12928/30013]\n",
      "loss: 0.651434  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.4%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.585402  [  128/30013]\n",
      "loss: 0.556731  [12928/30013]\n",
      "loss: 0.625169  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.5%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.555101  [  128/30013]\n",
      "loss: 0.535505  [12928/30013]\n",
      "loss: 0.599152  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.8%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.525484  [  128/30013]\n",
      "loss: 0.514096  [12928/30013]\n",
      "loss: 0.573473  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.0%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.496156  [  128/30013]\n",
      "loss: 0.491736  [12928/30013]\n",
      "loss: 0.548031  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.1%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.466588  [  128/30013]\n",
      "loss: 0.467964  [12928/30013]\n",
      "loss: 0.522334  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 86.4%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.436686  [  128/30013]\n",
      "loss: 0.443092  [12928/30013]\n",
      "loss: 0.495938  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 87.6%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.406807  [  128/30013]\n",
      "loss: 0.417623  [12928/30013]\n",
      "loss: 0.468877  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.7%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.377556  [  128/30013]\n",
      "loss: 0.391741  [12928/30013]\n",
      "loss: 0.441584  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.9%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.349262  [  128/30013]\n",
      "loss: 0.365548  [12928/30013]\n",
      "loss: 0.414565  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.8%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.322111  [  128/30013]\n",
      "loss: 0.339333  [12928/30013]\n",
      "loss: 0.388069  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.8%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.296424  [  128/30013]\n",
      "loss: 0.313835  [12928/30013]\n",
      "loss: 0.361818  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.8%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.272431  [  128/30013]\n",
      "loss: 0.289897  [12928/30013]\n",
      "loss: 0.335651  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.6%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.250447  [  128/30013]\n",
      "loss: 0.267649  [12928/30013]\n",
      "loss: 0.309604  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.4%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.230570  [  128/30013]\n",
      "loss: 0.246575  [12928/30013]\n",
      "loss: 0.283800  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.2%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.212314  [  128/30013]\n",
      "loss: 0.225752  [12928/30013]\n",
      "loss: 0.258871  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.9%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.195215  [  128/30013]\n",
      "loss: 0.205037  [12928/30013]\n",
      "loss: 0.235325  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.6%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.179260  [  128/30013]\n",
      "loss: 0.185585  [12928/30013]\n",
      "loss: 0.213412  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.164398  [  128/30013]\n",
      "loss: 0.167775  [12928/30013]\n",
      "loss: 0.192873  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.7%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.150315  [  128/30013]\n",
      "loss: 0.151133  [12928/30013]\n",
      "loss: 0.173462  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.2%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.136563  [  128/30013]\n",
      "loss: 0.136173  [12928/30013]\n",
      "loss: 0.155210  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.5%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.123428  [  128/30013]\n",
      "loss: 0.122625  [12928/30013]\n",
      "loss: 0.137990  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.111376  [  128/30013]\n",
      "loss: 0.109896  [12928/30013]\n",
      "loss: 0.121872  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.100424  [  128/30013]\n",
      "loss: 0.098112  [12928/30013]\n",
      "loss: 0.107287  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.090389  [  128/30013]\n",
      "loss: 0.087526  [12928/30013]\n",
      "loss: 0.094609  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.081420  [  128/30013]\n",
      "loss: 0.077914  [12928/30013]\n",
      "loss: 0.083407  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.073356  [  128/30013]\n",
      "loss: 0.069110  [12928/30013]\n",
      "loss: 0.073204  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.065831  [  128/30013]\n",
      "loss: 0.061054  [12928/30013]\n",
      "loss: 0.064110  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.058715  [  128/30013]\n",
      "loss: 0.053781  [12928/30013]\n",
      "loss: 0.056359  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.051849  [  128/30013]\n",
      "loss: 0.047303  [12928/30013]\n",
      "loss: 0.049776  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.045392  [  128/30013]\n",
      "loss: 0.041462  [12928/30013]\n",
      "loss: 0.043858  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.039637  [  128/30013]\n",
      "loss: 0.036445  [12928/30013]\n",
      "loss: 0.038698  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.034749  [  128/30013]\n",
      "loss: 0.031992  [12928/30013]\n",
      "loss: 0.033990  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.030378  [  128/30013]\n",
      "loss: 0.028156  [12928/30013]\n",
      "loss: 0.029738  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.026702  [  128/30013]\n",
      "loss: 0.024918  [12928/30013]\n",
      "loss: 0.026019  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.023484  [  128/30013]\n",
      "loss: 0.021962  [12928/30013]\n",
      "loss: 0.022760  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.020688  [  128/30013]\n",
      "loss: 0.019350  [12928/30013]\n",
      "loss: 0.019886  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.018255  [  128/30013]\n",
      "loss: 0.017002  [12928/30013]\n",
      "loss: 0.017390  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.016153  [  128/30013]\n",
      "loss: 0.014971  [12928/30013]\n",
      "loss: 0.015183  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.014291  [  128/30013]\n",
      "loss: 0.013213  [12928/30013]\n",
      "loss: 0.013276  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.012670  [  128/30013]\n",
      "loss: 0.011689  [12928/30013]\n",
      "loss: 0.011624  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.011243  [  128/30013]\n",
      "loss: 0.010360  [12928/30013]\n",
      "loss: 0.010175  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.009953  [  128/30013]\n",
      "loss: 0.009190  [12928/30013]\n",
      "loss: 0.008934  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.008827  [  128/30013]\n",
      "loss: 0.008156  [12928/30013]\n",
      "loss: 0.007859  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.007831  [  128/30013]\n",
      "loss: 0.007233  [12928/30013]\n",
      "loss: 0.006922  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.006942  [  128/30013]\n",
      "loss: 0.006410  [12928/30013]\n",
      "loss: 0.006103  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.006149  [  128/30013]\n",
      "loss: 0.005678  [12928/30013]\n",
      "loss: 0.005384  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.005441  [  128/30013]\n",
      "loss: 0.005028  [12928/30013]\n",
      "loss: 0.004751  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.004812  [  128/30013]\n",
      "loss: 0.004451  [12928/30013]\n",
      "loss: 0.004195  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.004256  [  128/30013]\n",
      "loss: 0.003938  [12928/30013]\n",
      "loss: 0.003705  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.003764  [  128/30013]\n",
      "loss: 0.003482  [12928/30013]\n",
      "loss: 0.003273  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.003331  [  128/30013]\n",
      "loss: 0.003077  [12928/30013]\n",
      "loss: 0.002891  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.002949  [  128/30013]\n",
      "loss: 0.002718  [12928/30013]\n",
      "loss: 0.002554  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.002612  [  128/30013]\n",
      "loss: 0.002400  [12928/30013]\n",
      "loss: 0.002256  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.002317  [  128/30013]\n",
      "loss: 0.002119  [12928/30013]\n",
      "loss: 0.001993  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.002057  [  128/30013]\n",
      "loss: 0.001870  [12928/30013]\n",
      "loss: 0.001761  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.001829  [  128/30013]\n",
      "loss: 0.001650  [12928/30013]\n",
      "loss: 0.001556  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.001629  [  128/30013]\n",
      "loss: 0.001455  [12928/30013]\n",
      "loss: 0.001375  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.001452  [  128/30013]\n",
      "loss: 0.001283  [12928/30013]\n",
      "loss: 0.001215  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.001294  [  128/30013]\n",
      "loss: 0.001130  [12928/30013]\n",
      "loss: 0.001075  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.001153  [  128/30013]\n",
      "loss: 0.000995  [12928/30013]\n",
      "loss: 0.000951  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.001028  [  128/30013]\n",
      "loss: 0.000875  [12928/30013]\n",
      "loss: 0.000843  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000916  [  128/30013]\n",
      "loss: 0.000770  [12928/30013]\n",
      "loss: 0.000747  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000816  [  128/30013]\n",
      "loss: 0.000678  [12928/30013]\n",
      "loss: 0.000662  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000727  [  128/30013]\n",
      "loss: 0.000597  [12928/30013]\n",
      "loss: 0.000587  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000647  [  128/30013]\n",
      "loss: 0.000526  [12928/30013]\n",
      "loss: 0.000520  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000576  [  128/30013]\n",
      "loss: 0.000463  [12928/30013]\n",
      "loss: 0.000460  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000512  [  128/30013]\n",
      "loss: 0.000407  [12928/30013]\n",
      "loss: 0.000406  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 72.5%, Avg loss: 1.127026 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 79.4%, Avg loss: 0.796255 \n",
      "\n",
      " Error: \n",
      " Accuracy: 79.4%  \n",
      "\n",
      "AUC value is: 0.5687765601369319\n",
      "Accuracy is: 0.50815\n",
      " Error: \n",
      " Accuracy: 79.4%  \n",
      "\n",
      "AUC value is: 0.5737114916191579\n",
      "Accuracy is: 0.5196333333333333\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.611197  [  128/30013]\n",
      "loss: 2.974408  [12928/30013]\n",
      "loss: 1.973853  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 21.8%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.973865  [  128/30013]\n",
      "loss: 1.560466  [12928/30013]\n",
      "loss: 1.421757  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 49.1%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 1.618805  [  128/30013]\n",
      "loss: 1.320761  [12928/30013]\n",
      "loss: 1.229180  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 54.9%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 1.521904  [  128/30013]\n",
      "loss: 1.192492  [12928/30013]\n",
      "loss: 1.143610  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 58.2%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 1.466026  [  128/30013]\n",
      "loss: 1.124184  [12928/30013]\n",
      "loss: 1.094502  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.0%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 1.420054  [  128/30013]\n",
      "loss: 1.073563  [12928/30013]\n",
      "loss: 1.057952  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.3%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.381174  [  128/30013]\n",
      "loss: 1.029487  [12928/30013]\n",
      "loss: 1.023145  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.6%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.349409  [  128/30013]\n",
      "loss: 0.992301  [12928/30013]\n",
      "loss: 0.983781  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 64.0%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 1.319174  [  128/30013]\n",
      "loss: 0.958372  [12928/30013]\n",
      "loss: 0.945487  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.1%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 1.278821  [  128/30013]\n",
      "loss: 0.924142  [12928/30013]\n",
      "loss: 0.908912  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 66.1%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.231057  [  128/30013]\n",
      "loss: 0.888373  [12928/30013]\n",
      "loss: 0.870635  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.1%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 1.183643  [  128/30013]\n",
      "loss: 0.850074  [12928/30013]\n",
      "loss: 0.833846  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.3%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 1.136959  [  128/30013]\n",
      "loss: 0.815974  [12928/30013]\n",
      "loss: 0.805400  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.5%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 1.089743  [  128/30013]\n",
      "loss: 0.788947  [12928/30013]\n",
      "loss: 0.785167  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.5%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 1.049466  [  128/30013]\n",
      "loss: 0.766038  [12928/30013]\n",
      "loss: 0.766283  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.6%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 1.015825  [  128/30013]\n",
      "loss: 0.746233  [12928/30013]\n",
      "loss: 0.745636  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.5%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.983889  [  128/30013]\n",
      "loss: 0.728406  [12928/30013]\n",
      "loss: 0.724192  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.4%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.949779  [  128/30013]\n",
      "loss: 0.711065  [12928/30013]\n",
      "loss: 0.703427  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.1%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.914563  [  128/30013]\n",
      "loss: 0.692830  [12928/30013]\n",
      "loss: 0.684318  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.9%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.879997  [  128/30013]\n",
      "loss: 0.673746  [12928/30013]\n",
      "loss: 0.667003  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.9%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.847668  [  128/30013]\n",
      "loss: 0.654337  [12928/30013]\n",
      "loss: 0.651151  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.7%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.817828  [  128/30013]\n",
      "loss: 0.634642  [12928/30013]\n",
      "loss: 0.635765  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.4%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.789474  [  128/30013]\n",
      "loss: 0.614270  [12928/30013]\n",
      "loss: 0.619603  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.2%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.761580  [  128/30013]\n",
      "loss: 0.593137  [12928/30013]\n",
      "loss: 0.601572  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.1%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.733267  [  128/30013]\n",
      "loss: 0.571766  [12928/30013]\n",
      "loss: 0.580571  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.9%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.704499  [  128/30013]\n",
      "loss: 0.551085  [12928/30013]\n",
      "loss: 0.556404  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.9%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.675915  [  128/30013]\n",
      "loss: 0.531050  [12928/30013]\n",
      "loss: 0.530056  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.7%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.648215  [  128/30013]\n",
      "loss: 0.511113  [12928/30013]\n",
      "loss: 0.502581  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.6%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.621788  [  128/30013]\n",
      "loss: 0.491021  [12928/30013]\n",
      "loss: 0.474903  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.5%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.596520  [  128/30013]\n",
      "loss: 0.470771  [12928/30013]\n",
      "loss: 0.447863  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.5%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.572265  [  128/30013]\n",
      "loss: 0.450419  [12928/30013]\n",
      "loss: 0.422230  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.5%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.548870  [  128/30013]\n",
      "loss: 0.429889  [12928/30013]\n",
      "loss: 0.398134  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 86.5%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.525975  [  128/30013]\n",
      "loss: 0.409157  [12928/30013]\n",
      "loss: 0.375072  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 87.4%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.503295  [  128/30013]\n",
      "loss: 0.388385  [12928/30013]\n",
      "loss: 0.352372  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.4%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.480924  [  128/30013]\n",
      "loss: 0.367871  [12928/30013]\n",
      "loss: 0.329620  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.3%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.459106  [  128/30013]\n",
      "loss: 0.347900  [12928/30013]\n",
      "loss: 0.306915  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.1%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.437872  [  128/30013]\n",
      "loss: 0.328773  [12928/30013]\n",
      "loss: 0.284755  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.9%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.416979  [  128/30013]\n",
      "loss: 0.310625  [12928/30013]\n",
      "loss: 0.263603  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.8%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.395950  [  128/30013]\n",
      "loss: 0.293091  [12928/30013]\n",
      "loss: 0.243633  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.6%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.374346  [  128/30013]\n",
      "loss: 0.275675  [12928/30013]\n",
      "loss: 0.224770  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.3%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.351786  [  128/30013]\n",
      "loss: 0.258256  [12928/30013]\n",
      "loss: 0.206865  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.327961  [  128/30013]\n",
      "loss: 0.240965  [12928/30013]\n",
      "loss: 0.189943  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.7%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.303086  [  128/30013]\n",
      "loss: 0.223963  [12928/30013]\n",
      "loss: 0.174254  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.4%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.278258  [  128/30013]\n",
      "loss: 0.207254  [12928/30013]\n",
      "loss: 0.159944  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.254561  [  128/30013]\n",
      "loss: 0.190809  [12928/30013]\n",
      "loss: 0.146923  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.6%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.231941  [  128/30013]\n",
      "loss: 0.174795  [12928/30013]\n",
      "loss: 0.135015  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.210306  [  128/30013]\n",
      "loss: 0.159496  [12928/30013]\n",
      "loss: 0.124120  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.5%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.189830  [  128/30013]\n",
      "loss: 0.145084  [12928/30013]\n",
      "loss: 0.114145  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.9%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.170540  [  128/30013]\n",
      "loss: 0.131705  [12928/30013]\n",
      "loss: 0.104795  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.3%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.152769  [  128/30013]\n",
      "loss: 0.119276  [12928/30013]\n",
      "loss: 0.095686  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.136914  [  128/30013]\n",
      "loss: 0.107360  [12928/30013]\n",
      "loss: 0.086658  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.122594  [  128/30013]\n",
      "loss: 0.096254  [12928/30013]\n",
      "loss: 0.078160  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.109549  [  128/30013]\n",
      "loss: 0.086390  [12928/30013]\n",
      "loss: 0.070162  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.097413  [  128/30013]\n",
      "loss: 0.077243  [12928/30013]\n",
      "loss: 0.062635  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.086195  [  128/30013]\n",
      "loss: 0.068859  [12928/30013]\n",
      "loss: 0.055739  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.076311  [  128/30013]\n",
      "loss: 0.061485  [12928/30013]\n",
      "loss: 0.049628  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.067930  [  128/30013]\n",
      "loss: 0.054958  [12928/30013]\n",
      "loss: 0.044366  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.060621  [  128/30013]\n",
      "loss: 0.049058  [12928/30013]\n",
      "loss: 0.039738  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.052895  [  128/30013]\n",
      "loss: 0.043741  [12928/30013]\n",
      "loss: 0.035464  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.047315  [  128/30013]\n",
      "loss: 0.039270  [12928/30013]\n",
      "loss: 0.031543  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.042089  [  128/30013]\n",
      "loss: 0.035094  [12928/30013]\n",
      "loss: 0.028086  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.037582  [  128/30013]\n",
      "loss: 0.031302  [12928/30013]\n",
      "loss: 0.024957  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.033556  [  128/30013]\n",
      "loss: 0.028010  [12928/30013]\n",
      "loss: 0.022120  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.029859  [  128/30013]\n",
      "loss: 0.025034  [12928/30013]\n",
      "loss: 0.019578  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.026517  [  128/30013]\n",
      "loss: 0.022297  [12928/30013]\n",
      "loss: 0.017264  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.023542  [  128/30013]\n",
      "loss: 0.019841  [12928/30013]\n",
      "loss: 0.015241  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.020799  [  128/30013]\n",
      "loss: 0.017578  [12928/30013]\n",
      "loss: 0.013462  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.018379  [  128/30013]\n",
      "loss: 0.015552  [12928/30013]\n",
      "loss: 0.011882  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.016227  [  128/30013]\n",
      "loss: 0.013750  [12928/30013]\n",
      "loss: 0.010479  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.014334  [  128/30013]\n",
      "loss: 0.012145  [12928/30013]\n",
      "loss: 0.009213  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.012649  [  128/30013]\n",
      "loss: 0.010730  [12928/30013]\n",
      "loss: 0.008083  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.011147  [  128/30013]\n",
      "loss: 0.009467  [12928/30013]\n",
      "loss: 0.007087  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.009829  [  128/30013]\n",
      "loss: 0.008325  [12928/30013]\n",
      "loss: 0.006209  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.008675  [  128/30013]\n",
      "loss: 0.007293  [12928/30013]\n",
      "loss: 0.005429  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.007653  [  128/30013]\n",
      "loss: 0.006374  [12928/30013]\n",
      "loss: 0.004743  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.006745  [  128/30013]\n",
      "loss: 0.005563  [12928/30013]\n",
      "loss: 0.004149  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.005942  [  128/30013]\n",
      "loss: 0.004860  [12928/30013]\n",
      "loss: 0.003637  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.005231  [  128/30013]\n",
      "loss: 0.004257  [12928/30013]\n",
      "loss: 0.003195  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.004606  [  128/30013]\n",
      "loss: 0.003736  [12928/30013]\n",
      "loss: 0.002811  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.004057  [  128/30013]\n",
      "loss: 0.003285  [12928/30013]\n",
      "loss: 0.002476  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.003575  [  128/30013]\n",
      "loss: 0.002892  [12928/30013]\n",
      "loss: 0.002183  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.003152  [  128/30013]\n",
      "loss: 0.002549  [12928/30013]\n",
      "loss: 0.001928  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.002779  [  128/30013]\n",
      "loss: 0.002249  [12928/30013]\n",
      "loss: 0.001704  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.002451  [  128/30013]\n",
      "loss: 0.001986  [12928/30013]\n",
      "loss: 0.001509  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.002162  [  128/30013]\n",
      "loss: 0.001757  [12928/30013]\n",
      "loss: 0.001336  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.001908  [  128/30013]\n",
      "loss: 0.001555  [12928/30013]\n",
      "loss: 0.001184  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.001684  [  128/30013]\n",
      "loss: 0.001376  [12928/30013]\n",
      "loss: 0.001050  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.001487  [  128/30013]\n",
      "loss: 0.001219  [12928/30013]\n",
      "loss: 0.000931  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.001313  [  128/30013]\n",
      "loss: 0.001079  [12928/30013]\n",
      "loss: 0.000826  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.001161  [  128/30013]\n",
      "loss: 0.000955  [12928/30013]\n",
      "loss: 0.000732  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.001027  [  128/30013]\n",
      "loss: 0.000845  [12928/30013]\n",
      "loss: 0.000650  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000909  [  128/30013]\n",
      "loss: 0.000747  [12928/30013]\n",
      "loss: 0.000577  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000806  [  128/30013]\n",
      "loss: 0.000660  [12928/30013]\n",
      "loss: 0.000512  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000714  [  128/30013]\n",
      "loss: 0.000583  [12928/30013]\n",
      "loss: 0.000455  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000633  [  128/30013]\n",
      "loss: 0.000514  [12928/30013]\n",
      "loss: 0.000404  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000561  [  128/30013]\n",
      "loss: 0.000454  [12928/30013]\n",
      "loss: 0.000359  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000498  [  128/30013]\n",
      "loss: 0.000400  [12928/30013]\n",
      "loss: 0.000319  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000442  [  128/30013]\n",
      "loss: 0.000352  [12928/30013]\n",
      "loss: 0.000283  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000392  [  128/30013]\n",
      "loss: 0.000310  [12928/30013]\n",
      "loss: 0.000251  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000347  [  128/30013]\n",
      "loss: 0.000273  [12928/30013]\n",
      "loss: 0.000223  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 77.6%, Avg loss: 0.954459 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 85.2%, Avg loss: 0.597160 \n",
      "\n",
      " Error: \n",
      " Accuracy: 85.2%  \n",
      "\n",
      "AUC value is: 0.6078568163642244\n",
      "Accuracy is: 0.5313\n",
      " Error: \n",
      " Accuracy: 85.2%  \n",
      "\n",
      "AUC value is: 0.6016803985377637\n",
      "Accuracy is: 0.5363\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.603213  [  128/30013]\n",
      "loss: 2.945409  [12928/30013]\n",
      "loss: 2.007182  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 22.6%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.859908  [  128/30013]\n",
      "loss: 1.342665  [12928/30013]\n",
      "loss: 1.397578  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 52.6%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 1.431524  [  128/30013]\n",
      "loss: 1.097047  [12928/30013]\n",
      "loss: 1.202439  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.0%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 1.276682  [  128/30013]\n",
      "loss: 0.973366  [12928/30013]\n",
      "loss: 1.106499  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 63.2%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 1.176584  [  128/30013]\n",
      "loss: 0.897432  [12928/30013]\n",
      "loss: 1.051619  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.4%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 1.101741  [  128/30013]\n",
      "loss: 0.848809  [12928/30013]\n",
      "loss: 0.995500  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.2%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.047548  [  128/30013]\n",
      "loss: 0.806368  [12928/30013]\n",
      "loss: 0.940308  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.8%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.000896  [  128/30013]\n",
      "loss: 0.769962  [12928/30013]\n",
      "loss: 0.898096  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.2%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.958307  [  128/30013]\n",
      "loss: 0.738355  [12928/30013]\n",
      "loss: 0.865548  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.4%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.916601  [  128/30013]\n",
      "loss: 0.709510  [12928/30013]\n",
      "loss: 0.833118  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.5%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.874170  [  128/30013]\n",
      "loss: 0.683583  [12928/30013]\n",
      "loss: 0.797090  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.6%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.826919  [  128/30013]\n",
      "loss: 0.659245  [12928/30013]\n",
      "loss: 0.759791  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.7%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.777177  [  128/30013]\n",
      "loss: 0.634574  [12928/30013]\n",
      "loss: 0.728291  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.7%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.734559  [  128/30013]\n",
      "loss: 0.612146  [12928/30013]\n",
      "loss: 0.701525  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.8%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.700204  [  128/30013]\n",
      "loss: 0.592871  [12928/30013]\n",
      "loss: 0.675750  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.7%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.670389  [  128/30013]\n",
      "loss: 0.575200  [12928/30013]\n",
      "loss: 0.649951  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.5%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.643013  [  128/30013]\n",
      "loss: 0.557742  [12928/30013]\n",
      "loss: 0.625744  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.3%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.618302  [  128/30013]\n",
      "loss: 0.540054  [12928/30013]\n",
      "loss: 0.603070  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.1%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.595550  [  128/30013]\n",
      "loss: 0.522401  [12928/30013]\n",
      "loss: 0.581857  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.0%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.573229  [  128/30013]\n",
      "loss: 0.504922  [12928/30013]\n",
      "loss: 0.561760  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.8%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.550451  [  128/30013]\n",
      "loss: 0.487534  [12928/30013]\n",
      "loss: 0.542346  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.6%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.527576  [  128/30013]\n",
      "loss: 0.470057  [12928/30013]\n",
      "loss: 0.522804  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.4%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.505321  [  128/30013]\n",
      "loss: 0.452944  [12928/30013]\n",
      "loss: 0.502602  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.3%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.484069  [  128/30013]\n",
      "loss: 0.436541  [12928/30013]\n",
      "loss: 0.481676  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.0%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.463574  [  128/30013]\n",
      "loss: 0.420751  [12928/30013]\n",
      "loss: 0.460106  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.7%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.443471  [  128/30013]\n",
      "loss: 0.405417  [12928/30013]\n",
      "loss: 0.437859  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 86.6%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.423452  [  128/30013]\n",
      "loss: 0.390470  [12928/30013]\n",
      "loss: 0.415106  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 87.4%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.403272  [  128/30013]\n",
      "loss: 0.375875  [12928/30013]\n",
      "loss: 0.392394  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.3%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.382954  [  128/30013]\n",
      "loss: 0.361619  [12928/30013]\n",
      "loss: 0.370310  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.2%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.362632  [  128/30013]\n",
      "loss: 0.347692  [12928/30013]\n",
      "loss: 0.349278  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.9%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.342549  [  128/30013]\n",
      "loss: 0.334072  [12928/30013]\n",
      "loss: 0.329517  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.8%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.323096  [  128/30013]\n",
      "loss: 0.320678  [12928/30013]\n",
      "loss: 0.310907  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.5%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.304399  [  128/30013]\n",
      "loss: 0.307497  [12928/30013]\n",
      "loss: 0.293117  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.2%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.286137  [  128/30013]\n",
      "loss: 0.294584  [12928/30013]\n",
      "loss: 0.275581  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.0%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.267784  [  128/30013]\n",
      "loss: 0.281993  [12928/30013]\n",
      "loss: 0.257644  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.7%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.249026  [  128/30013]\n",
      "loss: 0.269720  [12928/30013]\n",
      "loss: 0.239211  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.4%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.230065  [  128/30013]\n",
      "loss: 0.257574  [12928/30013]\n",
      "loss: 0.220907  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.1%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.211256  [  128/30013]\n",
      "loss: 0.245265  [12928/30013]\n",
      "loss: 0.203671  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.7%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.193174  [  128/30013]\n",
      "loss: 0.232497  [12928/30013]\n",
      "loss: 0.187768  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.3%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.176824  [  128/30013]\n",
      "loss: 0.219778  [12928/30013]\n",
      "loss: 0.172821  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.7%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.162252  [  128/30013]\n",
      "loss: 0.206788  [12928/30013]\n",
      "loss: 0.159049  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.2%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.148959  [  128/30013]\n",
      "loss: 0.193433  [12928/30013]\n",
      "loss: 0.145913  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.6%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.137289  [  128/30013]\n",
      "loss: 0.180015  [12928/30013]\n",
      "loss: 0.133225  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.126385  [  128/30013]\n",
      "loss: 0.165612  [12928/30013]\n",
      "loss: 0.120963  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.4%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.115812  [  128/30013]\n",
      "loss: 0.150526  [12928/30013]\n",
      "loss: 0.109381  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.106207  [  128/30013]\n",
      "loss: 0.134934  [12928/30013]\n",
      "loss: 0.098570  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.9%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.097029  [  128/30013]\n",
      "loss: 0.118512  [12928/30013]\n",
      "loss: 0.088307  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.088294  [  128/30013]\n",
      "loss: 0.104711  [12928/30013]\n",
      "loss: 0.078571  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.080071  [  128/30013]\n",
      "loss: 0.093111  [12928/30013]\n",
      "loss: 0.069542  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.072037  [  128/30013]\n",
      "loss: 0.082742  [12928/30013]\n",
      "loss: 0.061347  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.064487  [  128/30013]\n",
      "loss: 0.073585  [12928/30013]\n",
      "loss: 0.054088  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.057678  [  128/30013]\n",
      "loss: 0.065369  [12928/30013]\n",
      "loss: 0.048394  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.051698  [  128/30013]\n",
      "loss: 0.057581  [12928/30013]\n",
      "loss: 0.043817  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.046153  [  128/30013]\n",
      "loss: 0.050327  [12928/30013]\n",
      "loss: 0.039170  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.041540  [  128/30013]\n",
      "loss: 0.044654  [12928/30013]\n",
      "loss: 0.034778  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.037208  [  128/30013]\n",
      "loss: 0.039866  [12928/30013]\n",
      "loss: 0.030917  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.033241  [  128/30013]\n",
      "loss: 0.035447  [12928/30013]\n",
      "loss: 0.027526  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.029623  [  128/30013]\n",
      "loss: 0.031421  [12928/30013]\n",
      "loss: 0.024655  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.026284  [  128/30013]\n",
      "loss: 0.028096  [12928/30013]\n",
      "loss: 0.022153  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.023190  [  128/30013]\n",
      "loss: 0.025278  [12928/30013]\n",
      "loss: 0.019864  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.020509  [  128/30013]\n",
      "loss: 0.022596  [12928/30013]\n",
      "loss: 0.017720  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.018141  [  128/30013]\n",
      "loss: 0.019919  [12928/30013]\n",
      "loss: 0.015719  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.016011  [  128/30013]\n",
      "loss: 0.017372  [12928/30013]\n",
      "loss: 0.013891  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.014017  [  128/30013]\n",
      "loss: 0.015085  [12928/30013]\n",
      "loss: 0.012314  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.012252  [  128/30013]\n",
      "loss: 0.013116  [12928/30013]\n",
      "loss: 0.010950  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.010758  [  128/30013]\n",
      "loss: 0.011462  [12928/30013]\n",
      "loss: 0.009755  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.009513  [  128/30013]\n",
      "loss: 0.010027  [12928/30013]\n",
      "loss: 0.008686  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.008452  [  128/30013]\n",
      "loss: 0.008799  [12928/30013]\n",
      "loss: 0.007692  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.007534  [  128/30013]\n",
      "loss: 0.007738  [12928/30013]\n",
      "loss: 0.006799  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.006745  [  128/30013]\n",
      "loss: 0.006808  [12928/30013]\n",
      "loss: 0.005993  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.006048  [  128/30013]\n",
      "loss: 0.005990  [12928/30013]\n",
      "loss: 0.005300  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.005419  [  128/30013]\n",
      "loss: 0.005278  [12928/30013]\n",
      "loss: 0.004689  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.004845  [  128/30013]\n",
      "loss: 0.004651  [12928/30013]\n",
      "loss: 0.004150  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.004321  [  128/30013]\n",
      "loss: 0.004099  [12928/30013]\n",
      "loss: 0.003672  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.003844  [  128/30013]\n",
      "loss: 0.003611  [12928/30013]\n",
      "loss: 0.003250  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.003412  [  128/30013]\n",
      "loss: 0.003179  [12928/30013]\n",
      "loss: 0.002877  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.003024  [  128/30013]\n",
      "loss: 0.002797  [12928/30013]\n",
      "loss: 0.002548  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.002677  [  128/30013]\n",
      "loss: 0.002460  [12928/30013]\n",
      "loss: 0.002256  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.002370  [  128/30013]\n",
      "loss: 0.002162  [12928/30013]\n",
      "loss: 0.001998  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.002099  [  128/30013]\n",
      "loss: 0.001899  [12928/30013]\n",
      "loss: 0.001769  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.001859  [  128/30013]\n",
      "loss: 0.001667  [12928/30013]\n",
      "loss: 0.001565  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.001647  [  128/30013]\n",
      "loss: 0.001464  [12928/30013]\n",
      "loss: 0.001384  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.001461  [  128/30013]\n",
      "loss: 0.001286  [12928/30013]\n",
      "loss: 0.001224  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.001296  [  128/30013]\n",
      "loss: 0.001131  [12928/30013]\n",
      "loss: 0.001083  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.001151  [  128/30013]\n",
      "loss: 0.000995  [12928/30013]\n",
      "loss: 0.000960  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.001023  [  128/30013]\n",
      "loss: 0.000876  [12928/30013]\n",
      "loss: 0.000851  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000909  [  128/30013]\n",
      "loss: 0.000771  [12928/30013]\n",
      "loss: 0.000754  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000808  [  128/30013]\n",
      "loss: 0.000679  [12928/30013]\n",
      "loss: 0.000669  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000718  [  128/30013]\n",
      "loss: 0.000598  [12928/30013]\n",
      "loss: 0.000593  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000638  [  128/30013]\n",
      "loss: 0.000526  [12928/30013]\n",
      "loss: 0.000526  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000567  [  128/30013]\n",
      "loss: 0.000463  [12928/30013]\n",
      "loss: 0.000467  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000503  [  128/30013]\n",
      "loss: 0.000408  [12928/30013]\n",
      "loss: 0.000414  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000447  [  128/30013]\n",
      "loss: 0.000359  [12928/30013]\n",
      "loss: 0.000367  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000396  [  128/30013]\n",
      "loss: 0.000316  [12928/30013]\n",
      "loss: 0.000326  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000351  [  128/30013]\n",
      "loss: 0.000279  [12928/30013]\n",
      "loss: 0.000289  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000311  [  128/30013]\n",
      "loss: 0.000246  [12928/30013]\n",
      "loss: 0.000256  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000276  [  128/30013]\n",
      "loss: 0.000217  [12928/30013]\n",
      "loss: 0.000227  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000244  [  128/30013]\n",
      "loss: 0.000191  [12928/30013]\n",
      "loss: 0.000201  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000215  [  128/30013]\n",
      "loss: 0.000169  [12928/30013]\n",
      "loss: 0.000178  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000190  [  128/30013]\n",
      "loss: 0.000149  [12928/30013]\n",
      "loss: 0.000157  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n",
      "Test Error: \n",
      " Accuracy: 80.1%, Avg loss: 0.882500 \n",
      "\n",
      "Test Error: \n",
      " Accuracy: 87.9%, Avg loss: 0.522908 \n",
      "\n",
      " Error: \n",
      " Accuracy: 87.9%  \n",
      "\n",
      "AUC value is: 0.6469527492611273\n",
      "Accuracy is: 0.5595\n",
      " Error: \n",
      " Accuracy: 87.9%  \n",
      "\n",
      "AUC value is: 0.6176322870887294\n",
      "Accuracy is: 0.55075\n"
     ]
    }
   ],
   "source": [
    "# Sweep the DP-PCA privacy budget: for each epsilon, project the target\n",
    "# model's training data with differentially-private PCA, train a fresh\n",
    "# target model on the projection, then measure test accuracy and the\n",
    "# success of several membership-inference attacks against it.\n",
    "# Appends one entry per epsilon to each dp_* accumulator list.\n",
    "for epsilon in epsilon_list:\n",
    "    # Target model's training split.\n",
    "    x = X_data[train_keep[tar_model]]\n",
    "    y = Y_data[train_keep[tar_model]]\n",
    "    # DP-PCA to 600 components, fitted on the training split only; the same\n",
    "    # fitted projection is reused for all evaluation data below.\n",
    "    mPCA = dp_PCA(600, eps=epsilon, delta=1e-6, complete_dp=True)\n",
    "    x = mPCA.fit_transform(x)\n",
    "\n",
    "    # Train the DP target model.\n",
    "    train_data = CustomDataset(x, y, model_transform)\n",
    "    train_dataloader = DataLoader(train_data, batch_size=batch_size)\n",
    "    DPTargetModel = globals()['create_{}_model'.format(model)](600, 100)\n",
    "    DPTargetModel.to(device)\n",
    "    loss_fn = nn.CrossEntropyLoss()\n",
    "    optimizer = torch.optim.Adam(DPTargetModel.parameters(), lr=LEARNING_RATE)\n",
    "    for t in range(epochs):\n",
    "        print(f\"Epoch {t+1}\\n-------------------------------\")\n",
    "        train(train_dataloader, DPTargetModel, loss_fn, optimizer, device)\n",
    "    print(\"Done!\")\n",
    "\n",
    "    # Project the held-out test slice and the full attack-evaluation set with\n",
    "    # the fitted PCA (.copy() keeps the original arrays untouched).\n",
    "    x_test_data = X_tmp[90000:110000]\n",
    "    y_test_data = Y_tmp[90000:110000]\n",
    "    x_test_data = mPCA.transform(x_test_data.copy())\n",
    "    test_data = CustomDataset(x_test_data, y_test_data, model_transform)\n",
    "    test_dataloader = DataLoader(test_data, batch_size=batch_size)\n",
    "    X_data2 = mPCA.transform(X_data.copy())\n",
    "    all_data2 = CustomDataset(X_data2, Y_data, model_transform)\n",
    "    all_dataloader2 = DataLoader(all_data2, batch_size=batch_size)\n",
    "    success = evaluate(test_dataloader, DPTargetModel, loss_fn, device)\n",
    "    dp_test_acc.append(success)\n",
    "\n",
    "    # Baseline membership-inference attack over the whole evaluation set.\n",
    "    pred_result = base_attack(all_dataloader2, DPTargetModel, loss_fn, device)\n",
    "    accuracy = metrics.accuracy_score(train_keep[tar_model], pred_result)\n",
    "    dp_base_attack_acc.append(accuracy)\n",
    "\n",
    "    # Same attack restricted to the attack_num highest privacy-risk samples.\n",
    "    pred_clip = pred_result[pri_risk_rank[:attack_num]]\n",
    "    mem_clip = train_keep[tar_model][pri_risk_rank[:attack_num]]\n",
    "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    dp_part_base_attack_acc.append(accuracy)\n",
    "\n",
    "    # LiRA attack: per-sample scores from the target model are compared with\n",
    "    # the shadow-model score distribution; scores > 0 count as members.\n",
    "    _, score = get_score_from_model(all_dataloader2, DPTargetModel, device)\n",
    "    pred_result = LIRA_attack(train_keep, score_all, score, train_keep[tar_model])\n",
    "    accuracy = evaluate_ROC(pred_result, train_keep[tar_model], threshold=0)\n",
    "    dp_LIRA_attack_acc.append(accuracy)\n",
    "\n",
    "    # LiRA restricted to the attack_num highest privacy-risk samples.\n",
    "    pred_clip = pred_result[pri_risk_rank[:attack_num]]\n",
    "    mem_clip = train_keep[tar_model][pri_risk_rank[:attack_num]]\n",
    "    pred_clip = pred_clip > 0\n",
    "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    dp_part_LIRA_attack_acc.append(accuracy)\n",
    "\n",
    "    # Shadow-model attack: feed the target model's confidence vectors to the\n",
    "    # pre-trained attack classifier.\n",
    "    targetX, _ = get_model_pred(all_dataloader2, DPTargetModel, device)\n",
    "    targetX = targetX.detach().cpu().numpy().astype(np.float32)\n",
    "\n",
    "    top_k = 3\n",
    "    if top_k:\n",
    "        # Keep only the top-3 confidence values of each probability vector.\n",
    "        targetX, _ = get_top_k_conf(top_k, targetX, targetX)\n",
    "\n",
    "    # NOTE(review): membership labels here use train_keep[0] while every other\n",
    "    # attack in this loop uses train_keep[tar_model] -- confirm this is the\n",
    "    # intended target (only equivalent when tar_model == 0).\n",
    "    shadow_attack_data = CustomDataset(targetX, train_keep[0], attack_transform)\n",
    "    shadow_attack_dataloader = DataLoader(shadow_attack_data, batch_size=batch_size, shuffle=False)\n",
    "    attack_test_scores, attack_test_mem = get_attack_pred(shadow_attack_dataloader, attack_model, device)\n",
    "    attack_test_scores, attack_test_mem = attack_test_scores.detach().cpu().numpy(), attack_test_mem.detach().cpu().numpy()\n",
    "\n",
    "    accuracy = evaluate_ROC(attack_test_scores, attack_test_mem)\n",
    "    dp_shadow_attack_acc.append(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "7af1bf78-6fe1-449c-bb2d-5fbda1244e18",
   "metadata": {},
   "outputs": [],
   "source": [
    "# One-time initialization of the accumulator lists filled by the epsilon\n",
    "# sweep above; kept commented out so a re-run does not wipe collected results.\n",
    "# dp_test_acc = []\n",
    "# dp_LIRA_attack_acc = []\n",
    "# dp_base_attack_acc = []\n",
    "# dp_part_LIRA_attack_acc = []\n",
    "# dp_part_base_attack_acc = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "5a531d56-b4fd-4fe8-ae57-c1e571a30b05",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.0265,\n",
       " 0.02645,\n",
       " 0.0146,\n",
       " 0.02645,\n",
       " 0.03115,\n",
       " 0.0608,\n",
       " 0.1151,\n",
       " 0.3467,\n",
       " 0.52285,\n",
       " 0.651,\n",
       " 0.7254,\n",
       " 0.77635,\n",
       " 0.80125]"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Target-model accuracy on the held-out test set, one entry per epsilon\n",
    "dp_test_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "67dc8798-5240-4ec5-aae1-4cf73a9cb023",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.4905,\n",
       " 0.4904833333333333,\n",
       " 0.4904833333333333,\n",
       " 0.4904833333333333,\n",
       " 0.4904833333333333,\n",
       " 0.49051666666666666,\n",
       " 0.49056666666666665,\n",
       " 0.4909833333333333,\n",
       " 0.4921,\n",
       " 0.4986833333333333,\n",
       " 0.50815,\n",
       " 0.5313,\n",
       " 0.5595]"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# LiRA attack accuracy over the full evaluation set, one entry per epsilon\n",
    "dp_LIRA_attack_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "3271aefe-dc12-4bc9-b965-56788416d291",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.49985,\n",
       " 0.49985,\n",
       " 0.5000333333333333,\n",
       " 0.49985,\n",
       " 0.4998,\n",
       " 0.5001,\n",
       " 0.5049166666666667,\n",
       " 0.5185,\n",
       " 0.5386166666666666,\n",
       " 0.55585,\n",
       " 0.57615,\n",
       " 0.5795166666666667,\n",
       " 0.5830833333333333]"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Baseline attack accuracy over the full evaluation set, one entry per epsilon\n",
    "dp_base_attack_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "4da8fb5c-39eb-43fd-812c-891424514a94",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.511,\n",
       " 0.5105,\n",
       " 0.5105,\n",
       " 0.5105,\n",
       " 0.5105,\n",
       " 0.5115,\n",
       " 0.5135,\n",
       " 0.5165,\n",
       " 0.5295,\n",
       " 0.5515,\n",
       " 0.589,\n",
       " 0.6635,\n",
       " 0.7355]"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dp_part_LIRA_attack_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "d3d25227-90e3-45a1-a29f-eccd745bbeab",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.509,\n",
       " 0.509,\n",
       " 0.5085,\n",
       " 0.509,\n",
       " 0.5095,\n",
       " 0.5055,\n",
       " 0.5155,\n",
       " 0.5145,\n",
       " 0.5595,\n",
       " 0.5945,\n",
       " 0.6475,\n",
       " 0.7235,\n",
       " 0.76]"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dp_part_base_attack_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "86100464-ada9-4b37-945f-f9af6df77050",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.49978333333333336,\n",
       " 0.49978333333333336,\n",
       " 0.49978333333333336,\n",
       " 0.49978333333333336,\n",
       " 0.49978333333333336,\n",
       " 0.49978333333333336,\n",
       " 0.4997666666666667,\n",
       " 0.5007833333333334,\n",
       " 0.5024166666666666,\n",
       " 0.50955,\n",
       " 0.5196333333333333,\n",
       " 0.5363,\n",
       " 0.55075]"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "dp_shadow_attack_acc"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ced67123-c6e9-4815-8651-c781a6c20ec9",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f32998c8-0d3f-4a08-87dd-a37391336d54",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "47ba0016-43fe-4fdb-b1f6-e861878d912f",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "d3dad6d5-788f-418e-b150-207c4ebc1854",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = X_data[mem_label]\n",
    "y = Y_data[mem_label]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "ee847c2e-d402-4bd5-8b02-452ca09766fa",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(30013, 600)"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "x.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "d573cd06-c4df-4b78-bf08-61e29e6d7fe6",
   "metadata": {},
   "outputs": [],
   "source": [
    "x = X_data[mem_label]\n",
    "y = Y_data[mem_label]\n",
    "mPCA = dp_PCA(600, eps=30, delta=1e-6, complete_dp=True)\n",
    "X_train = mPCA.fit_transform(x)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "66265cb8-6a1a-4930-ace9-3113e6ca5416",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.611985  [  128/30013]\n",
      "loss: 3.054693  [12928/30013]\n",
      "loss: 2.255503  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 19.1%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 2.144893  [  128/30013]\n",
      "loss: 1.740012  [12928/30013]\n",
      "loss: 1.746545  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 42.5%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 1.847383  [  128/30013]\n",
      "loss: 1.528387  [12928/30013]\n",
      "loss: 1.578063  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 47.7%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 1.728237  [  128/30013]\n",
      "loss: 1.438989  [12928/30013]\n",
      "loss: 1.513027  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 50.2%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 1.657671  [  128/30013]\n",
      "loss: 1.375217  [12928/30013]\n",
      "loss: 1.472036  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 51.7%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 1.595664  [  128/30013]\n",
      "loss: 1.315481  [12928/30013]\n",
      "loss: 1.423626  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 53.3%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 1.526371  [  128/30013]\n",
      "loss: 1.271873  [12928/30013]\n",
      "loss: 1.372281  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 54.6%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 1.465404  [  128/30013]\n",
      "loss: 1.244935  [12928/30013]\n",
      "loss: 1.343395  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 55.8%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 1.419845  [  128/30013]\n",
      "loss: 1.217974  [12928/30013]\n",
      "loss: 1.322073  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 56.9%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 1.380351  [  128/30013]\n",
      "loss: 1.187046  [12928/30013]\n",
      "loss: 1.301853  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 58.0%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 1.345796  [  128/30013]\n",
      "loss: 1.154487  [12928/30013]\n",
      "loss: 1.279507  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 59.0%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 1.312502  [  128/30013]\n",
      "loss: 1.122083  [12928/30013]\n",
      "loss: 1.257027  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.2%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 1.279426  [  128/30013]\n",
      "loss: 1.094498  [12928/30013]\n",
      "loss: 1.236010  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.2%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 1.247313  [  128/30013]\n",
      "loss: 1.069587  [12928/30013]\n",
      "loss: 1.215603  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 61.9%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 1.214724  [  128/30013]\n",
      "loss: 1.043339  [12928/30013]\n",
      "loss: 1.193850  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 62.8%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 1.181960  [  128/30013]\n",
      "loss: 1.015788  [12928/30013]\n",
      "loss: 1.170035  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 63.7%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 1.149710  [  128/30013]\n",
      "loss: 0.987092  [12928/30013]\n",
      "loss: 1.145276  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 64.5%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 1.118411  [  128/30013]\n",
      "loss: 0.958347  [12928/30013]\n",
      "loss: 1.120396  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 65.4%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 1.088946  [  128/30013]\n",
      "loss: 0.930457  [12928/30013]\n",
      "loss: 1.095870  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 66.3%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 1.061206  [  128/30013]\n",
      "loss: 0.903506  [12928/30013]\n",
      "loss: 1.071200  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 67.1%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 1.035033  [  128/30013]\n",
      "loss: 0.877217  [12928/30013]\n",
      "loss: 1.046033  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.1%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 1.010376  [  128/30013]\n",
      "loss: 0.851932  [12928/30013]\n",
      "loss: 1.019926  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.9%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.986478  [  128/30013]\n",
      "loss: 0.828500  [12928/30013]\n",
      "loss: 0.993314  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 69.9%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.962384  [  128/30013]\n",
      "loss: 0.807096  [12928/30013]\n",
      "loss: 0.966715  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 70.8%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.937420  [  128/30013]\n",
      "loss: 0.786897  [12928/30013]\n",
      "loss: 0.940185  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 71.8%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.911456  [  128/30013]\n",
      "loss: 0.767084  [12928/30013]\n",
      "loss: 0.913710  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 72.9%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.884711  [  128/30013]\n",
      "loss: 0.747389  [12928/30013]\n",
      "loss: 0.887264  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 74.0%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.857300  [  128/30013]\n",
      "loss: 0.727846  [12928/30013]\n",
      "loss: 0.860700  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 75.1%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.829070  [  128/30013]\n",
      "loss: 0.708257  [12928/30013]\n",
      "loss: 0.833805  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.2%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.799910  [  128/30013]\n",
      "loss: 0.688487  [12928/30013]\n",
      "loss: 0.806547  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 77.3%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.769757  [  128/30013]\n",
      "loss: 0.668543  [12928/30013]\n",
      "loss: 0.779144  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.5%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.738543  [  128/30013]\n",
      "loss: 0.648126  [12928/30013]\n",
      "loss: 0.751585  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 79.7%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.706381  [  128/30013]\n",
      "loss: 0.626866  [12928/30013]\n",
      "loss: 0.723802  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 81.0%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.673580  [  128/30013]\n",
      "loss: 0.604557  [12928/30013]\n",
      "loss: 0.695715  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.2%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.640747  [  128/30013]\n",
      "loss: 0.581195  [12928/30013]\n",
      "loss: 0.666986  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 83.4%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.608348  [  128/30013]\n",
      "loss: 0.557117  [12928/30013]\n",
      "loss: 0.637290  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.4%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.576591  [  128/30013]\n",
      "loss: 0.532993  [12928/30013]\n",
      "loss: 0.606738  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.6%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.545530  [  128/30013]\n",
      "loss: 0.509212  [12928/30013]\n",
      "loss: 0.575567  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 86.8%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.515042  [  128/30013]\n",
      "loss: 0.485848  [12928/30013]\n",
      "loss: 0.543892  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 87.9%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.485054  [  128/30013]\n",
      "loss: 0.462819  [12928/30013]\n",
      "loss: 0.512094  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.455669  [  128/30013]\n",
      "loss: 0.440000  [12928/30013]\n",
      "loss: 0.480668  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.2%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.426872  [  128/30013]\n",
      "loss: 0.417056  [12928/30013]\n",
      "loss: 0.449685  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.2%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.398601  [  128/30013]\n",
      "loss: 0.393603  [12928/30013]\n",
      "loss: 0.419019  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.3%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.371113  [  128/30013]\n",
      "loss: 0.369716  [12928/30013]\n",
      "loss: 0.388715  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.1%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.344719  [  128/30013]\n",
      "loss: 0.345815  [12928/30013]\n",
      "loss: 0.358776  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.1%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.319460  [  128/30013]\n",
      "loss: 0.322122  [12928/30013]\n",
      "loss: 0.329005  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.8%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.295238  [  128/30013]\n",
      "loss: 0.298497  [12928/30013]\n",
      "loss: 0.299593  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.6%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.271873  [  128/30013]\n",
      "loss: 0.275265  [12928/30013]\n",
      "loss: 0.271686  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.4%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.249381  [  128/30013]\n",
      "loss: 0.252898  [12928/30013]\n",
      "loss: 0.245757  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.9%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.227636  [  128/30013]\n",
      "loss: 0.231566  [12928/30013]\n",
      "loss: 0.221572  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.4%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.206706  [  128/30013]\n",
      "loss: 0.211351  [12928/30013]\n",
      "loss: 0.199184  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.9%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.187038  [  128/30013]\n",
      "loss: 0.192292  [12928/30013]\n",
      "loss: 0.178517  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.3%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.168977  [  128/30013]\n",
      "loss: 0.174266  [12928/30013]\n",
      "loss: 0.159789  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.7%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.152452  [  128/30013]\n",
      "loss: 0.156840  [12928/30013]\n",
      "loss: 0.142820  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.137113  [  128/30013]\n",
      "loss: 0.140755  [12928/30013]\n",
      "loss: 0.127342  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.3%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.122840  [  128/30013]\n",
      "loss: 0.127266  [12928/30013]\n",
      "loss: 0.113143  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.109475  [  128/30013]\n",
      "loss: 0.115017  [12928/30013]\n",
      "loss: 0.099972  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.5%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.096739  [  128/30013]\n",
      "loss: 0.102631  [12928/30013]\n",
      "loss: 0.087978  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.084516  [  128/30013]\n",
      "loss: 0.090686  [12928/30013]\n",
      "loss: 0.077170  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.073215  [  128/30013]\n",
      "loss: 0.081086  [12928/30013]\n",
      "loss: 0.067124  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.8%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.064105  [  128/30013]\n",
      "loss: 0.072636  [12928/30013]\n",
      "loss: 0.057745  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.056277  [  128/30013]\n",
      "loss: 0.064749  [12928/30013]\n",
      "loss: 0.049487  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.049183  [  128/30013]\n",
      "loss: 0.057494  [12928/30013]\n",
      "loss: 0.042584  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.042554  [  128/30013]\n",
      "loss: 0.051021  [12928/30013]\n",
      "loss: 0.036847  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.036457  [  128/30013]\n",
      "loss: 0.045117  [12928/30013]\n",
      "loss: 0.032070  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.031798  [  128/30013]\n",
      "loss: 0.039718  [12928/30013]\n",
      "loss: 0.028014  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.027861  [  128/30013]\n",
      "loss: 0.034813  [12928/30013]\n",
      "loss: 0.024560  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.024513  [  128/30013]\n",
      "loss: 0.030227  [12928/30013]\n",
      "loss: 0.021575  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.021572  [  128/30013]\n",
      "loss: 0.025993  [12928/30013]\n",
      "loss: 0.018987  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.018921  [  128/30013]\n",
      "loss: 0.022110  [12928/30013]\n",
      "loss: 0.016715  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.016549  [  128/30013]\n",
      "loss: 0.018787  [12928/30013]\n",
      "loss: 0.014715  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.014423  [  128/30013]\n",
      "loss: 0.016226  [12928/30013]\n",
      "loss: 0.012984  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.012648  [  128/30013]\n",
      "loss: 0.014118  [12928/30013]\n",
      "loss: 0.011471  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.011107  [  128/30013]\n",
      "loss: 0.012321  [12928/30013]\n",
      "loss: 0.010141  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.009696  [  128/30013]\n",
      "loss: 0.010777  [12928/30013]\n",
      "loss: 0.008971  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.008392  [  128/30013]\n",
      "loss: 0.009450  [12928/30013]\n",
      "loss: 0.007940  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.007293  [  128/30013]\n",
      "loss: 0.008294  [12928/30013]\n",
      "loss: 0.007029  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.006416  [  128/30013]\n",
      "loss: 0.007280  [12928/30013]\n",
      "loss: 0.006223  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.005668  [  128/30013]\n",
      "loss: 0.006420  [12928/30013]\n",
      "loss: 0.005503  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.004998  [  128/30013]\n",
      "loss: 0.005653  [12928/30013]\n",
      "loss: 0.004863  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.004408  [  128/30013]\n",
      "loss: 0.004987  [12928/30013]\n",
      "loss: 0.004297  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.003884  [  128/30013]\n",
      "loss: 0.004409  [12928/30013]\n",
      "loss: 0.003798  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.003421  [  128/30013]\n",
      "loss: 0.003904  [12928/30013]\n",
      "loss: 0.003356  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.003012  [  128/30013]\n",
      "loss: 0.003459  [12928/30013]\n",
      "loss: 0.002967  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.002650  [  128/30013]\n",
      "loss: 0.003067  [12928/30013]\n",
      "loss: 0.002623  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.002331  [  128/30013]\n",
      "loss: 0.002718  [12928/30013]\n",
      "loss: 0.002320  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.002051  [  128/30013]\n",
      "loss: 0.002409  [12928/30013]\n",
      "loss: 0.002053  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.001804  [  128/30013]\n",
      "loss: 0.002135  [12928/30013]\n",
      "loss: 0.001817  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.001588  [  128/30013]\n",
      "loss: 0.001891  [12928/30013]\n",
      "loss: 0.001609  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.001399  [  128/30013]\n",
      "loss: 0.001674  [12928/30013]\n",
      "loss: 0.001425  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.001233  [  128/30013]\n",
      "loss: 0.001481  [12928/30013]\n",
      "loss: 0.001262  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.001088  [  128/30013]\n",
      "loss: 0.001311  [12928/30013]\n",
      "loss: 0.001118  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000960  [  128/30013]\n",
      "loss: 0.001159  [12928/30013]\n",
      "loss: 0.000991  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000849  [  128/30013]\n",
      "loss: 0.001025  [12928/30013]\n",
      "loss: 0.000878  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000750  [  128/30013]\n",
      "loss: 0.000907  [12928/30013]\n",
      "loss: 0.000778  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000663  [  128/30013]\n",
      "loss: 0.000802  [12928/30013]\n",
      "loss: 0.000689  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000587  [  128/30013]\n",
      "loss: 0.000709  [12928/30013]\n",
      "loss: 0.000610  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000520  [  128/30013]\n",
      "loss: 0.000627  [12928/30013]\n",
      "loss: 0.000540  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000460  [  128/30013]\n",
      "loss: 0.000553  [12928/30013]\n",
      "loss: 0.000478  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000407  [  128/30013]\n",
      "loss: 0.000487  [12928/30013]\n",
      "loss: 0.000423  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n"
     ]
    }
   ],
   "source": [
    "x = X_train\n",
    "y = y\n",
    "train_data = CustomDataset(x, y, model_transform)\n",
    "train_dataloader = DataLoader(train_data, batch_size=batch_size)\n",
    "DPTargetModel = globals()['create_{}_model'.format(model)](600, 100)\n",
    "DPTargetModel.to(device)\n",
    "loss_fn = nn.CrossEntropyLoss()\n",
    "optimizer = torch.optim.Adam(DPTargetModel.parameters(), lr=LEARNING_RATE)\n",
    "for t in range(epochs):\n",
    "    print(f\"Epoch {t+1}\\n-------------------------------\")\n",
    "    train(train_dataloader, DPTargetModel, loss_fn, optimizer, device)\n",
    "print(\"Done!\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "33ad59b9-8c06-4e7f-a229-d2d73773fa9e",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 4.606804  [  128/30013]\n",
      "loss: 2.793135  [12928/30013]\n",
      "loss: 1.724637  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 26.3%\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 1.604082  [  128/30013]\n",
      "loss: 1.260151  [12928/30013]\n",
      "loss: 1.066650  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 60.7%\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 1.152337  [  128/30013]\n",
      "loss: 0.983013  [12928/30013]\n",
      "loss: 0.839299  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 68.8%\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.986817  [  128/30013]\n",
      "loss: 0.841902  [12928/30013]\n",
      "loss: 0.704716  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 73.5%\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.859142  [  128/30013]\n",
      "loss: 0.735760  [12928/30013]\n",
      "loss: 0.637138  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 76.7%\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.770455  [  128/30013]\n",
      "loss: 0.660858  [12928/30013]\n",
      "loss: 0.593372  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 78.7%\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.706868  [  128/30013]\n",
      "loss: 0.600292  [12928/30013]\n",
      "loss: 0.545814  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 80.6%\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.643587  [  128/30013]\n",
      "loss: 0.540405  [12928/30013]\n",
      "loss: 0.494563  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 82.3%\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.581536  [  128/30013]\n",
      "loss: 0.479227  [12928/30013]\n",
      "loss: 0.444904  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 84.1%\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.525861  [  128/30013]\n",
      "loss: 0.429411  [12928/30013]\n",
      "loss: 0.396873  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 85.6%\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.467245  [  128/30013]\n",
      "loss: 0.383752  [12928/30013]\n",
      "loss: 0.356245  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 87.2%\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.416309  [  128/30013]\n",
      "loss: 0.348530  [12928/30013]\n",
      "loss: 0.322340  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 88.6%\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.365688  [  128/30013]\n",
      "loss: 0.316860  [12928/30013]\n",
      "loss: 0.299940  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 89.8%\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.325178  [  128/30013]\n",
      "loss: 0.292528  [12928/30013]\n",
      "loss: 0.279467  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 90.9%\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.298574  [  128/30013]\n",
      "loss: 0.274189  [12928/30013]\n",
      "loss: 0.247771  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 91.8%\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.267287  [  128/30013]\n",
      "loss: 0.251107  [12928/30013]\n",
      "loss: 0.226718  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 92.8%\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.242923  [  128/30013]\n",
      "loss: 0.237382  [12928/30013]\n",
      "loss: 0.216799  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 93.6%\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.254764  [  128/30013]\n",
      "loss: 0.217243  [12928/30013]\n",
      "loss: 0.190212  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 94.3%\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.212999  [  128/30013]\n",
      "loss: 0.216005  [12928/30013]\n",
      "loss: 0.172773  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.1%\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.187201  [  128/30013]\n",
      "loss: 0.187857  [12928/30013]\n",
      "loss: 0.157195  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 95.9%\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.171138  [  128/30013]\n",
      "loss: 0.174628  [12928/30013]\n",
      "loss: 0.152903  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.5%\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.153174  [  128/30013]\n",
      "loss: 0.169393  [12928/30013]\n",
      "loss: 0.124470  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 96.8%\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.144467  [  128/30013]\n",
      "loss: 0.159818  [12928/30013]\n",
      "loss: 0.120198  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.1%\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.154650  [  128/30013]\n",
      "loss: 0.138194  [12928/30013]\n",
      "loss: 0.105935  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 97.5%\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.122838  [  128/30013]\n",
      "loss: 0.126760  [12928/30013]\n",
      "loss: 0.102474  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.2%\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.112015  [  128/30013]\n",
      "loss: 0.118096  [12928/30013]\n",
      "loss: 0.088007  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.3%\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.105926  [  128/30013]\n",
      "loss: 0.104048  [12928/30013]\n",
      "loss: 0.085254  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.6%\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.108153  [  128/30013]\n",
      "loss: 0.091001  [12928/30013]\n",
      "loss: 0.074095  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 98.8%\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.096076  [  128/30013]\n",
      "loss: 0.096806  [12928/30013]\n",
      "loss: 0.070772  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.097631  [  128/30013]\n",
      "loss: 0.091755  [12928/30013]\n",
      "loss: 0.076625  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.0%\n",
      "Epoch 31\n",
      "-------------------------------\n",
      "loss: 0.072395  [  128/30013]\n",
      "loss: 0.074264  [12928/30013]\n",
      "loss: 0.068009  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.1%\n",
      "Epoch 32\n",
      "-------------------------------\n",
      "loss: 0.080251  [  128/30013]\n",
      "loss: 0.069743  [12928/30013]\n",
      "loss: 0.049416  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 33\n",
      "-------------------------------\n",
      "loss: 0.074717  [  128/30013]\n",
      "loss: 0.060801  [12928/30013]\n",
      "loss: 0.049910  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 34\n",
      "-------------------------------\n",
      "loss: 0.053672  [  128/30013]\n",
      "loss: 0.056718  [12928/30013]\n",
      "loss: 0.044794  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 35\n",
      "-------------------------------\n",
      "loss: 0.051651  [  128/30013]\n",
      "loss: 0.052680  [12928/30013]\n",
      "loss: 0.107709  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.4%\n",
      "Epoch 36\n",
      "-------------------------------\n",
      "loss: 0.057796  [  128/30013]\n",
      "loss: 0.047543  [12928/30013]\n",
      "loss: 0.039170  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.2%\n",
      "Epoch 37\n",
      "-------------------------------\n",
      "loss: 0.058589  [  128/30013]\n",
      "loss: 0.044594  [12928/30013]\n",
      "loss: 0.046253  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.6%\n",
      "Epoch 38\n",
      "-------------------------------\n",
      "loss: 0.040821  [  128/30013]\n",
      "loss: 0.047899  [12928/30013]\n",
      "loss: 0.024853  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.7%\n",
      "Epoch 39\n",
      "-------------------------------\n",
      "loss: 0.037536  [  128/30013]\n",
      "loss: 0.030464  [12928/30013]\n",
      "loss: 0.019716  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 99.9%\n",
      "Epoch 40\n",
      "-------------------------------\n",
      "loss: 0.032254  [  128/30013]\n",
      "loss: 0.026155  [12928/30013]\n",
      "loss: 0.014881  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 41\n",
      "-------------------------------\n",
      "loss: 0.027453  [  128/30013]\n",
      "loss: 0.023345  [12928/30013]\n",
      "loss: 0.012728  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 42\n",
      "-------------------------------\n",
      "loss: 0.022296  [  128/30013]\n",
      "loss: 0.018152  [12928/30013]\n",
      "loss: 0.012140  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 43\n",
      "-------------------------------\n",
      "loss: 0.019885  [  128/30013]\n",
      "loss: 0.016350  [12928/30013]\n",
      "loss: 0.010909  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 44\n",
      "-------------------------------\n",
      "loss: 0.017330  [  128/30013]\n",
      "loss: 0.014658  [12928/30013]\n",
      "loss: 0.009955  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 45\n",
      "-------------------------------\n",
      "loss: 0.015480  [  128/30013]\n",
      "loss: 0.013381  [12928/30013]\n",
      "loss: 0.009061  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 46\n",
      "-------------------------------\n",
      "loss: 0.013753  [  128/30013]\n",
      "loss: 0.012149  [12928/30013]\n",
      "loss: 0.008204  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 47\n",
      "-------------------------------\n",
      "loss: 0.012330  [  128/30013]\n",
      "loss: 0.011030  [12928/30013]\n",
      "loss: 0.007428  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 48\n",
      "-------------------------------\n",
      "loss: 0.011085  [  128/30013]\n",
      "loss: 0.009987  [12928/30013]\n",
      "loss: 0.006728  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 49\n",
      "-------------------------------\n",
      "loss: 0.009933  [  128/30013]\n",
      "loss: 0.009040  [12928/30013]\n",
      "loss: 0.006086  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 50\n",
      "-------------------------------\n",
      "loss: 0.008887  [  128/30013]\n",
      "loss: 0.008177  [12928/30013]\n",
      "loss: 0.005500  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 51\n",
      "-------------------------------\n",
      "loss: 0.007927  [  128/30013]\n",
      "loss: 0.007390  [12928/30013]\n",
      "loss: 0.004964  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 52\n",
      "-------------------------------\n",
      "loss: 0.007055  [  128/30013]\n",
      "loss: 0.006670  [12928/30013]\n",
      "loss: 0.004474  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 53\n",
      "-------------------------------\n",
      "loss: 0.006266  [  128/30013]\n",
      "loss: 0.006012  [12928/30013]\n",
      "loss: 0.004029  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 54\n",
      "-------------------------------\n",
      "loss: 0.005554  [  128/30013]\n",
      "loss: 0.005412  [12928/30013]\n",
      "loss: 0.003625  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 55\n",
      "-------------------------------\n",
      "loss: 0.004916  [  128/30013]\n",
      "loss: 0.004864  [12928/30013]\n",
      "loss: 0.003259  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 56\n",
      "-------------------------------\n",
      "loss: 0.004346  [  128/30013]\n",
      "loss: 0.004366  [12928/30013]\n",
      "loss: 0.002928  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 57\n",
      "-------------------------------\n",
      "loss: 0.003839  [  128/30013]\n",
      "loss: 0.003914  [12928/30013]\n",
      "loss: 0.002628  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 58\n",
      "-------------------------------\n",
      "loss: 0.003389  [  128/30013]\n",
      "loss: 0.003504  [12928/30013]\n",
      "loss: 0.002357  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 59\n",
      "-------------------------------\n",
      "loss: 0.002991  [  128/30013]\n",
      "loss: 0.003133  [12928/30013]\n",
      "loss: 0.002113  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 60\n",
      "-------------------------------\n",
      "loss: 0.002640  [  128/30013]\n",
      "loss: 0.002799  [12928/30013]\n",
      "loss: 0.001892  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 61\n",
      "-------------------------------\n",
      "loss: 0.002330  [  128/30013]\n",
      "loss: 0.002497  [12928/30013]\n",
      "loss: 0.001693  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 62\n",
      "-------------------------------\n",
      "loss: 0.002058  [  128/30013]\n",
      "loss: 0.002226  [12928/30013]\n",
      "loss: 0.001514  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 63\n",
      "-------------------------------\n",
      "loss: 0.001818  [  128/30013]\n",
      "loss: 0.001983  [12928/30013]\n",
      "loss: 0.001352  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 64\n",
      "-------------------------------\n",
      "loss: 0.001607  [  128/30013]\n",
      "loss: 0.001764  [12928/30013]\n",
      "loss: 0.001207  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 65\n",
      "-------------------------------\n",
      "loss: 0.001422  [  128/30013]\n",
      "loss: 0.001568  [12928/30013]\n",
      "loss: 0.001077  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 66\n",
      "-------------------------------\n",
      "loss: 0.001258  [  128/30013]\n",
      "loss: 0.001393  [12928/30013]\n",
      "loss: 0.000960  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 67\n",
      "-------------------------------\n",
      "loss: 0.001114  [  128/30013]\n",
      "loss: 0.001236  [12928/30013]\n",
      "loss: 0.000855  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 68\n",
      "-------------------------------\n",
      "loss: 0.000986  [  128/30013]\n",
      "loss: 0.001096  [12928/30013]\n",
      "loss: 0.000762  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 69\n",
      "-------------------------------\n",
      "loss: 0.000874  [  128/30013]\n",
      "loss: 0.000972  [12928/30013]\n",
      "loss: 0.000678  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 70\n",
      "-------------------------------\n",
      "loss: 0.000773  [  128/30013]\n",
      "loss: 0.000861  [12928/30013]\n",
      "loss: 0.000603  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 71\n",
      "-------------------------------\n",
      "loss: 0.000685  [  128/30013]\n",
      "loss: 0.000762  [12928/30013]\n",
      "loss: 0.000536  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 72\n",
      "-------------------------------\n",
      "loss: 0.000606  [  128/30013]\n",
      "loss: 0.000675  [12928/30013]\n",
      "loss: 0.000477  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 73\n",
      "-------------------------------\n",
      "loss: 0.000536  [  128/30013]\n",
      "loss: 0.000597  [12928/30013]\n",
      "loss: 0.000424  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 74\n",
      "-------------------------------\n",
      "loss: 0.000474  [  128/30013]\n",
      "loss: 0.000528  [12928/30013]\n",
      "loss: 0.000376  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 75\n",
      "-------------------------------\n",
      "loss: 0.000420  [  128/30013]\n",
      "loss: 0.000467  [12928/30013]\n",
      "loss: 0.000334  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 76\n",
      "-------------------------------\n",
      "loss: 0.000371  [  128/30013]\n",
      "loss: 0.000413  [12928/30013]\n",
      "loss: 0.000297  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 77\n",
      "-------------------------------\n",
      "loss: 0.000328  [  128/30013]\n",
      "loss: 0.000366  [12928/30013]\n",
      "loss: 0.000263  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 78\n",
      "-------------------------------\n",
      "loss: 0.000290  [  128/30013]\n",
      "loss: 0.000323  [12928/30013]\n",
      "loss: 0.000233  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 79\n",
      "-------------------------------\n",
      "loss: 0.000256  [  128/30013]\n",
      "loss: 0.000286  [12928/30013]\n",
      "loss: 0.000207  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 80\n",
      "-------------------------------\n",
      "loss: 0.000226  [  128/30013]\n",
      "loss: 0.000253  [12928/30013]\n",
      "loss: 0.000183  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 81\n",
      "-------------------------------\n",
      "loss: 0.000199  [  128/30013]\n",
      "loss: 0.000223  [12928/30013]\n",
      "loss: 0.000162  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 82\n",
      "-------------------------------\n",
      "loss: 0.000176  [  128/30013]\n",
      "loss: 0.000197  [12928/30013]\n",
      "loss: 0.000144  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 83\n",
      "-------------------------------\n",
      "loss: 0.000155  [  128/30013]\n",
      "loss: 0.000174  [12928/30013]\n",
      "loss: 0.000127  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 84\n",
      "-------------------------------\n",
      "loss: 0.000137  [  128/30013]\n",
      "loss: 0.000154  [12928/30013]\n",
      "loss: 0.000113  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 85\n",
      "-------------------------------\n",
      "loss: 0.000121  [  128/30013]\n",
      "loss: 0.000136  [12928/30013]\n",
      "loss: 0.000100  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 86\n",
      "-------------------------------\n",
      "loss: 0.000106  [  128/30013]\n",
      "loss: 0.000120  [12928/30013]\n",
      "loss: 0.000088  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 87\n",
      "-------------------------------\n",
      "loss: 0.000094  [  128/30013]\n",
      "loss: 0.000106  [12928/30013]\n",
      "loss: 0.000078  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 88\n",
      "-------------------------------\n",
      "loss: 0.000083  [  128/30013]\n",
      "loss: 0.000094  [12928/30013]\n",
      "loss: 0.000069  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 89\n",
      "-------------------------------\n",
      "loss: 0.000073  [  128/30013]\n",
      "loss: 0.000083  [12928/30013]\n",
      "loss: 0.000061  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 90\n",
      "-------------------------------\n",
      "loss: 0.000064  [  128/30013]\n",
      "loss: 0.000073  [12928/30013]\n",
      "loss: 0.000054  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 91\n",
      "-------------------------------\n",
      "loss: 0.000057  [  128/30013]\n",
      "loss: 0.000065  [12928/30013]\n",
      "loss: 0.000048  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 92\n",
      "-------------------------------\n",
      "loss: 0.000050  [  128/30013]\n",
      "loss: 0.000057  [12928/30013]\n",
      "loss: 0.000043  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 93\n",
      "-------------------------------\n",
      "loss: 0.000044  [  128/30013]\n",
      "loss: 0.000050  [12928/30013]\n",
      "loss: 0.000038  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 94\n",
      "-------------------------------\n",
      "loss: 0.000039  [  128/30013]\n",
      "loss: 0.000044  [12928/30013]\n",
      "loss: 0.000033  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 95\n",
      "-------------------------------\n",
      "loss: 0.000034  [  128/30013]\n",
      "loss: 0.000039  [12928/30013]\n",
      "loss: 0.000029  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 96\n",
      "-------------------------------\n",
      "loss: 0.000030  [  128/30013]\n",
      "loss: 0.000035  [12928/30013]\n",
      "loss: 0.000026  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 97\n",
      "-------------------------------\n",
      "loss: 0.000027  [  128/30013]\n",
      "loss: 0.000031  [12928/30013]\n",
      "loss: 0.000023  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 98\n",
      "-------------------------------\n",
      "loss: 0.000023  [  128/30013]\n",
      "loss: 0.000027  [12928/30013]\n",
      "loss: 0.000020  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 99\n",
      "-------------------------------\n",
      "loss: 0.000021  [  128/30013]\n",
      "loss: 0.000024  [12928/30013]\n",
      "loss: 0.000018  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Epoch 100\n",
      "-------------------------------\n",
      "loss: 0.000018  [  128/30013]\n",
      "loss: 0.000021  [12928/30013]\n",
      "loss: 0.000016  [25728/30013]\n",
      "Train Error: \n",
      " Accuracy: 100.0%\n",
      "Done!\n"
     ]
    }
   ],
   "source": [
    "# Select the member split and project it into the previously fitted PCA space.\n",
    "# (Original had redundant `x = X_train` / `y = y` re-assignments.)\n",
    "x = mPCA.transform(X_data[mem_label])\n",
    "y = Y_data[mem_label]\n",
    "train_data = CustomDataset(x, y, model_transform)\n",
    "train_dataloader = DataLoader(train_data, batch_size=batch_size)\n",
    "# Fresh comparison model: 600 input features -> 100 purchase classes\n",
    "CompareModel = globals()['create_{}_model'.format(model)](600, 100)\n",
    "CompareModel.to(device)\n",
    "loss_fn = nn.CrossEntropyLoss()\n",
    "optimizer = torch.optim.Adam(CompareModel.parameters(), lr=LEARNING_RATE)\n",
    "for t in range(epochs):\n",
    "    print(f\"Epoch {t+1}\\n-------------------------------\")\n",
    "    train(train_dataloader, CompareModel, loss_fn, optimizer, device)\n",
    "print(\"Done!\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "52ef8b58-bf58-4acc-be38-087f39ecdcff",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f7e2498d-4e92-4dbc-a118-4cc728b00d8c",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the raw Purchase100 table: 600 feature columns plus a label in column 600\n",
    "dataframe = pd.read_csv('../datasets/purchase100.txt', sep=',', encoding='utf-8', header=None)\n",
    "X_tmp = dataframe.iloc[:, range(600)].values\n",
    "Y_tmp = np.array([i for i in dataframe.loc[:, 600]])\n",
    "# Rows 90000:110000 are used as the test split\n",
    "# NOTE(review): presumably disjoint from the training rows -- confirm upstream split\n",
    "x_test_data = X_tmp[90000:110000]\n",
    "y_test_data = Y_tmp[90000:110000]\n",
    "# Apply the same PCA projection the models were trained on\n",
    "x_test_data= mPCA.transform(x_test_data)\n",
    "\n",
    "test_data = CustomDataset(x_test_data, y_test_data, model_transform)\n",
    "test_dataloader = DataLoader(test_data, batch_size=batch_size)\n",
    "\n",
    "evaluate(test_dataloader, DPTargetModel, loss_fn, device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "4e6342da-c169-4aca-80c1-358288158353",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Test Error: \n",
      " Accuracy: 87.2%, Avg loss: 0.643482 \n",
      "\n"
     ]
    }
   ],
   "source": [
    "evaluate(test_dataloader, CompareModel, loss_fn, device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ec4b32bd-ba91-40af-8d3c-681820ee4944",
   "metadata": {},
   "outputs": [],
   "source": [
    "# PCA-transform a copy of the full dataset (copy keeps X_data unchanged)\n",
    "# and wrap it in a dataloader for the attack evaluations below\n",
    "X_data2= mPCA.transform(X_data.copy())\n",
    "\n",
    "all_data2 = CustomDataset(X_data2, Y_data, model_transform)\n",
    "all_dataloader2 = DataLoader(all_data2, batch_size=batch_size)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "39d7d163-552c-4060-baa4-35fa79d13b34",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Baseline membership-inference attack on both the DP target model and the\n",
    "# comparison model; accuracy is measured only on the 5000 highest-risk samples\n",
    "# (pri_risk_rank order). Loop replaces the original copy-pasted duplicate code.\n",
    "loss_fn = nn.CrossEntropyLoss()\n",
    "mem_clip = train_keep[0][pri_risk_rank[:5000]]\n",
    "for attacked_model in (DPTargetModel, CompareModel):\n",
    "    pred_result = base_attack(all_dataloader2, attacked_model, loss_fn, device)\n",
    "    pred_clip = pred_result[pri_risk_rank[:5000]]\n",
    "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    print(accuracy)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3c5c53a7-afab-4e8a-93ed-01629bfe65cd",
   "metadata": {},
   "outputs": [],
   "source": [
    "# LiRA attack against both models: show the ROC, then accuracy restricted to\n",
    "# the 5000 highest-risk samples (scores > 0 predict membership).\n",
    "# Loop replaces the original copy-pasted duplicate code.\n",
    "mem_clip = train_keep[0][pri_risk_rank[:5000]]\n",
    "for attacked_model in (DPTargetModel, CompareModel):\n",
    "    _, score = get_score_from_model(all_dataloader2, attacked_model, device)\n",
    "    pred_result = LIRA_attack(train_keep, score_all, score, train_keep[0])\n",
    "    evaluate_ROC(pred_result, train_keep[0], threshold=0)\n",
    "    pred_clip = pred_result[pri_risk_rank[:5000]] > 0\n",
    "    accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "    print(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "29f46e80-f6b2-4a64-9a25-449b9334b55d",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "70224642-1590-429e-883c-83f5aeae070c",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "99f2dd96-00a3-41b1-9900-17a4f6454e80",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "676d75a3-2d5c-4374-8519-c801f68dc0fd",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "9fd1b1f2-e781-4d84-86b6-5acf46779d22",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "501bb898-8b23-4c51-ac91-eb9e2f78a3b2",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8dfc17cd-47a2-4c01-8ecd-8970b156fa8d",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f78fd657-d088-47a7-9562-663412f58e90",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "32ae305f-e681-4e9b-8400-8728612f889f",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ed3efcd5-af76-49a7-9fff-638deac00138",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "8bf4ae41-482b-464b-b2df-f79924f00fe2",
   "metadata": {},
   "source": [
    "## 脆弱点的两种提取方式"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "a20f6954-5154-483b-a64b-8ab536b4989b",
   "metadata": {},
   "source": [
    "### 风险指标"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "90586eba-a7f4-4a19-a8a8-54d5d8633693",
   "metadata": {},
   "outputs": [],
   "source": [
    "# # 加载所有参考模型上的损失、置信度、得分输出\n",
    "# conf_data_all, label_data, score_all = load_score_data_all(X_data, Y_data, weight_dir, num_shadowsets, data_name, model, weight_part, model_transform, batch_size, device)\n",
    "# loss_fn = nn.CrossEntropyLoss(reduction='none')\n",
    "# loss_data_all, label_data = load_loss_data_all(X_data, Y_data, loss_fn, weight_dir, num_shadowsets, data_name, model, weight_part, model_transform, batch_size, device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "5fbe2bf6-1750-4c23-91a8-80caf9b0825e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# np.save('../outputs_save/Purchase100_limited_loss.npy', loss_data_all)\n",
    "# np.save('../outputs_save/Purchase100_limited_score.npy', score_all)\n",
    "# np.save('../outputs_save/Purchase100_limited_conf.npy', conf_data_all)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "5a6e9c56-60bd-4bfc-a912-5e3b5eb52a4e",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Reload the cached per-sample outputs computed once over all reference models\n",
    "loss_data_all = np.load('../outputs_save/Purchase100_limited_loss.npy')\n",
    "score_all = np.load('../outputs_save/Purchase100_limited_score.npy')\n",
    "conf_data_all = np.load('../outputs_save/Purchase100_limited_conf.npy')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "8b45bd6b-c391-4ced-99bd-57b0b8c3c30b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 为每个数据点计算风险指标\n",
    "# 计算出一个点的脆弱程度评分\n",
    "pri_risk_all = get_risk_score(loss_data_all, train_keep)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "d8258b9c-d6f3-4e64-ab4f-4b068bb4b209",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Rank sample indices by risk score, highest risk first\n",
    "pri_risk_rank = np.argsort(pri_risk_all)\n",
    "pri_risk_rank = np.flip(pri_risk_rank)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "16cc7233-a9a0-4e74-990d-70e0e6ebfb74",
   "metadata": {},
   "source": [
    "### 离群点"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "e7e42cbf-89f1-4d1f-bbd0-dc87252c22cb",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 准备好logits的输出\n",
    "# 计算余弦相似度 5w*5w的大型矩阵\n",
    "# 邻居距离alpha，邻居数量"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "8d4c50e3-0ca3-463c-ad2a-4e4fd987b12e",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# logits_data_all, label_data = load_logits_data_all(X_data, Y_data, weight_dir, num_shadowsets, data_name, model, weight_part, model_transform, batch_size, device)\n",
    "# np.save('../outputs_save/Purchase100_limited_logits.npy', logits_data_all)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "a9314bc7-f529-46f5-834b-d1c387effef7",
   "metadata": {},
   "outputs": [],
   "source": [
    "logits_data_all = np.load('../outputs_save/Purchase100_limited_logits.npy')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "50d2365f-9e9d-4ffc-a9f7-16c1d3ed5646",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 按照k个模型进行拼接\n",
    "k = 10\n",
    "for i in range(k):\n",
    "    if i == 0:\n",
    "        combine_features = logits_data_all[i]\n",
    "    else:\n",
    "        combine_features = np.concatenate((combine_features, logits_data_all[i]),axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "1308ab27-3798-4a3c-8dd2-b5ce0234042e",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "compute to: 0\n",
      "compute to: 50\n",
      "compute to: 100\n",
      "compute to: 150\n",
      "compute to: 200\n",
      "compute to: 250\n",
      "compute to: 300\n",
      "compute to: 350\n",
      "compute to: 400\n",
      "compute to: 450\n",
      "compute to: 500\n",
      "compute to: 550\n",
      "compute to: 600\n",
      "compute to: 650\n",
      "compute to: 700\n",
      "compute to: 750\n",
      "compute to: 800\n",
      "compute to: 850\n",
      "compute to: 900\n",
      "compute to: 950\n",
      "compute to: 1000\n",
      "compute to: 1050\n",
      "compute to: 1100\n",
      "compute to: 1150\n",
      "compute to: 1200\n",
      "compute to: 1250\n",
      "compute to: 1300\n",
      "compute to: 1350\n",
      "compute to: 1400\n",
      "compute to: 1450\n",
      "compute to: 1500\n",
      "compute to: 1550\n",
      "compute to: 1600\n",
      "compute to: 1650\n",
      "compute to: 1700\n",
      "compute to: 1750\n",
      "compute to: 1800\n",
      "compute to: 1850\n",
      "compute to: 1900\n",
      "compute to: 1950\n",
      "compute to: 2000\n",
      "compute to: 2050\n",
      "compute to: 2100\n",
      "compute to: 2150\n",
      "compute to: 2200\n",
      "compute to: 2250\n",
      "compute to: 2300\n",
      "compute to: 2350\n",
      "compute to: 2400\n",
      "compute to: 2450\n",
      "compute to: 2500\n",
      "compute to: 2550\n",
      "compute to: 2600\n",
      "compute to: 2650\n",
      "compute to: 2700\n",
      "compute to: 2750\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[19], line 13\u001b[0m\n\u001b[1;32m     11\u001b[0m vec1 \u001b[38;5;241m=\u001b[39m combine_features[i]\n\u001b[1;32m     12\u001b[0m vec2 \u001b[38;5;241m=\u001b[39m combine_features[j]        \n\u001b[0;32m---> 13\u001b[0m cos_sim \u001b[38;5;241m=\u001b[39m vec1\u001b[38;5;241m.\u001b[39mdot(vec2) \u001b[38;5;241m/\u001b[39m (np\u001b[38;5;241m.\u001b[39mlinalg\u001b[38;5;241m.\u001b[39mnorm(vec1) \u001b[38;5;241m*\u001b[39m \u001b[43mnp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlinalg\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnorm\u001b[49m\u001b[43m(\u001b[49m\u001b[43mvec2\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m     14\u001b[0m cos_dis \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0.5\u001b[39m \u001b[38;5;241m-\u001b[39m \u001b[38;5;241m0.5\u001b[39m \u001b[38;5;241m*\u001b[39m cos_sim\n\u001b[1;32m     15\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m m \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(\u001b[38;5;28mlen\u001b[39m(alpha_list)):\n",
      "File \u001b[0;32m<__array_function__ internals>:200\u001b[0m, in \u001b[0;36mnorm\u001b[0;34m(*args, **kwargs)\u001b[0m\n",
      "File \u001b[0;32m~/.conda/envs/opacus/lib/python3.8/site-packages/numpy/linalg/linalg.py:2342\u001b[0m, in \u001b[0;36mnorm\u001b[0;34m(x, ord, axis, keepdims)\u001b[0m\n\u001b[1;32m   2338\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_norm_dispatcher\u001b[39m(x, \u001b[38;5;28mord\u001b[39m\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m, axis\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m, keepdims\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m):\n\u001b[1;32m   2339\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m (x,)\n\u001b[0;32m-> 2342\u001b[0m \u001b[38;5;129m@array_function_dispatch\u001b[39m(_norm_dispatcher)\n\u001b[1;32m   2343\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mnorm\u001b[39m(x, \u001b[38;5;28mord\u001b[39m\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m, axis\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m, keepdims\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m):\n\u001b[1;32m   2344\u001b[0m \u001b[38;5;250m    \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m   2345\u001b[0m \u001b[38;5;124;03m    Matrix or vector norm.\u001b[39;00m\n\u001b[1;32m   2346\u001b[0m \n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m   2491\u001b[0m \n\u001b[1;32m   2492\u001b[0m \u001b[38;5;124;03m    \"\"\"\u001b[39;00m\n\u001b[1;32m   2493\u001b[0m     x \u001b[38;5;241m=\u001b[39m asarray(x)\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "# 数据量太大，不能保存所有的余弦相似度，只能需要时计算\n",
    "alpha_list = [0.05, 0.1, 0.12, 0.15, 0.2, 0.3]\n",
    "n_num_list = []\n",
    "# for i in range(combine_features.shape[0]):\n",
    "for i in range(10000):\n",
    "    n_count = [0 for _ in alpha_list]\n",
    "    if i%50 == 0:\n",
    "        print(f\"compute to: {i}\")\n",
    "    for j in range(combine_features.shape[0]):\n",
    "        # 余弦距离的计算\n",
    "        vec1 = combine_features[i]\n",
    "        vec2 = combine_features[j]        \n",
    "        cos_sim = vec1.dot(vec2) / (np.linalg.norm(vec1) * np.linalg.norm(vec2))\n",
    "        cos_dis = 0.5 - 0.5 * cos_sim\n",
    "        for m in range(len(alpha_list)):\n",
    "            if (cos_dis < alpha_list[m]):\n",
    "                n_count[m] += 1\n",
    "    n_num_list.append(n_count)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "ae886e98-bf25-4c0d-a2fa-f2c948574aa8",
   "metadata": {},
   "outputs": [
    {
     "ename": "FileNotFoundError",
     "evalue": "[Errno 2] No such file or directory: '../outputs_save/Purchase100_limited_neigh.npy'",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mFileNotFoundError\u001b[0m                         Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[20], line 3\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[38;5;66;03m# neigh_data_all = np.array(n_num_list)\u001b[39;00m\n\u001b[1;32m      2\u001b[0m \u001b[38;5;66;03m# np.save('CIFAR10_neigh.npy', neigh_data_all)\u001b[39;00m\n\u001b[0;32m----> 3\u001b[0m neigh_data_all \u001b[38;5;241m=\u001b[39m \u001b[43mnp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mload\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43m../outputs_save/Purchase100_limited_neigh.npy\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m~/.conda/envs/opacus/lib/python3.8/site-packages/numpy/lib/npyio.py:405\u001b[0m, in \u001b[0;36mload\u001b[0;34m(file, mmap_mode, allow_pickle, fix_imports, encoding, max_header_size)\u001b[0m\n\u001b[1;32m    403\u001b[0m     own_fid \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m\n\u001b[1;32m    404\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 405\u001b[0m     fid \u001b[38;5;241m=\u001b[39m stack\u001b[38;5;241m.\u001b[39menter_context(\u001b[38;5;28;43mopen\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mos_fspath\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfile\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrb\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m)\n\u001b[1;32m    406\u001b[0m     own_fid \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[1;32m    408\u001b[0m \u001b[38;5;66;03m# Code to distinguish from NumPy binary files and pickles.\u001b[39;00m\n",
      "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: '../outputs_save/Purchase100_limited_neigh.npy'"
     ]
    }
   ],
   "source": [
    "neigh_data_all = np.array(n_num_list)\n",
    "np.save('../outputs_save/Purchase100_limited_neigh.npy', neigh_data_all)\n",
    "neigh_data_all = np.load('../outputs_save/Purchase100_limited_neigh.npy')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "79592536-1181-4647-9e40-fc959daa668e",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(50000, 6)"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Expect (n_samples, len(alpha_list)) — one neighbour count per alpha threshold.\n",
     "neigh_data_all.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "9d815b0b-568b-46d5-bdeb-fba567a8e213",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Column 1 corresponds to alpha_list[1] = 0.1: neighbour count within cosine distance 0.1.\n",
     "neigh_num = neigh_data_all[:,1]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "7bc974ea-ffb5-432f-b9da-ab7abce10270",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(50000,)"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Sanity check: one count per sample.\n",
     "neigh_num.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "ddd4b8fa-1653-4bab-bf8f-4e2a116b17b7",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Ascending argsort: indices with the FEWEST neighbours come first —\n",
     "# presumably the most isolated / vulnerable samples; used below as risk_rank[:5000].\n",
     "risk_rank = np.argsort(neigh_num)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "789a839a-cb2b-4da7-a1a2-ecac971344ef",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([29631, 39852, 26325, ..., 47315, 41363, 17410])"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "risk_rank"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "cce6c5e2-b030-4874-b08e-bf8466815484",
   "metadata": {},
   "source": [
    "## 针对脆弱点展开攻击"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "14cb98e0-737b-4ed0-93bd-a52681974606",
   "metadata": {},
   "source": [
    "### 基线攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "8c66acdc-645a-4cf8-b0f6-babc51bce18a",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 预测正确的判断为成员，预测不正确的判断为非成员"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "ecc0a131-e71a-4cd1-b5a3-a6c750abc382",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "CNN(\n",
       "  (conv1): Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
       "  (conv2): Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
       "  (conv3): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
       "  (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n",
       "  (fc1): Linear(in_features=1024, out_features=500, bias=True)\n",
       "  (fc2): Linear(in_features=500, out_features=10, bias=True)\n",
       "  (BatchNorm1): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "  (BatchNorm2): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       "  (BatchNorm3): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
       ")"
      ]
     },
     "execution_count": 27,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 创建对应的目标模型\n",
    "if model in ['NN', 'NN_4layer']:\n",
    "    Target_Model = globals()['create_{}_model'.format(model)](X_data.shape[1], Y_data.max()+1)\n",
    "elif model == 'CNN':\n",
    "    Target_Model = globals()['create_{}_model'.format(model)](Y_data.max()+1, data_name)\n",
    "# 加载参数\n",
    "weight_path = os.path.join(weight_dir, \"{}_{}_epoch{}_model{}.pth\".format(data_name, model, epochs, tar_model))\n",
    "# print(Reference_Model)\n",
    "Target_Model.load_state_dict(torch.load(weight_path))\n",
    "Target_Model.to(device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "670ebafe-5710-4b0c-b05d-008a6098dbc8",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Test Error: \n",
      " Accuracy: 84.7%, Avg loss: 1.124064 \n",
      "\n"
     ]
    }
   ],
   "source": [
     "loss_fn = nn.CrossEntropyLoss()\n",
     "# Baseline attack: predict member iff the target model classifies the sample correctly\n",
     "# (see the note above this section).\n",
     "pred_result = base_attack(train_dataloader, Target_Model, loss_fn, device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "d0dfd319-c536-4a57-a2b9-246f9a577f60",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.8\n",
      "0.916\n"
     ]
    }
   ],
   "source": [
    "pred_clip = pred_result[risk_rank[:5000]]\n",
    "mem_clip = train_keep[0][risk_rank[:5000]]\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(accuracy)\n",
    "\n",
    "pred_clip = pred_result[pri_risk_rank[:5000]]\n",
    "mem_clip = train_keep[0][pri_risk_rank[:5000]]\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c08b9a40-068e-4411-8a51-13a05337a7cd",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "5cf3f668-0b0b-41b1-bc5f-73f89bed9169",
   "metadata": {},
   "source": [
    "### 阈值攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "id": "61729130-6c97-4a19-b7c5-767f6ba1ae70",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 基于损失的阈值去做攻击，阈值如何确定？两个均值的均值"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "id": "0dd9f430-c918-47cd-9e8d-4ee1b35bf143",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Derive the global loss threshold from the shadow losses and membership labels.\n",
     "loss_threshold = get_loss_threshold(loss_data_all, train_keep)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "18f8fd0e-f86a-47fc-8105-7a3a53b10b28",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Row 0 presumably holds the target model's per-sample losses\n",
     "# (consistent with train_keep[0] / score_all[0] usage below) — TODO confirm.\n",
     "target_loss = loss_data_all[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "id": "09270911-4d24-4c91-9c59-224213d1c319",
   "metadata": {},
   "outputs": [],
   "source": [
     "# Loss below the threshold => predicted member.\n",
     "pred_result = target_loss < loss_threshold"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "id": "ef57e35b-e721-4cea-ad31-6b7cc4ef5362",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.7938\n",
      "0.8614\n"
     ]
    }
   ],
   "source": [
    "pred_clip = pred_result[risk_rank[:5000]]\n",
    "mem_clip = train_keep[0][risk_rank[:5000]]\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(accuracy)\n",
    "\n",
    "pred_clip = pred_result[pri_risk_rank[:5000]]\n",
    "mem_clip = train_keep[0][pri_risk_rank[:5000]]\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "736cdbcb-9dd3-45ab-bcc4-37c7f490a588",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8259bc0c-fbc1-4025-bf4a-b194236a39ff",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "786da5f1-1ae9-4f9b-ae7f-e18702ea84a7",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "77876b58-669a-483a-b80c-8b041486eda0",
   "metadata": {},
   "source": [
    "### 似然比攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "id": "f1304cdd-cd2c-437f-af20-bdd3eb68caab",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 先对所有目标数据执行攻击，然后根据脆弱点筛选获取对应的攻击成功率或者ROC\n",
    "# 输出两个，memlabel和pred_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "id": "c9ed8ba5-393b-4bdd-8a65-bbd6bbcaf4b6",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "AUC value is: 0.7769893964732137\n",
      "Accuracy is: 0.69174\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.69174"
      ]
     },
     "execution_count": 36,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Run the likelihood-ratio attack on all target data, then evaluate AUC/accuracy\n",
     "# at score threshold 0; vulnerable-point subsets are evaluated in the next cell.\n",
     "pred_result = LIRA_attack(train_keep, score_all, score_all[0], train_keep[0])\n",
     "evaluate_ROC(pred_result, train_keep[0], threshold=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "id": "5fdb2095-a077-40a4-b954-c4947b015080",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.8458\n",
      "0.9234\n"
     ]
    }
   ],
   "source": [
    "pred_clip = pred_result[risk_rank[:5000]]\n",
    "mem_clip = train_keep[0][risk_rank[:5000]]\n",
    "pred_clip = pred_clip > 0\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(accuracy)\n",
    "\n",
    "pred_clip = pred_result[pri_risk_rank[:5000]]\n",
    "mem_clip = train_keep[0][pri_risk_rank[:5000]]\n",
    "pred_clip = pred_clip > 0\n",
    "accuracy = metrics.accuracy_score(mem_clip, pred_clip)\n",
    "print(accuracy)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "35c96a5b-843d-4cb6-b964-f13dadac3193",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "26be2f5a-29e3-45c7-b6f0-c5382828f63f",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "457be14e-4b30-488a-aab0-0ae0b345e27e",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "8ea3f420-9a04-420f-83b6-06ea1658f6c7",
   "metadata": {},
   "source": [
    "### 影子模型攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "id": "7c5e0038-e2b6-4997-a64f-c9e80ed2c9c6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 在所有数据上执行一次攻击"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "id": "7d7fefb0-3e20-40e2-be99-4527c0f62306",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Files already downloaded and verified\n",
      "Files already downloaded and verified\n",
      " Error: \n",
      " Accuracy: 98.2%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 70.7%  \n",
      "\n",
      "(50000, 10) (50000,) (50000,)\n",
      "Files already downloaded and verified\n",
      "Files already downloaded and verified\n",
      " Error: \n",
      " Accuracy: 98.4%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 70.9%  \n",
      "\n",
      "(50000, 10) (50000,) (50000,)\n",
      "Files already downloaded and verified\n",
      "Files already downloaded and verified\n",
      " Error: \n",
      " Accuracy: 97.3%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 70.7%  \n",
      "\n",
      "(50000, 10) (50000,) (50000,)\n",
      "Files already downloaded and verified\n",
      "Files already downloaded and verified\n",
      " Error: \n",
      " Accuracy: 98.5%  \n",
      "\n",
      " Error: \n",
      " Accuracy: 70.9%  \n",
      "\n",
      "test data: (50000, 10) (50000,) (50000,)\n",
      "(150000, 10) (150000,)\n",
      "Attack_NN(\n",
      "  (linear_relu_stack): Sequential(\n",
      "    (0): Linear(in_features=11, out_features=128, bias=True)\n",
      "    (1): ReLU()\n",
      "    (2): Linear(in_features=128, out_features=64, bias=True)\n",
      "    (3): ReLU()\n",
      "    (4): Linear(in_features=64, out_features=1, bias=True)\n",
      "  )\n",
      ")\n",
      "Epoch 1\n",
      "-------------------------------\n",
      "loss: 0.697296  [   64/150000]\n",
      "loss: 0.691852  [ 6464/150000]\n",
      "loss: 0.687517  [12864/150000]\n",
      "loss: 0.651151  [19264/150000]\n",
      "loss: 0.638183  [25664/150000]\n",
      "loss: 0.665337  [32064/150000]\n",
      "loss: 0.629165  [38464/150000]\n",
      "loss: 0.629897  [44864/150000]\n",
      "loss: 0.633173  [51264/150000]\n",
      "loss: 0.613899  [57664/150000]\n",
      "loss: 0.603700  [64064/150000]\n",
      "loss: 0.624719  [70464/150000]\n",
      "loss: 0.607570  [76864/150000]\n",
      "loss: 0.615966  [83264/150000]\n",
      "loss: 0.594782  [89664/150000]\n",
      "loss: 0.612963  [96064/150000]\n",
      "loss: 0.666927  [102464/150000]\n",
      "loss: 0.635343  [108864/150000]\n",
      "loss: 0.551613  [115264/150000]\n",
      "loss: 0.614503  [121664/150000]\n",
      "loss: 0.609472  [128064/150000]\n",
      "loss: 0.651940  [134464/150000]\n",
      "loss: 0.623228  [140864/150000]\n",
      "loss: 0.632289  [147264/150000]\n",
      "Epoch 2\n",
      "-------------------------------\n",
      "loss: 0.597696  [   64/150000]\n",
      "loss: 0.666137  [ 6464/150000]\n",
      "loss: 0.574050  [12864/150000]\n",
      "loss: 0.663844  [19264/150000]\n",
      "loss: 0.615269  [25664/150000]\n",
      "loss: 0.637435  [32064/150000]\n",
      "loss: 0.607537  [38464/150000]\n",
      "loss: 0.640098  [44864/150000]\n",
      "loss: 0.555198  [51264/150000]\n",
      "loss: 0.508200  [57664/150000]\n",
      "loss: 0.649726  [64064/150000]\n",
      "loss: 0.579738  [70464/150000]\n",
      "loss: 0.639545  [76864/150000]\n",
      "loss: 0.595912  [83264/150000]\n",
      "loss: 0.603856  [89664/150000]\n",
      "loss: 0.624075  [96064/150000]\n",
      "loss: 0.644577  [102464/150000]\n",
      "loss: 0.573552  [108864/150000]\n",
      "loss: 0.593017  [115264/150000]\n",
      "loss: 0.678881  [121664/150000]\n",
      "loss: 0.645067  [128064/150000]\n",
      "loss: 0.678110  [134464/150000]\n",
      "loss: 0.592467  [140864/150000]\n",
      "loss: 0.644923  [147264/150000]\n",
      "Epoch 3\n",
      "-------------------------------\n",
      "loss: 0.549876  [   64/150000]\n",
      "loss: 0.540238  [ 6464/150000]\n",
      "loss: 0.602370  [12864/150000]\n",
      "loss: 0.577029  [19264/150000]\n",
      "loss: 0.573094  [25664/150000]\n",
      "loss: 0.556609  [32064/150000]\n",
      "loss: 0.588493  [38464/150000]\n",
      "loss: 0.620299  [44864/150000]\n",
      "loss: 0.532603  [51264/150000]\n",
      "loss: 0.550144  [57664/150000]\n",
      "loss: 0.655471  [64064/150000]\n",
      "loss: 0.557127  [70464/150000]\n",
      "loss: 0.562649  [76864/150000]\n",
      "loss: 0.611032  [83264/150000]\n",
      "loss: 0.609907  [89664/150000]\n",
      "loss: 0.641321  [96064/150000]\n",
      "loss: 0.517016  [102464/150000]\n",
      "loss: 0.548020  [108864/150000]\n",
      "loss: 0.606035  [115264/150000]\n",
      "loss: 0.712533  [121664/150000]\n",
      "loss: 0.600617  [128064/150000]\n",
      "loss: 0.581069  [134464/150000]\n",
      "loss: 0.684818  [140864/150000]\n",
      "loss: 0.590795  [147264/150000]\n",
      "Epoch 4\n",
      "-------------------------------\n",
      "loss: 0.614286  [   64/150000]\n",
      "loss: 0.603604  [ 6464/150000]\n",
      "loss: 0.656957  [12864/150000]\n",
      "loss: 0.724023  [19264/150000]\n",
      "loss: 0.610916  [25664/150000]\n",
      "loss: 0.620281  [32064/150000]\n",
      "loss: 0.583793  [38464/150000]\n",
      "loss: 0.590866  [44864/150000]\n",
      "loss: 0.590776  [51264/150000]\n",
      "loss: 0.535479  [57664/150000]\n",
      "loss: 0.576726  [64064/150000]\n",
      "loss: 0.629341  [70464/150000]\n",
      "loss: 0.564287  [76864/150000]\n",
      "loss: 0.611278  [83264/150000]\n",
      "loss: 0.531345  [89664/150000]\n",
      "loss: 0.570144  [96064/150000]\n",
      "loss: 0.544331  [102464/150000]\n",
      "loss: 0.641698  [108864/150000]\n",
      "loss: 0.555752  [115264/150000]\n",
      "loss: 0.562733  [121664/150000]\n",
      "loss: 0.679568  [128064/150000]\n",
      "loss: 0.604731  [134464/150000]\n",
      "loss: 0.575098  [140864/150000]\n",
      "loss: 0.597137  [147264/150000]\n",
      "Epoch 5\n",
      "-------------------------------\n",
      "loss: 0.557888  [   64/150000]\n",
      "loss: 0.536265  [ 6464/150000]\n",
      "loss: 0.493845  [12864/150000]\n",
      "loss: 0.616729  [19264/150000]\n",
      "loss: 0.629660  [25664/150000]\n",
      "loss: 0.672249  [32064/150000]\n",
      "loss: 0.625117  [38464/150000]\n",
      "loss: 0.594650  [44864/150000]\n",
      "loss: 0.611802  [51264/150000]\n",
      "loss: 0.610690  [57664/150000]\n",
      "loss: 0.582970  [64064/150000]\n",
      "loss: 0.507220  [70464/150000]\n",
      "loss: 0.587091  [76864/150000]\n",
      "loss: 0.644539  [83264/150000]\n",
      "loss: 0.567560  [89664/150000]\n",
      "loss: 0.635086  [96064/150000]\n",
      "loss: 0.540519  [102464/150000]\n",
      "loss: 0.587662  [108864/150000]\n",
      "loss: 0.594138  [115264/150000]\n",
      "loss: 0.623967  [121664/150000]\n",
      "loss: 0.630961  [128064/150000]\n",
      "loss: 0.540283  [134464/150000]\n",
      "loss: 0.660469  [140864/150000]\n",
      "loss: 0.615367  [147264/150000]\n",
      "Epoch 6\n",
      "-------------------------------\n",
      "loss: 0.644627  [   64/150000]\n",
      "loss: 0.641388  [ 6464/150000]\n",
      "loss: 0.583667  [12864/150000]\n",
      "loss: 0.595985  [19264/150000]\n",
      "loss: 0.639247  [25664/150000]\n",
      "loss: 0.553457  [32064/150000]\n",
      "loss: 0.644644  [38464/150000]\n",
      "loss: 0.659022  [44864/150000]\n",
      "loss: 0.603649  [51264/150000]\n",
      "loss: 0.556295  [57664/150000]\n",
      "loss: 0.633961  [64064/150000]\n",
      "loss: 0.603780  [70464/150000]\n",
      "loss: 0.635701  [76864/150000]\n",
      "loss: 0.617778  [83264/150000]\n",
      "loss: 0.576959  [89664/150000]\n",
      "loss: 0.612733  [96064/150000]\n",
      "loss: 0.606709  [102464/150000]\n",
      "loss: 0.534224  [108864/150000]\n",
      "loss: 0.552378  [115264/150000]\n",
      "loss: 0.582934  [121664/150000]\n",
      "loss: 0.590858  [128064/150000]\n",
      "loss: 0.572298  [134464/150000]\n",
      "loss: 0.577786  [140864/150000]\n",
      "loss: 0.601727  [147264/150000]\n",
      "Epoch 7\n",
      "-------------------------------\n",
      "loss: 0.585471  [   64/150000]\n",
      "loss: 0.620201  [ 6464/150000]\n",
      "loss: 0.629633  [12864/150000]\n",
      "loss: 0.633278  [19264/150000]\n",
      "loss: 0.581919  [25664/150000]\n",
      "loss: 0.639833  [32064/150000]\n",
      "loss: 0.565880  [38464/150000]\n",
      "loss: 0.618099  [44864/150000]\n",
      "loss: 0.540124  [51264/150000]\n",
      "loss: 0.533662  [57664/150000]\n",
      "loss: 0.537237  [64064/150000]\n",
      "loss: 0.654931  [70464/150000]\n",
      "loss: 0.579782  [76864/150000]\n",
      "loss: 0.584689  [83264/150000]\n",
      "loss: 0.596409  [89664/150000]\n",
      "loss: 0.616381  [96064/150000]\n",
      "loss: 0.590769  [102464/150000]\n",
      "loss: 0.616326  [108864/150000]\n",
      "loss: 0.685181  [115264/150000]\n",
      "loss: 0.593606  [121664/150000]\n",
      "loss: 0.644492  [128064/150000]\n",
      "loss: 0.657713  [134464/150000]\n",
      "loss: 0.686037  [140864/150000]\n",
      "loss: 0.612704  [147264/150000]\n",
      "Epoch 8\n",
      "-------------------------------\n",
      "loss: 0.565326  [   64/150000]\n",
      "loss: 0.596956  [ 6464/150000]\n",
      "loss: 0.633713  [12864/150000]\n",
      "loss: 0.675944  [19264/150000]\n",
      "loss: 0.558952  [25664/150000]\n",
      "loss: 0.647076  [32064/150000]\n",
      "loss: 0.617525  [38464/150000]\n",
      "loss: 0.561403  [44864/150000]\n",
      "loss: 0.656842  [51264/150000]\n",
      "loss: 0.644154  [57664/150000]\n",
      "loss: 0.543379  [64064/150000]\n",
      "loss: 0.672034  [70464/150000]\n",
      "loss: 0.677683  [76864/150000]\n",
      "loss: 0.586226  [83264/150000]\n",
      "loss: 0.612088  [89664/150000]\n",
      "loss: 0.554074  [96064/150000]\n",
      "loss: 0.608314  [102464/150000]\n",
      "loss: 0.596135  [108864/150000]\n",
      "loss: 0.572143  [115264/150000]\n",
      "loss: 0.575213  [121664/150000]\n",
      "loss: 0.617696  [128064/150000]\n",
      "loss: 0.516614  [134464/150000]\n",
      "loss: 0.608312  [140864/150000]\n",
      "loss: 0.655292  [147264/150000]\n",
      "Epoch 9\n",
      "-------------------------------\n",
      "loss: 0.632591  [   64/150000]\n",
      "loss: 0.573968  [ 6464/150000]\n",
      "loss: 0.606144  [12864/150000]\n",
      "loss: 0.570658  [19264/150000]\n",
      "loss: 0.582340  [25664/150000]\n",
      "loss: 0.686753  [32064/150000]\n",
      "loss: 0.566058  [38464/150000]\n",
      "loss: 0.554912  [44864/150000]\n",
      "loss: 0.565513  [51264/150000]\n",
      "loss: 0.685594  [57664/150000]\n",
      "loss: 0.577988  [64064/150000]\n",
      "loss: 0.601425  [70464/150000]\n",
      "loss: 0.529391  [76864/150000]\n",
      "loss: 0.522878  [83264/150000]\n",
      "loss: 0.630615  [89664/150000]\n",
      "loss: 0.667440  [96064/150000]\n",
      "loss: 0.540676  [102464/150000]\n",
      "loss: 0.535263  [108864/150000]\n",
      "loss: 0.621468  [115264/150000]\n",
      "loss: 0.637567  [121664/150000]\n",
      "loss: 0.629170  [128064/150000]\n",
      "loss: 0.606112  [134464/150000]\n",
      "loss: 0.571968  [140864/150000]\n",
      "loss: 0.615397  [147264/150000]\n",
      "Epoch 10\n",
      "-------------------------------\n",
      "loss: 0.605752  [   64/150000]\n",
      "loss: 0.573386  [ 6464/150000]\n",
      "loss: 0.575619  [12864/150000]\n",
      "loss: 0.593930  [19264/150000]\n",
      "loss: 0.606027  [25664/150000]\n",
      "loss: 0.603035  [32064/150000]\n",
      "loss: 0.534000  [38464/150000]\n",
      "loss: 0.565585  [44864/150000]\n",
      "loss: 0.589906  [51264/150000]\n",
      "loss: 0.592201  [57664/150000]\n",
      "loss: 0.628924  [64064/150000]\n",
      "loss: 0.616412  [70464/150000]\n",
      "loss: 0.587669  [76864/150000]\n",
      "loss: 0.608327  [83264/150000]\n",
      "loss: 0.583130  [89664/150000]\n",
      "loss: 0.543986  [96064/150000]\n",
      "loss: 0.607370  [102464/150000]\n",
      "loss: 0.546562  [108864/150000]\n",
      "loss: 0.591624  [115264/150000]\n",
      "loss: 0.609809  [121664/150000]\n",
      "loss: 0.578402  [128064/150000]\n",
      "loss: 0.595384  [134464/150000]\n",
      "loss: 0.625442  [140864/150000]\n",
      "loss: 0.665480  [147264/150000]\n",
      "Epoch 11\n",
      "-------------------------------\n",
      "loss: 0.579572  [   64/150000]\n",
      "loss: 0.677842  [ 6464/150000]\n",
      "loss: 0.593931  [12864/150000]\n",
      "loss: 0.592321  [19264/150000]\n",
      "loss: 0.575612  [25664/150000]\n",
      "loss: 0.571065  [32064/150000]\n",
      "loss: 0.575749  [38464/150000]\n",
      "loss: 0.616480  [44864/150000]\n",
      "loss: 0.472566  [51264/150000]\n",
      "loss: 0.550193  [57664/150000]\n",
      "loss: 0.557135  [64064/150000]\n",
      "loss: 0.567849  [70464/150000]\n",
      "loss: 0.661146  [76864/150000]\n",
      "loss: 0.578843  [83264/150000]\n",
      "loss: 0.588202  [89664/150000]\n",
      "loss: 0.639270  [96064/150000]\n",
      "loss: 0.718008  [102464/150000]\n",
      "loss: 0.622295  [108864/150000]\n",
      "loss: 0.628525  [115264/150000]\n",
      "loss: 0.623000  [121664/150000]\n",
      "loss: 0.606138  [128064/150000]\n",
      "loss: 0.554370  [134464/150000]\n",
      "loss: 0.689069  [140864/150000]\n",
      "loss: 0.555399  [147264/150000]\n",
      "Epoch 12\n",
      "-------------------------------\n",
      "loss: 0.552356  [   64/150000]\n",
      "loss: 0.686734  [ 6464/150000]\n",
      "loss: 0.609447  [12864/150000]\n",
      "loss: 0.604392  [19264/150000]\n",
      "loss: 0.602954  [25664/150000]\n",
      "loss: 0.546178  [32064/150000]\n",
      "loss: 0.628506  [38464/150000]\n",
      "loss: 0.673439  [44864/150000]\n",
      "loss: 0.625517  [51264/150000]\n",
      "loss: 0.598829  [57664/150000]\n",
      "loss: 0.568111  [64064/150000]\n",
      "loss: 0.698848  [70464/150000]\n",
      "loss: 0.625230  [76864/150000]\n",
      "loss: 0.600533  [83264/150000]\n",
      "loss: 0.591708  [89664/150000]\n",
      "loss: 0.605737  [96064/150000]\n",
      "loss: 0.617584  [102464/150000]\n",
      "loss: 0.605771  [108864/150000]\n",
      "loss: 0.697467  [115264/150000]\n",
      "loss: 0.591232  [121664/150000]\n",
      "loss: 0.552443  [128064/150000]\n",
      "loss: 0.592739  [134464/150000]\n",
      "loss: 0.589756  [140864/150000]\n",
      "loss: 0.534506  [147264/150000]\n",
      "Epoch 13\n",
      "-------------------------------\n",
      "loss: 0.640633  [   64/150000]\n",
      "loss: 0.697667  [ 6464/150000]\n",
      "loss: 0.592239  [12864/150000]\n",
      "loss: 0.595909  [19264/150000]\n",
      "loss: 0.538048  [25664/150000]\n",
      "loss: 0.567052  [32064/150000]\n",
      "loss: 0.565347  [38464/150000]\n",
      "loss: 0.628676  [44864/150000]\n",
      "loss: 0.532067  [51264/150000]\n",
      "loss: 0.569317  [57664/150000]\n",
      "loss: 0.662629  [64064/150000]\n",
      "loss: 0.690312  [70464/150000]\n",
      "loss: 0.549885  [76864/150000]\n",
      "loss: 0.621373  [83264/150000]\n",
      "loss: 0.593378  [89664/150000]\n",
      "loss: 0.655877  [96064/150000]\n",
      "loss: 0.596402  [102464/150000]\n",
      "loss: 0.670918  [108864/150000]\n",
      "loss: 0.597230  [115264/150000]\n",
      "loss: 0.592003  [121664/150000]\n",
      "loss: 0.618691  [128064/150000]\n",
      "loss: 0.580073  [134464/150000]\n",
      "loss: 0.650928  [140864/150000]\n",
      "loss: 0.595747  [147264/150000]\n",
      "Epoch 14\n",
      "-------------------------------\n",
      "loss: 0.631570  [   64/150000]\n",
      "loss: 0.581386  [ 6464/150000]\n",
      "loss: 0.530600  [12864/150000]\n",
      "loss: 0.649409  [19264/150000]\n",
      "loss: 0.652399  [25664/150000]\n",
      "loss: 0.567224  [32064/150000]\n",
      "loss: 0.562777  [38464/150000]\n",
      "loss: 0.475368  [44864/150000]\n",
      "loss: 0.525190  [51264/150000]\n",
      "loss: 0.599928  [57664/150000]\n",
      "loss: 0.548746  [64064/150000]\n",
      "loss: 0.474745  [70464/150000]\n",
      "loss: 0.626452  [76864/150000]\n",
      "loss: 0.503386  [83264/150000]\n",
      "loss: 0.494949  [89664/150000]\n",
      "loss: 0.615882  [96064/150000]\n",
      "loss: 0.583681  [102464/150000]\n",
      "loss: 0.605108  [108864/150000]\n",
      "loss: 0.607096  [115264/150000]\n",
      "loss: 0.582837  [121664/150000]\n",
      "loss: 0.605536  [128064/150000]\n",
      "loss: 0.604692  [134464/150000]\n",
      "loss: 0.584882  [140864/150000]\n",
      "loss: 0.534502  [147264/150000]\n",
      "Epoch 15\n",
      "-------------------------------\n",
      "loss: 0.544695  [   64/150000]\n",
      "loss: 0.573055  [ 6464/150000]\n",
      "loss: 0.550669  [12864/150000]\n",
      "loss: 0.723938  [19264/150000]\n",
      "loss: 0.584465  [25664/150000]\n",
      "loss: 0.569426  [32064/150000]\n",
      "loss: 0.607056  [38464/150000]\n",
      "loss: 0.616925  [44864/150000]\n",
      "loss: 0.642344  [51264/150000]\n",
      "loss: 0.549334  [57664/150000]\n",
      "loss: 0.587071  [64064/150000]\n",
      "loss: 0.615041  [70464/150000]\n",
      "loss: 0.579657  [76864/150000]\n",
      "loss: 0.601171  [83264/150000]\n",
      "loss: 0.612851  [89664/150000]\n",
      "loss: 0.618960  [96064/150000]\n",
      "loss: 0.601881  [102464/150000]\n",
      "loss: 0.640874  [108864/150000]\n",
      "loss: 0.504944  [115264/150000]\n",
      "loss: 0.580753  [121664/150000]\n",
      "loss: 0.576069  [128064/150000]\n",
      "loss: 0.545511  [134464/150000]\n",
      "loss: 0.611074  [140864/150000]\n",
      "loss: 0.621706  [147264/150000]\n",
      "Epoch 16\n",
      "-------------------------------\n",
      "loss: 0.524734  [   64/150000]\n",
      "loss: 0.669807  [ 6464/150000]\n",
      "loss: 0.589392  [12864/150000]\n",
      "loss: 0.582379  [19264/150000]\n",
      "loss: 0.664578  [25664/150000]\n",
      "loss: 0.615774  [32064/150000]\n",
      "loss: 0.622845  [38464/150000]\n",
      "loss: 0.538043  [44864/150000]\n",
      "loss: 0.562914  [51264/150000]\n",
      "loss: 0.582615  [57664/150000]\n",
      "loss: 0.512658  [64064/150000]\n",
      "loss: 0.580139  [70464/150000]\n",
      "loss: 0.594151  [76864/150000]\n",
      "loss: 0.647114  [83264/150000]\n",
      "loss: 0.592874  [89664/150000]\n",
      "loss: 0.643142  [96064/150000]\n",
      "loss: 0.584972  [102464/150000]\n",
      "loss: 0.561617  [108864/150000]\n",
      "loss: 0.597275  [115264/150000]\n",
      "loss: 0.599875  [121664/150000]\n",
      "loss: 0.532628  [128064/150000]\n",
      "loss: 0.521613  [134464/150000]\n",
      "loss: 0.664630  [140864/150000]\n",
      "loss: 0.608385  [147264/150000]\n",
      "Epoch 17\n",
      "-------------------------------\n",
      "loss: 0.621746  [   64/150000]\n",
      "loss: 0.511989  [ 6464/150000]\n",
      "loss: 0.563653  [12864/150000]\n",
      "loss: 0.562508  [19264/150000]\n",
      "loss: 0.498100  [25664/150000]\n",
      "loss: 0.594418  [32064/150000]\n",
      "loss: 0.583726  [38464/150000]\n",
      "loss: 0.588750  [44864/150000]\n",
      "loss: 0.521480  [51264/150000]\n",
      "loss: 0.579815  [57664/150000]\n",
      "loss: 0.542970  [64064/150000]\n",
      "loss: 0.544266  [70464/150000]\n",
      "loss: 0.571421  [76864/150000]\n",
      "loss: 0.526584  [83264/150000]\n",
      "loss: 0.591572  [89664/150000]\n",
      "loss: 0.611933  [96064/150000]\n",
      "loss: 0.562910  [102464/150000]\n",
      "loss: 0.613617  [108864/150000]\n",
      "loss: 0.639856  [115264/150000]\n",
      "loss: 0.555695  [121664/150000]\n",
      "loss: 0.595588  [128064/150000]\n",
      "loss: 0.634104  [134464/150000]\n",
      "loss: 0.585836  [140864/150000]\n",
      "loss: 0.635098  [147264/150000]\n",
      "Epoch 18\n",
      "-------------------------------\n",
      "loss: 0.599146  [   64/150000]\n",
      "loss: 0.618983  [ 6464/150000]\n",
      "loss: 0.562647  [12864/150000]\n",
      "loss: 0.629981  [19264/150000]\n",
      "loss: 0.557569  [25664/150000]\n",
      "loss: 0.644682  [32064/150000]\n",
      "loss: 0.552329  [38464/150000]\n",
      "loss: 0.632133  [44864/150000]\n",
      "loss: 0.667953  [51264/150000]\n",
      "loss: 0.579287  [57664/150000]\n",
      "loss: 0.551793  [64064/150000]\n",
      "loss: 0.516235  [70464/150000]\n",
      "loss: 0.623689  [76864/150000]\n",
      "loss: 0.583543  [83264/150000]\n",
      "loss: 0.602581  [89664/150000]\n",
      "loss: 0.617562  [96064/150000]\n",
      "loss: 0.589883  [102464/150000]\n",
      "loss: 0.594558  [108864/150000]\n",
      "loss: 0.591930  [115264/150000]\n",
      "loss: 0.598964  [121664/150000]\n",
      "loss: 0.631221  [128064/150000]\n",
      "loss: 0.569307  [134464/150000]\n",
      "loss: 0.626202  [140864/150000]\n",
      "loss: 0.579201  [147264/150000]\n",
      "Epoch 19\n",
      "-------------------------------\n",
      "loss: 0.588934  [   64/150000]\n",
      "loss: 0.588312  [ 6464/150000]\n",
      "loss: 0.513917  [12864/150000]\n",
      "loss: 0.606465  [19264/150000]\n",
      "loss: 0.592342  [25664/150000]\n",
      "loss: 0.615711  [32064/150000]\n",
      "loss: 0.557372  [38464/150000]\n",
      "loss: 0.559006  [44864/150000]\n",
      "loss: 0.585443  [51264/150000]\n",
      "loss: 0.485954  [57664/150000]\n",
      "loss: 0.589985  [64064/150000]\n",
      "loss: 0.587740  [70464/150000]\n",
      "loss: 0.629844  [76864/150000]\n",
      "loss: 0.521275  [83264/150000]\n",
      "loss: 0.603965  [89664/150000]\n",
      "loss: 0.644880  [96064/150000]\n",
      "loss: 0.630848  [102464/150000]\n",
      "loss: 0.571996  [108864/150000]\n",
      "loss: 0.580071  [115264/150000]\n",
      "loss: 0.593456  [121664/150000]\n",
      "loss: 0.551542  [128064/150000]\n",
      "loss: 0.611395  [134464/150000]\n",
      "loss: 0.577660  [140864/150000]\n",
      "loss: 0.612326  [147264/150000]\n",
      "Epoch 20\n",
      "-------------------------------\n",
      "loss: 0.627417  [   64/150000]\n",
      "loss: 0.637460  [ 6464/150000]\n",
      "loss: 0.666766  [12864/150000]\n",
      "loss: 0.531427  [19264/150000]\n",
      "loss: 0.562119  [25664/150000]\n",
      "loss: 0.587002  [32064/150000]\n",
      "loss: 0.652636  [38464/150000]\n",
      "loss: 0.559747  [44864/150000]\n",
      "loss: 0.587982  [51264/150000]\n",
      "loss: 0.601106  [57664/150000]\n",
      "loss: 0.565837  [64064/150000]\n",
      "loss: 0.607227  [70464/150000]\n",
      "loss: 0.626141  [76864/150000]\n",
      "loss: 0.640508  [83264/150000]\n",
      "loss: 0.605786  [89664/150000]\n",
      "loss: 0.608159  [96064/150000]\n",
      "loss: 0.530527  [102464/150000]\n",
      "loss: 0.587791  [108864/150000]\n",
      "loss: 0.593820  [115264/150000]\n",
      "loss: 0.615074  [121664/150000]\n",
      "loss: 0.616388  [128064/150000]\n",
      "loss: 0.569421  [134464/150000]\n",
      "loss: 0.607489  [140864/150000]\n",
      "loss: 0.645023  [147264/150000]\n",
      "Epoch 21\n",
      "-------------------------------\n",
      "loss: 0.603664  [   64/150000]\n",
      "loss: 0.614986  [ 6464/150000]\n",
      "loss: 0.521039  [12864/150000]\n",
      "loss: 0.547322  [19264/150000]\n",
      "loss: 0.577473  [25664/150000]\n",
      "loss: 0.597063  [32064/150000]\n",
      "loss: 0.585668  [38464/150000]\n",
      "loss: 0.541477  [44864/150000]\n",
      "loss: 0.588172  [51264/150000]\n",
      "loss: 0.645711  [57664/150000]\n",
      "loss: 0.617174  [64064/150000]\n",
      "loss: 0.659786  [70464/150000]\n",
      "loss: 0.518750  [76864/150000]\n",
      "loss: 0.564853  [83264/150000]\n",
      "loss: 0.540514  [89664/150000]\n",
      "loss: 0.677545  [96064/150000]\n",
      "loss: 0.583586  [102464/150000]\n",
      "loss: 0.587650  [108864/150000]\n",
      "loss: 0.618852  [115264/150000]\n",
      "loss: 0.546123  [121664/150000]\n",
      "loss: 0.611922  [128064/150000]\n",
      "loss: 0.769938  [134464/150000]\n",
      "loss: 0.654696  [140864/150000]\n",
      "loss: 0.555877  [147264/150000]\n",
      "Epoch 22\n",
      "-------------------------------\n",
      "loss: 0.546781  [   64/150000]\n",
      "loss: 0.663820  [ 6464/150000]\n",
      "loss: 0.586681  [12864/150000]\n",
      "loss: 0.545602  [19264/150000]\n",
      "loss: 0.622281  [25664/150000]\n",
      "loss: 0.613386  [32064/150000]\n",
      "loss: 0.679810  [38464/150000]\n",
      "loss: 0.561913  [44864/150000]\n",
      "loss: 0.588024  [51264/150000]\n",
      "loss: 0.646534  [57664/150000]\n",
      "loss: 0.567236  [64064/150000]\n",
      "loss: 0.582543  [70464/150000]\n",
      "loss: 0.581364  [76864/150000]\n",
      "loss: 0.589139  [83264/150000]\n",
      "loss: 0.651509  [89664/150000]\n",
      "loss: 0.562445  [96064/150000]\n",
      "loss: 0.679964  [102464/150000]\n",
      "loss: 0.560593  [108864/150000]\n",
      "loss: 0.535673  [115264/150000]\n",
      "loss: 0.605118  [121664/150000]\n",
      "loss: 0.591122  [128064/150000]\n",
      "loss: 0.576950  [134464/150000]\n",
      "loss: 0.554719  [140864/150000]\n",
      "loss: 0.568256  [147264/150000]\n",
      "Epoch 23\n",
      "-------------------------------\n",
      "loss: 0.515077  [   64/150000]\n",
      "loss: 0.553921  [ 6464/150000]\n",
      "loss: 0.577074  [12864/150000]\n",
      "loss: 0.585402  [19264/150000]\n",
      "loss: 0.611172  [25664/150000]\n",
      "loss: 0.587801  [32064/150000]\n",
      "loss: 0.588383  [38464/150000]\n",
      "loss: 0.711319  [44864/150000]\n",
      "loss: 0.527017  [51264/150000]\n",
      "loss: 0.678208  [57664/150000]\n",
      "loss: 0.589850  [64064/150000]\n",
      "loss: 0.584371  [70464/150000]\n",
      "loss: 0.581527  [76864/150000]\n",
      "loss: 0.653242  [83264/150000]\n",
      "loss: 0.613740  [89664/150000]\n",
      "loss: 0.525126  [96064/150000]\n",
      "loss: 0.609433  [102464/150000]\n",
      "loss: 0.608985  [108864/150000]\n",
      "loss: 0.682752  [115264/150000]\n",
      "loss: 0.592517  [121664/150000]\n",
      "loss: 0.597739  [128064/150000]\n",
      "loss: 0.542855  [134464/150000]\n",
      "loss: 0.600933  [140864/150000]\n",
      "loss: 0.548048  [147264/150000]\n",
      "Epoch 24\n",
      "-------------------------------\n",
      "loss: 0.538409  [   64/150000]\n",
      "loss: 0.587732  [ 6464/150000]\n",
      "loss: 0.553931  [12864/150000]\n",
      "loss: 0.615992  [19264/150000]\n",
      "loss: 0.546581  [25664/150000]\n",
      "loss: 0.550231  [32064/150000]\n",
      "loss: 0.648659  [38464/150000]\n",
      "loss: 0.633638  [44864/150000]\n",
      "loss: 0.550732  [51264/150000]\n",
      "loss: 0.543928  [57664/150000]\n",
      "loss: 0.617104  [64064/150000]\n",
      "loss: 0.560582  [70464/150000]\n",
      "loss: 0.669584  [76864/150000]\n",
      "loss: 0.601878  [83264/150000]\n",
      "loss: 0.527612  [89664/150000]\n",
      "loss: 0.572111  [96064/150000]\n",
      "loss: 0.598589  [102464/150000]\n",
      "loss: 0.603640  [108864/150000]\n",
      "loss: 0.608930  [115264/150000]\n",
      "loss: 0.534105  [121664/150000]\n",
      "loss: 0.629878  [128064/150000]\n",
      "loss: 0.611613  [134464/150000]\n",
      "loss: 0.513288  [140864/150000]\n",
      "loss: 0.635841  [147264/150000]\n",
      "Epoch 25\n",
      "-------------------------------\n",
      "loss: 0.557434  [   64/150000]\n",
      "loss: 0.615803  [ 6464/150000]\n",
      "loss: 0.579832  [12864/150000]\n",
      "loss: 0.575765  [19264/150000]\n",
      "loss: 0.606995  [25664/150000]\n",
      "loss: 0.632700  [32064/150000]\n",
      "loss: 0.632381  [38464/150000]\n",
      "loss: 0.615837  [44864/150000]\n",
      "loss: 0.587369  [51264/150000]\n",
      "loss: 0.600212  [57664/150000]\n",
      "loss: 0.616872  [64064/150000]\n",
      "loss: 0.609934  [70464/150000]\n",
      "loss: 0.662792  [76864/150000]\n",
      "loss: 0.589709  [83264/150000]\n",
      "loss: 0.618774  [89664/150000]\n",
      "loss: 0.645855  [96064/150000]\n",
      "loss: 0.551524  [102464/150000]\n",
      "loss: 0.533778  [108864/150000]\n",
      "loss: 0.577648  [115264/150000]\n",
      "loss: 0.544056  [121664/150000]\n",
      "loss: 0.614962  [128064/150000]\n",
      "loss: 0.539947  [134464/150000]\n",
      "loss: 0.612202  [140864/150000]\n",
      "loss: 0.560787  [147264/150000]\n",
      "Epoch 26\n",
      "-------------------------------\n",
      "loss: 0.562039  [   64/150000]\n",
      "loss: 0.557390  [ 6464/150000]\n",
      "loss: 0.611007  [12864/150000]\n",
      "loss: 0.615194  [19264/150000]\n",
      "loss: 0.574516  [25664/150000]\n",
      "loss: 0.613461  [32064/150000]\n",
      "loss: 0.606872  [38464/150000]\n",
      "loss: 0.565459  [44864/150000]\n",
      "loss: 0.585175  [51264/150000]\n",
      "loss: 0.625837  [57664/150000]\n",
      "loss: 0.561817  [64064/150000]\n",
      "loss: 0.596336  [70464/150000]\n",
      "loss: 0.600057  [76864/150000]\n",
      "loss: 0.573806  [83264/150000]\n",
      "loss: 0.641340  [89664/150000]\n",
      "loss: 0.608286  [96064/150000]\n",
      "loss: 0.649721  [102464/150000]\n",
      "loss: 0.671843  [108864/150000]\n",
      "loss: 0.564814  [115264/150000]\n",
      "loss: 0.632006  [121664/150000]\n",
      "loss: 0.537901  [128064/150000]\n",
      "loss: 0.519129  [134464/150000]\n",
      "loss: 0.604893  [140864/150000]\n",
      "loss: 0.565878  [147264/150000]\n",
      "Epoch 27\n",
      "-------------------------------\n",
      "loss: 0.637899  [   64/150000]\n",
      "loss: 0.619308  [ 6464/150000]\n",
      "loss: 0.565980  [12864/150000]\n",
      "loss: 0.580520  [19264/150000]\n",
      "loss: 0.517601  [25664/150000]\n",
      "loss: 0.600046  [32064/150000]\n",
      "loss: 0.522602  [38464/150000]\n",
      "loss: 0.585224  [44864/150000]\n",
      "loss: 0.616364  [51264/150000]\n",
      "loss: 0.607755  [57664/150000]\n",
      "loss: 0.672700  [64064/150000]\n",
      "loss: 0.615942  [70464/150000]\n",
      "loss: 0.563604  [76864/150000]\n",
      "loss: 0.516980  [83264/150000]\n",
      "loss: 0.568126  [89664/150000]\n",
      "loss: 0.643335  [96064/150000]\n",
      "loss: 0.598452  [102464/150000]\n",
      "loss: 0.645684  [108864/150000]\n",
      "loss: 0.615591  [115264/150000]\n",
      "loss: 0.621618  [121664/150000]\n",
      "loss: 0.534854  [128064/150000]\n",
      "loss: 0.589854  [134464/150000]\n",
      "loss: 0.635436  [140864/150000]\n",
      "loss: 0.554766  [147264/150000]\n",
      "Epoch 28\n",
      "-------------------------------\n",
      "loss: 0.521061  [   64/150000]\n",
      "loss: 0.636044  [ 6464/150000]\n",
      "loss: 0.637459  [12864/150000]\n",
      "loss: 0.643848  [19264/150000]\n",
      "loss: 0.644321  [25664/150000]\n",
      "loss: 0.495741  [32064/150000]\n",
      "loss: 0.607640  [38464/150000]\n",
      "loss: 0.588970  [44864/150000]\n",
      "loss: 0.559616  [51264/150000]\n",
      "loss: 0.594282  [57664/150000]\n",
      "loss: 0.576745  [64064/150000]\n",
      "loss: 0.550133  [70464/150000]\n",
      "loss: 0.598407  [76864/150000]\n",
      "loss: 0.547898  [83264/150000]\n",
      "loss: 0.620289  [89664/150000]\n",
      "loss: 0.625945  [96064/150000]\n",
      "loss: 0.588795  [102464/150000]\n",
      "loss: 0.564310  [108864/150000]\n",
      "loss: 0.471800  [115264/150000]\n",
      "loss: 0.661256  [121664/150000]\n",
      "loss: 0.512393  [128064/150000]\n",
      "loss: 0.518628  [134464/150000]\n",
      "loss: 0.637960  [140864/150000]\n",
      "loss: 0.634269  [147264/150000]\n",
      "Epoch 29\n",
      "-------------------------------\n",
      "loss: 0.532436  [   64/150000]\n",
      "loss: 0.582553  [ 6464/150000]\n",
      "loss: 0.599198  [12864/150000]\n",
      "loss: 0.599311  [19264/150000]\n",
      "loss: 0.537017  [25664/150000]\n",
      "loss: 0.545210  [32064/150000]\n",
      "loss: 0.594809  [38464/150000]\n",
      "loss: 0.560428  [44864/150000]\n",
      "loss: 0.585562  [51264/150000]\n",
      "loss: 0.615488  [57664/150000]\n",
      "loss: 0.601441  [64064/150000]\n",
      "loss: 0.586801  [70464/150000]\n",
      "loss: 0.612262  [76864/150000]\n",
      "loss: 0.569715  [83264/150000]\n",
      "loss: 0.617154  [89664/150000]\n",
      "loss: 0.615478  [96064/150000]\n",
      "loss: 0.627287  [102464/150000]\n",
      "loss: 0.594675  [108864/150000]\n",
      "loss: 0.527844  [115264/150000]\n",
      "loss: 0.601432  [121664/150000]\n",
      "loss: 0.560899  [128064/150000]\n",
      "loss: 0.590129  [134464/150000]\n",
      "loss: 0.634971  [140864/150000]\n",
      "loss: 0.612034  [147264/150000]\n",
      "Epoch 30\n",
      "-------------------------------\n",
      "loss: 0.663320  [   64/150000]\n",
      "loss: 0.590303  [ 6464/150000]\n",
      "loss: 0.548620  [12864/150000]\n",
      "loss: 0.606458  [19264/150000]\n",
      "loss: 0.551339  [25664/150000]\n",
      "loss: 0.597070  [32064/150000]\n",
      "loss: 0.568118  [38464/150000]\n",
      "loss: 0.656885  [44864/150000]\n",
      "loss: 0.517651  [51264/150000]\n",
      "loss: 0.619143  [57664/150000]\n",
      "loss: 0.576512  [64064/150000]\n",
      "loss: 0.551789  [70464/150000]\n",
      "loss: 0.639050  [76864/150000]\n",
      "loss: 0.568361  [83264/150000]\n",
      "loss: 0.549884  [89664/150000]\n",
      "loss: 0.634776  [96064/150000]\n",
      "loss: 0.555386  [102464/150000]\n",
      "loss: 0.525088  [108864/150000]\n",
      "loss: 0.613388  [115264/150000]\n",
      "loss: 0.529327  [121664/150000]\n",
      "loss: 0.554036  [128064/150000]\n",
      "loss: 0.570808  [134464/150000]\n",
      "loss: 0.573866  [140864/150000]\n",
      "loss: 0.593912  [147264/150000]\n",
      "Done!\n",
      "Train data:\n",
      "AUC value is: 0.6999572362931573\n",
      "Accuracy is: 0.6615666666666666\n",
      "Test data:\n",
      "AUC value is: 0.7051648361054951\n",
      "Accuracy is: 0.66606\n"
     ]
    }
   ],
   "source": [
    "attack_model = shadow_attack(sha_models=sha_models, tar_model=tar_model, model_num=num_shadowsets, weight_dir=weight_dir, data_name=DATA_NAME, model=MODEL, model_transform=model_transform, \n",
    "                  model_epochs=EPOCHS, batch_size=BATCH_SIZE, learning_rate=attack_lr, attack_epochs=30, attack_transform=attack_transform, \n",
    "                  device=device, prop_keep=0.5, top_k=None, attack_class=attack_class)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "id": "ab8c2490-c230-4516-a19a-df84a3779011",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Attack_NN(\n",
       "  (linear_relu_stack): Sequential(\n",
       "    (0): Linear(in_features=11, out_features=128, bias=True)\n",
       "    (1): ReLU()\n",
       "    (2): Linear(in_features=128, out_features=64, bias=True)\n",
       "    (3): ReLU()\n",
       "    (4): Linear(in_features=64, out_features=1, bias=True)\n",
       "  )\n",
       ")"
      ]
     },
     "execution_count": 40,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "attack_model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "id": "d1f87436-bae2-410f-a025-e9757ecd17d9",
   "metadata": {},
   "outputs": [],
   "source": [
    "targetX = conf_data_all[tar_model].astype(np.float32)\n",
    "targetY = train_keep[tar_model]\n",
    "targetX = np.concatenate((targetX, Y_data.reshape(Y_data.shape[0],1)), 1)\n",
    "targetX = targetX.astype(np.float32)\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "id": "ebd65e38-9469-4c5f-a5a3-041b675a9d3c",
   "metadata": {},
   "outputs": [],
   "source": [
    "top_k = None\n",
    "if top_k:\n",
    "    # 仅使用概率向量的前3个值\n",
    "    targetX, _ = get_top_k_conf(top_k, targetX, targetX)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "id": "8ba8d2b7-7d6c-4e17-95bd-f854e8af5af9",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "AUC value is: 0.7051648425054993\n",
      "Accuracy is: 0.66606\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.66606"
      ]
     },
     "execution_count": 43,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "shadow_attack_data = CustomDataset(targetX, targetY, attack_transform)\n",
    "shadow_attack_dataloader = DataLoader(shadow_attack_data, batch_size=batch_size, shuffle=True)\n",
    "attack_test_scores, attack_test_mem = get_attack_pred(shadow_attack_dataloader, attack_model, device)\n",
    "attack_test_scores, attack_test_mem = attack_test_scores.detach().cpu().numpy(), attack_test_mem.detach().cpu().numpy()\n",
    "evaluate_ROC(attack_test_scores, attack_test_mem)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "id": "c8cb250e-ad72-40b3-8967-46b8538be791",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "AUC value is: 0.6892114804089503\n",
      "Accuracy is: 0.6596\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.6596"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "evaluate_ROC(attack_test_scores[risk_rank[:2500]], attack_test_mem[risk_rank[:2500]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "eebaa471-2271-4310-b936-1030edb6f096",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c4bbfa8f-bd1a-46f6-9b79-a7c758d30f27",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b077751a-c0ee-4e39-9f70-41b791d348e6",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f06a3e24-5a57-472a-b6f0-78961403f471",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "40ceb18e-621c-4c72-8ef7-94498c7b65c5",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "30abf80e-2b00-4b70-ac0c-a7099f987cc4",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "9f0ae045-14bd-4159-948c-8881f74cd181",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d64311d6-c09d-47b3-8076-f11eb10134aa",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "87d308e9-be9d-40fb-be9e-e84efc339052",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0308f234-469f-4670-b4e4-51973cb27ce4",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "a9b3ad57-7f95-4bc7-8057-05aa0ca4e10a",
   "metadata": {},
   "source": [
    "### 绘制攻击成功率随风险变化曲线"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "03e43bb9-0680-487b-8433-805cb202031f",
   "metadata": {},
   "outputs": [],
   "source": [
    "pred_all = conf_data_all.argmax(2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "b7dda5b4-e79d-40b7-beb8-86d21143ac11",
   "metadata": {},
   "outputs": [],
   "source": [
    "base_att = (pred_all == Y_data)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "a79890e7-c8ea-4ddd-9e0a-c51831260ad9",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(100, 50000)"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "base_att.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "18020329-0447-496c-bffa-a59f865df12c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(100, 50000)"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_keep.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "id": "67ad94bc-8667-4f2b-ab7b-3d32a98e97d8",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(50000,)"
      ]
     },
     "execution_count": 30,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "pri_risk_all.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "id": "973858e9-c4fd-47cd-9941-c5730dfff80a",
   "metadata": {},
   "outputs": [],
   "source": [
    "X_axi = []\n",
    "Y_axi = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "4d44c0fa-c7b9-4f39-a245-0f8e3671141c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(50000,)"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "pri_risk_rank.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "id": "030de03c-e397-4141-8f84-6cd9910e91a7",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "# 输入loss_data_all，pri_risk_rank，train_keep，conf_data_all，pri_risk_all\n",
    "\n",
    "for i in range(100): # 对数据下标进行计数\n",
    "    start = i * 500\n",
    "    end = (i+1) * 500\n",
    "    risk_t = pri_risk_all[pri_risk_rank[start:end]]\n",
    "    for j in range(100): # 对目标模型进行计数\n",
    "        pred_temp = base_att[j][pri_risk_rank[start:end]]\n",
    "        mem_temp = train_keep[j][pri_risk_rank[start:end]]\n",
    "        if j==0:\n",
    "            pred_t = pred_temp\n",
    "            mem_t = mem_temp\n",
    "        else:\n",
    "            pred_t = np.concatenate((pred_t, pred_temp), 0)\n",
    "            mem_t = np.concatenate((mem_t, mem_temp), 0)\n",
    "    acc = metrics.accuracy_score(mem_t, pred_t)\n",
    "    risk = np.mean(risk_t)\n",
    "    X_axi.append(risk)\n",
    "    Y_axi.append(acc)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "a4df62ec-4eb6-4c8f-a522-737a9e0d0e06",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "id": "238bf52f-5f07-4176-9642-bd34be61a5a6",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAj8AAAGwCAYAAABGogSnAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABIJUlEQVR4nO3dd3xUVf7/8fekF1KAQAIhBELvYJBQBBUjiC6KuIqIgii4q+CicVVYBdTdn6zriqjLgoViF1Fsi18Eg4hAaKFXadITSCCF9Mzc3x+BgZhCBiaZmczr+XjMA3Ln3Dufy81435577rkmwzAMAQAAuAkPRxcAAABQkwg/AADArRB+AACAWyH8AAAAt0L4AQAAboXwAwAA3ArhBwAAuBUvRxdQ0ywWi06cOKGgoCCZTCZHlwMAAKrAMAxlZ2ercePG8vC4ur4btws/J06cUFRUlKPLAAAAV+Do0aNq0qTJVW3D7cJPUFCQpJJ/vODgYAdXAwAAqiIrK0tRUVHW8/jVcLvwc+FSV3BwMOEHAAAXY48hKwx4BgAAboXwAwAA3ArhBwAAuBXCDwAAcCuEHwAA4FYIPwAAwK0QfgAAgFsh/AAAALdC+AEAAG6F8AMAANwK4QcAALgVwg8AAHArbvdgUwAAYF+GYaig2KK8QrPyiswymaRGIf6OLqtChB8AAGq5wmKL8orMyi8yK7fQbA0p1j+LzMorLD7/s6Xk7xfa/m69/PKWF5llGBc/r0ezevr8z70ct8OXQfgBAMCBzBbjfNAoVn6hRblFxWXDSSV/5haZlX/+59zz4cT69/PLiy3G5QuxEx9PD5lMNfZxV4TwAwBABSwWQ/nF50PG74JF3vnQUV4vSF7h73tVKv6z0Gypsf3x9DApwNtTfj6e8vf2VICPp/y8L/n7Jcv9vUveC/DxlL/PJX8/397//HLr388v9/J0/uHEhB8AgEv6/TiTMj0iv+8FuWS59TJPUenLPL8PLflFNRdMTCaVDhaV/GkNJz6eCvC+NJx4nW/nIX9vrzLreXuaZHL2bpkaQPgBAFSL8seZFCuv0HLxMk8Fl26uZJxJdfPz9qig18NL/hfeuyR8BPh4le5VOb9emV6V88t9vTwIJjWE8AMAuGIZuYU6cPqcDpzKKfnz9DkdOJ2jY2dzVWSu2XEmft4lgaPCXpPfhRabLvl4ecrDg2BSWxB+AACVMlsMncjI0/7T53Tg1LlSYSc9p/Cy6186zqS8Xo9LL91U5ZKPv4+nAry95OdzsSfGFcaZwHkQfgAAkqTcwmIdPJ1j7b05cD7sHErLUUFxxWNfGof4qUXDOmrRoI5aNAhUiwZ1FB0WqDq+JZeAfLwIJnAuhB8AcCOGYej0uYIyl6kOnDqn4xl5Fa7n4+WhmLDAiwHnfNhpHhaoQF9OJXAt/MYCQC1UZLboyJlcHTh17vzlqothJzu/uML16gX6WHtvWjSooxYNS/7epG6APBnzglqC8AMALiwrv+j8OJyLl6kOnD6nw+m5FU5s52GSouoFlLpM1bJhHcU0qKN6gT41vAdAzSP8AICTs1gMnczKvzjY+HxPzv7T53Q6u6DC9QJ8PEsFnAuXqqLrB8jP27MG9wBwLoQfAHASFouhg2k52pOSVeoy1cHTOcorMle4Xniw78XLVJeMx4kI9uP2bKAchB8AcADDMHQ4PVfbjmdq+7EMbTuWqR3HM5VTWH7I8fIwqVlYYKnLVC0a1FFMg0AF+XnXcPWAayP8AEA1MwxDxzPytP1YprYdz9S2YxnafixTWeUMPPbz9lDbiGC1alin1O3jUfUC5M1cNoBdEH4AwM5Ss/K19WiGth/P1LZjmdp+PFNnypkM0MfLQ+0aBatLkxB1igxR5yahatEgkAn7gGpG+AGAq5B2rqCkR+dYprYfL7l8daqcQcheHia1bRSkTpGh6nw+7LQOD2ICQMABCD8AUEUZuYUXe3OOlVy+OpGZX6adh0lq
HR5U0psTFarOkSFqExHEHVaAkyD8AEA5svOLtON4lrYfz9DW82HnyJncMu1MJqlFgzrqHBmiTk1C1LlJiNo3CpG/D0EHcFaEHwBuL7ewWDtPZJ3v0cnQtuOZOng6p9y2zeoHqFOTUGvY6dA4mLutABdD+AHgdorMFm09mqFV+9O0Zn+6Nh05W+5syJGh/urcpGQgcucmIerYOEQhAQQdwNURfgDUeoZhaG9qtlbvT9fq/WladzC9zHw64cG+JSHnfI9Op8gQ1a/j66CKAVQnwg+AWul4Rp5W70sr6d05kK60c6XvwKob4K3eLcN0Xcsw9WkRpqb1AxxUKYCaRvgBUCuczSlU0sGSnp3V+9P0W3rpwcn+3p7q0bye+rSsrz4tw9QuIphHPwBuivADwCXlF5m14bcz1nE7O05kyrhk2I6nh0ldmoToupZh6t0yTN2ahsrXizuwABB+ALiIYrNF249nas2BdK3al6bkw2dVaLaUatM6vI56tyi5lBUXU4+7sACUi/ADwCkZhqEDp3O0en/JuJ21B9OV/btnYTUK8VOflmEll7JahKlhsJ+DqgXgSgg/AJzGycw8rd6frjXnBymnZJWePTnYz0u9WtS3XsqKCQuUycS4HQC2IfwAcJhLByknHUjXwbTSEwv6eHno2mZ1rZeyOkaGyJNBygCuEuEHQI3JLSzW+kNntOZASeDZdTKr1CBlD5PUqUmo+rQouSMrNrouz8MCYHeEHwDVprDYoi1HM7R6f5rWHEjTlqMZKjKXnkn5wiDl3i3qKy6mvkL8GaQMoHoRfgDYjcViaNfJLK05kKbV+9O14bczyv3dTMqRof7WuXZ6taivhkEMUgZQswg/AK5KZm6Rvtt2omTczsF0ZeQWlXq/fqCPerWor94tSu7KalovgEHKAByK8APgiuQVmjV/zW+atWK/si65BT3Qx1NxMfXV+/y4nTbhQcykDMCpEH4A2KTYbNHC5GOa8eOvSs0qeV5W6/A6Gty5sXq3DFPnJiHy9vRwcJUAUDHCD4AqMQxD/7cjRf/+Ya/1lvTIUH89NaC17ugayS3oAFwG4QfAZa3Zn6ZXluzR1mOZkqR6gT4af2NLjejZlOdlAXA5hB8AFdpxPFOvLNmjX/alSSoZzzOmb4zG9otRHV/+8wHANfFfLwBl/JaWo38v3av/bTspSfL2NGlEXLTG92+psDq+Dq4OAK4O4QeA1ansfL2ZuE+frT+qYoshk0ka0jVST8a3VtP6AY4uDwDswuG3ZMycOVPNmjWTn5+f4uLitH79+grbFhUV6aWXXlKLFi3k5+enLl26aMmSJTVYLVA7ZeUX6d8/7NX1/1qhj9YeUbHF0A1tGmjx4331+rCuBB8AtYpDe34WLFighIQEzZ49W3FxcZoxY4YGDhyovXv3qmHDhmXaP//88/roo4/07rvvqm3btvrhhx905513as2aNerWrZsD9gBwbbmFxfp47RHNXLHfOjlht6ahevaWtuoZU9/B1QFA9TAZhmFcvln1iIuL07XXXqv//Oc/kiSLxaKoqCg9/vjjmjhxYpn2jRs31nPPPadx48ZZl911113y9/fXRx99VKXPzMrKUkhIiDIzMxUcHGyfHQFczOnsAn2Q9Js+XHvYGnpaNqyjpwe20YD24czADMDp2PP87bCen8LCQiUnJ2vSpEnWZR4eHoqPj1dSUlK56xQUFMjPr/RzgPz9/bVq1aoKP6egoEAFBQXWn7Oysq6ycsB17T+Vrfd+OaRFm4+rsNgiSYquH6BxN7TU0Gsi5cXkhADcgMPCT1pamsxms8LDw0stDw8P1549e8pdZ+DAgZo+fbr69eunFi1aKDExUYsWLZLZbC63vSRNmzZNL774ol1rB1yJYRhaf+iM3ll5UIl7TlmXd2saqj/1i9HN7SOYoBCAW3Gpu73eeOMNjR07Vm3btpXJZFKLFi00evRozZ07t8J1Jk2apISEBOvPWVlZioqKqolyAYcqNlu0ZGeK
3l150Do5ockkDWgfrkf6xSg2up6DKwQAx3BY+AkLC5Onp6dSU1NLLU9NTVVERES56zRo0EBff/218vPzlZ6ersaNG2vixImKiYmp8HN8fX3l68u8JHAfOQXF+nzjUc1ZdUjHzuZJkny9PPTH2CZ6+LrmimlQx8EVAoBjOSz8+Pj4KDY2VomJiRoyZIikkgHPiYmJGj9+fKXr+vn5KTIyUkVFRfryyy91zz331EDFgHM7lZWv+Wt+00drD1ufsl4v0Ecje0XrgZ7Rqs/khAAgycGXvRISEjRq1Ch1795dPXr00IwZM5STk6PRo0dLkkaOHKnIyEhNmzZNkrRu3TodP35cXbt21fHjx/XCCy/IYrHomWeeceRuAA71a2q23vvloL7efEKF5pJBzM3DAjWmb3PddU0T+Xnz7C0AuJRDw8+wYcN0+vRpTZkyRSkpKeratauWLFliHQR95MgReXhcvPskPz9fzz//vA4ePKg6dero1ltv1YcffqjQ0FAH7QHgGIZh6Jd9aXpv1SGt/PW0dXn36Lp6pF+M4tuFy4NBzABQLofO8+MIzPMDV5ZfZNa3W07ovVUH9WvqOUmSh0ka2CFCY/rGKDa6roMrBIDqUSvm+QFQdennCvTR2iP6cO1vSjtXKKnkCevDrm2q0X2aKaoej58AgKoi/ABObP+pbM1ZdUhfbro4KWHjED+N7tNcw3pEKdjP28EVAoDrIfwATsYwDK3en673Vh3Uir0Xx/N0aRKiMX1jdEvHCHkzEzMAXDHCD+Akis0WfbX5uOasOqQ9KdmSLk5KOPb8eB6euQUAV4/wAziBjb+d0fNf77CGngAfT93TPUqj+zRTdP1AB1cHALUL4QdwoPRzBfrn/+3RwuRjkqTQAG/9qV8L3dejqUICGM8DANWB8AM4gMVi6LMNR/XKkj3KzCuSJN17bZSeuaWt6gX6OLg6AKjdCD9ADdtxPFPPfb1DW49mSJLaNwrW34d0ZI4eAKghhB+ghmTmFWn60r36cO1hWQwpyNdLCQNa64Ge0fLi7i0AqDGEH6CaGYahb7ac0D8W71bauQJJ0h1dG+u5W9upYbCfg6sDAPdD+AGq0b7UbE3+ZofWHjwjSYppEKh/3NFRvVuGObgyAHBfhB+gGuxNydYHSb9pwYajKrYY8vP20OP9W2ls3xj5eHGJCwAcifAD2ElhsUVLd6Xog6TDWn/ojHX5ze3DNeUP7Xn+FgA4CcIPcJVOZubp03VH9OmGozqdXTKmx9PDpJvbhWtk72j1bsElLgBwJoQf4AoYhqE1B9L1YdJhLdudKrPFkCQ1CPLV8B5NNbxHlBqF+Du4SgBAeQg/gA2y84v0RfIxfbj2sA6ezrEuj2teTw/0itaA9hGM6QEAJ0f4Aapo/aEzeuKzzTqRmS9JCvTx1NBrmuiBXtFqHR7k4OoAAFVF+AEuo9hs0ZvL9+s/y/fJYkhN6wVobL8Y3dktUnV8+QoBgKvhv9xAJY6dzdUTn23RxsNnJUl/jG2iF2/voEBCDwC4LP4LDlRg8baTmrhom7LzixXk66V/3NlRd3SNdHRZAICrRPgBfie3sFgvfrtLCzYelSR1axqqN+/txjw9AFBLEH6AS+w4nqm/fLZZB0/nyGSSxt3QUhPiW8mbB48CQK1B+AFUMm/P3NW/6ZX/26NCs0Xhwb56fVhXJigEgFqI8AO3dyanUE99vkU/7T0tqeRxFP+6q7PqBvo4uDIAQHUg/MCtbT2aocc+3qTjGXny8fLQ5Nva6f6e0TKZTI4uDQBQTQg/cEuGYeiT9Uf04re7VGi2qFn9AM26P1btGgU7ujQAQDUj/MDt5BWa9dzX27Vo03FJ0oD24fr3PV0U7Oft4MoAADWB8AO3cigtR49+lKw9KdnyMEnP3tJWj/SL4TIXALgRwg/cxg87U/TXz7cqu6BYYXV89Nbwa9SrRX1HlwUAqGGEH9R6xWaLXl26V2//fFCS1D26rmaOuEbhwX4OrgwA4AiE
H9RqaecKNP6TTVp78Iwk6aE+zTXp1rZMWggAbozwg1rr4OlzGjVvvY6eyVOgj6de+WNn/aFzY0eXBQBwMMIPaqXkw2c15v0NOptbpKb1AjT3we5q2TDI0WUBAJwA4Qe1ztKdKXr8080qKLaoc5MQzRl1rRoE+Tq6LACAkyD8oFb5cO1hTf1mhyyG1L9tQ/3nvm4K8OHXHABwEWcF1AqGYehfP+zVrBUHJEnDe0Tp73d0lBcDmwEAv0P4gcsrLLbo2S+36avNJTM2J9zcWo/3b8nEhQCAchF+4NKy84v06EebtGp/mjw9TJo2tJPu6R7l6LIAAE6M8AOXlZqVr1Fz12tPSrYCfTz13/tjdX3rBo4uCwDg5Ag/cDmZeUVauPGo3ll5UKeyC9QgyFfzHrxWHSNDHF0aAMAFEH7gMvalZuv9pN/0ZfJx5RWZJUktGgRq/ugeiqoX4ODqAACugvADp2a2GPppzynNX/ObVu1Psy5vGxGkUb2baUjXSPn7eDqwQgCAqyH8wClduLT1QdJhHTmTK0nyMEk3tw/Xg72bq2dMPe7mAgBcEcIPnM7320/q6YVblVNYcmkrxN9b914bpft7RnN5CwBw1Qg/cCpzVx3S3xfvkmFIrcPraHSf5lzaAgDYFeEHTsFiMTTt/3br3V8OSZJG9orW1MEd5OnBpS0AgH0RfuBwBcVm/XXhNn239YQk6dlb2urP18cwpgcAUC0IP3CozLwi/enDjVp78Iy8PEx69e7OurNbE0eXBQCoxQg/cJiTmXl6cO4G7U3NVh1fL826/xr1bcUMzQCA6kX4gUP8mpqtUXPX62RmvhoG+Wre6GvVoTEzNAMAqh/hBzVu7cF0PfLBRmXlF6tFg0C9/1APNanLLewAgJpB+EGNWrjxqP721XYVmQ11j66r90Z1V2iAj6PLAgC4EcIPaoTFYujVpXs1a8UBSdJtnRrptXu6yM+b+XsAADWL8INql1tYrIQFW7VkZ4ok6fH+LfVkfGt5MIcPAMABCD+oVqlZ+Rrz/kZtP54pH08PvfLHTtzKDgBwKMIPqs2O45ka8/5GpWTlq16gj955IFbdm9VzdFkAADdH+EG1+GFnip74bIvyisxq1bCO5oy6Vk3rc0cXAMDxCD+wq6z8Ik1f+qveT/pNhiH1bRWmmSOuUbCft6NLAwBAEuEHdmIYhr7dekJ//99upZ0rkFTycNIpf2gvL08PB1cHAMBFhB9ctf2nsjX5651KOpguSYoJC9RLd3TUda3CHFwZAABlEX5wxXILi/XW8v1675eDKjIb8vXy0OP9W2psvxj5ejF/DwDAORF+cEV+2XdaE7/cruMZeZKkm9o21Au3d1BUPQY1AwCcG+EHNks+fFYPz9+oQrNFkaH+euH2Drq5fbijywIAoEocPhJ15syZatasmfz8/BQXF6f169dX2n7GjBlq06aN/P39FRUVpSeffFL5+fk1VC2OZ+TpTx+WBJ/4dg21LKEfwQcA4FJsDj+jRo3SypUr7fLhCxYsUEJCgqZOnapNmzapS5cuGjhwoE6dOlVu+08++UQTJ07U1KlTtXv3bs2ZM0cLFizQ3/72N7vUg8rlFhZr7PsblXauUG0jgvTGvd0U4EPnIQDAtdgcfjIzMxUfH69WrVrp5Zdf1vHjx6/4w6dPn66xY8dq9OjRat++vWbPnq2AgADNnTu33PZr1qxRnz59dN9996lZs2YaMGCAhg8fftneIlw9i8XQU59v1a6TWaof6KP3RnVXoC/BBwDgemwOP19//bWOHz+uRx99VAsWLFCzZs00aNAgffHFFyoqKqrydgoLC5WcnKz4+PiLxXh4KD4+XklJSeWu07t3byUnJ1vDzsGDB/X999/r1ltvrfBzCgoKlJWVVeoF272RuE//tyNF3p4mzX4gVk3qMrAZAOCarmjMT4MGDZSQkKCtW7dq3bp1atmypR544AE1btxYTz75pPbt23fZbaSlpclsNis8vPR4kfDwcKWk
pJS7zn333aeXXnpJ1113nby9vdWiRQvdcMMNlV72mjZtmkJCQqyvqKgo23YW+t+2E3ojseSY/r8hnXQtz+cCALiwqxrwfPLkSS1btkzLli2Tp6enbr31Vm3fvl3t27fX66+/bq8arVasWKGXX35Z//3vf7Vp0yYtWrRIixcv1t///vcK15k0aZIyMzOtr6NHj9q9rtpsx/FM/XXhVknSmOua655rCY8AANdm86CNoqIiffvtt5o3b56WLl2qzp0764knntB9992n4OBgSdJXX32lhx56SE8++WSF2wkLC5Onp6dSU1NLLU9NTVVERES560yePFkPPPCAxowZI0nq1KmTcnJy9Mgjj+i5556Th0fZLOfr6ytfX19bdxOSTmXla+wHG5VfZNH1rRto0q3tHF0SAABXzebw06hRI1ksFutA465du5Zpc+ONNyo0NLTS7fj4+Cg2NlaJiYkaMmSIJMlisSgxMVHjx48vd53c3NwyAcfTs2QmYcMwbN0VVGLXiSz95bPNOpmZrxYNAvXWfd3k6WFydFkAAFw1m8PP66+/rrvvvlt+fn4VtgkNDdWhQ4cuu62EhASNGjVK3bt3V48ePTRjxgzl5ORo9OjRkqSRI0cqMjJS06ZNkyQNHjxY06dPV7du3RQXF6f9+/dr8uTJGjx4sDUE4epYLIbmrj6kfy3Zq0KzRQ2CfPXeqGt5KjsAoNawOfzcfvvtys3NLRN+zpw5Iy8vL+ulr6oYNmyYTp8+rSlTpiglJUVdu3bVkiVLrIOgjxw5Uqqn5/nnn5fJZNLzzz+v48ePq0GDBho8eLD+3//7f7buBsqRmpWvvy7cql/2pUmS4ts11Ct3dVb9Olw2BADUHibDxutFgwYN0uDBg/XYY4+VWj579mx9++23+v777+1aoL1lZWUpJCREmZmZNgW12m7JjhRNWrRNZ3OL5Oftocl/aK/7ejSVycSlLgCA49nz/G3z3V7r1q3TjTfeWGb5DTfcoHXr1l1VMah5uYXFmrRom/78UbLO5hapQ+Ng/e/xvhoRF03wAQDUSjZf9iooKFBxcXGZ5UVFRcrLy7NLUagZuYXFuvedtdp2LFMmk/RIvxg9dXMb+Xg5/JFvAABUG5vPcj169NA777xTZvns2bMVGxtrl6JQ/cwWQxM+26JtxzJVN8BbHz8cp0mD2hF8AAC1ns09P//4xz8UHx+vrVu36qabbpIkJSYmasOGDVq6dKndC0T1mPb9bi3blSofTw+9O7K7ujNrMwDATdj8v/l9+vRRUlKSoqKi9Pnnn+u7775Ty5YttW3bNvXt27c6aoSdfbT2sN5bVTIVwat3dyb4AADcis13e7k6d7/ba8XeU3r4/Y0yWww9dXNrPX5TK0eXBADAZdnz/G3zZa9L5efnq7CwsNQydwwUrmJPSpbGf7JZZouhu65povH9Wzq6JAAAapzNl71yc3M1fvx4NWzYUIGBgapbt26pF5zTqax8PTRvg84VFCuueT1NG9qJW9kBAG7J5vDz9NNPa/ny5Zo1a5Z8fX313nvv6cUXX1Tjxo31wQcfVEeNuEoZuYV6+P2NOpGZr5iwQL39QCx3dQEA3JbNl72+++47ffDBB7rhhhs0evRo9e3bVy1btlR0dLQ+/vhjjRgxojrqxBU6djZXo+au14HTOaob4K25D16r0AAfR5cFAIDD2Py//2fOnFFMTIykkvE9Z86ckSRdd911WrlypX2rw1XZdSJLQ/+7RgdO56hRiJ8W/KmXmoUFOrosAAAcyubwExMTY31ie9u2bfX5559LKukRCg0NtWtxuHJr9qdp2NtJOpVdoNbhdfTlo73VOjzI0WUBAOBwNoef0aNHa+vWrZKkiRMnaubMmfLz89OTTz6pp59+2u4Fwnbfbj2hUfPWK7ugWD2a19PCP/dW41B/R5cFAIBTuOp5fg4fPqzk5GS1bNlSnTt3tldd1aa2z/Pz2fojmrhouyTp1k4Rmn5PV/l5ezq4KgAAro7D
nupeVFSkm266Sfv27bMui46O1tChQ10i+NR2m4+c1eRvdkiSRvWK1lvDryH4AADwOzbd7eXt7a1t27ZVVy24CmdzCjX+k80qMhsa1DFCL9zegXl8AAAoh81jfu6//37NmTOnOmrBFbJYDCV8vkXHM/LUrH6AXvljZ4IPAAAVsHmen+LiYs2dO1c//vijYmNjFRhY+tbp6dOn2604VM2snw/op72n5ePloZkjrlGwn7ejSwIAwGnZHH527Niha665RpL066+/lnqP3oaal3QgXa8t3StJeun2DurQOMTBFQEA4NxsDj8//fRTddSBK3AqO1+Pf7pZFkMaek2khl0b5eiSAABwejzgyUWZLYYmfLpFaedKJjH8x5CO9LwBAFAFNvf83HjjjZWeZJcvX35VBaFq/rN8v5IOpivAx1P/HRGrAB+bDyUAAG7J5jNm165dS/1cVFSkLVu2aMeOHRo1apS96kIlkg6k643EkvFW/xjSUS0b1nFwRQAAuA6bw8/rr79e7vIXXnhB586du+qCULn0cwWa8FnJOJ8/xjbR0GuaOLokAABcit3G/Nx///2aO3euvTaHclgshp5auFWnsgvUokGgXrqjg6NLAgDA5dgt/CQlJcnPz89em0M53v3loFbsPS3f8/P5MM4HAADb2Xz2HDp0aKmfDcPQyZMntXHjRk2ePNluhaG0Hccz9eoPJfP5vHB7B7WNqH0PZQUAoCbYHH5CQkpPoufh4aE2bdropZde0oABA+xWGC4yDEMvfLtTxZaS53bdy3w+AABcMZvDz7x586qjDlTi260ntPHwWfl7e2rK4PbM5wMAwFWweczPhg0btG7dujLL161bp40bN9qlKFyUU1Csad/vkSSNu7GFGoX4O7giAABcm83hZ9y4cTp69GiZ5cePH9e4cePsUhQu+u+K/UrJyldUPX+N6Rvj6HIAAHB5NoefXbt2WR9seqlu3bpp165ddikKJQ6n5+jdlYckSc/f1l5+3p4OrggAANdnc/jx9fVVampqmeUnT56Ulxe3XtvTPxbvVqHZor6twjSgfbijywEAoFawOfwMGDBAkyZNUmZmpnVZRkaG/va3v+nmm2+2a3HubONvZ7RsV6q8PEyayiBnAADsxuaumn//+9/q16+foqOj1a1bN0nSli1bFB4erg8//NDuBbqrt1celFTyCIuWDYMcXA0AALWHzeEnMjJS27Zt08cff6ytW7fK399fo0eP1vDhw+Xt7V0dNbqdA6fP6cfdJZcWGeQMAIB9XdEgncDAQD3yyCP2rgXnzVl1SIYhxbdryBPbAQCwM5vH/EybNq3cB5jOnTtXr7zyil2Kcmdp5wr0ZfIxSdIj/Vo4uBoAAGofm8PP22+/rbZt25ZZ3qFDB82ePdsuRbmzD5IOq6DYoi5Robq2WV1HlwMAQK1jc/hJSUlRo0aNyixv0KCBTp48aZei3FVeoVkfJv0mSXqkbwx3eAEAUA1sDj9RUVFavXp1meWrV69W48aN7VKUu/py0zGdzS1SVD1/DezAvD4AAFQHmwc8jx07Vk888YSKiorUv39/SVJiYqKeeeYZPfXUU3Yv0J18u+WEJGlkz2by8rQ5lwIAgCqwOfw8/fTTSk9P12OPPabCwkJJkp+fn5599llNmjTJ7gW6i7RzBdp4+IwkaVCnCAdXAwBA7WVz+DGZTHrllVc0efJk7d69W/7+/mrVqpV8fX2roz63sXz3KVkMqWNksJrUDXB0OQAA1FpX/DCuOnXq6Nprr7VnLW7th50pkqQB7en1AQCgOl1R+Nm4caM+//xzHTlyxHrp64JFixbZpTB3klNQrF/2p0mSBjDQGQCAamXzqNrPPvtMvXv31u7du/XVV1+pqKhIO3fu1PLlyxUSElIdNdZ6K389rcJii6LrB6hNOM/xAgCgOtkcfl5++WW9/vrr+u677+Tj46M33nhDe/bs0T333KOmTZtWR4213tJdJc/xGtA+nLl9AACoZjaHnwMH
Dui2226TJPn4+CgnJ0cmk0lPPvmk3nnnHbsXWNsVmS1KPP8Q0wEdGO8DAEB1szn81K1bV9nZ2ZJKnvC+Y8cOSVJGRoZyc3PtW50bWHfwjLLyi1U/0EfXNOVxFgAAVDebBzz369dPy5YtU6dOnXT33XdrwoQJWr58uZYtW6abbrqpOmqs1S7c5XVz+3B5enDJCwCA6mZz+PnPf/6j/Px8SdJzzz0nb29vrVmzRnfddZeef/55uxdYm2XnF+nrzcclSbd05JIXAAA1webwU69ePevfPTw8NHHiRLsW5E4+XX9E2QXFatmwjvq1auDocgAAcAs8QMpBCostmrPqkCTpkX4x8uCSFwAANYLw4yDfbDmu1KwChQf76o6ujR1dDgAAboPw4wAWi6F3Vh6UJD3Up7l8vTwdXBEAAO6D8OMAP/96WvtOnVMdXy8Nj2NiSAAAapLN4eenn36q8L2ZM2deVTHuYtH5O7zu7t5EwX7eDq4GAAD3YnP4GTp0qJKTk8ssf+ONNzRp0iS7FFWb5RWarTM6396FsT4AANQ0m8PPq6++qkGDBmnPnj3WZa+99pqmTJmixYsX27W42mj5nlPKLTSrSV1/dY0KdXQ5AAC4HZvn+RkzZozOnDmj+Ph4rVq1SgsWLNDLL7+s77//Xn369KmOGmuV/207IUm6rXMjHmIKAIAD2Bx+JOmZZ55Renq6unfvLrPZrB9++EE9e/a0d221Tk5BsZbvOSVJGtyZS14AADhClcLPm2++WWZZZGSkAgIC1K9fP61fv17r16+XJP3lL3+xb4W1yI+7U1VQbFF0/QB1aBzs6HIAAHBLJsMwjMs1at68edU2ZjLp4MGDNhcxc+ZMvfrqq0pJSVGXLl301ltvqUePHuW2veGGG/Tzzz+XWX7rrbdWacxRVlaWQkJClJmZqeDgmg0gI95bq9X70zX+xpb668A2NfrZAAC4Mnuev6vU83Po0KGr+pDKLFiwQAkJCZo9e7bi4uI0Y8YMDRw4UHv37lXDhg3LtF+0aJEKCwutP6enp6tLly66++67q61Ge/g1NVur96fLwyTd2yPK0eUAAOC2HD7J4fTp0zV27FiNHj1a7du31+zZsxUQEKC5c+eW275evXqKiIiwvpYtW6aAgIAKw09BQYGysrJKvRxh/prfJEkD2keoSd0Ah9QAAACuIPzcddddeuWVV8os/9e//mVz70thYaGSk5MVHx9/sSAPD8XHxyspKalK25gzZ47uvfdeBQYGlvv+tGnTFBISYn1FRdV8r0tmbpEWbTomSXqwT7Ma/3wAAHCRzeFn5cqVuvXWW8ssHzRokFauXGnTttLS0mQ2mxUeHl5qeXh4uFJSUi67/vr167Vjxw6NGTOmwjaTJk1SZmam9XX06FGbarSH/9txUvlFFrWNCFJc83o1/vkAAOAim291P3funHx8fMos9/b2rvFLSnPmzFGnTp0qHBwtSb6+vvL19a3Bqspaf+iMJCm+XThz+wAA4GA29/x06tRJCxYsKLP8s88+U/v27W3aVlhYmDw9PZWamlpqeWpqqiIiIipdNycnR5999pkefvhhmz7TEdb/VhJ+etDrAwCAw9nc8zN58mQNHTpUBw4cUP/+/SVJiYmJ+vTTT7Vw4UKbtuXj46PY2FglJiZqyJAhkiSLxaLExESNHz++0nUXLlyogoIC3X///bbuQo06kZGnY2fz5GGSromu6+hyAABwezaHn8GDB+vrr7/Wyy+/rC+++EL+/v7q3LmzfvzxR11//fU2F5CQkKBRo0ape/fu6tGjh2bMmKGcnByNHj1akjRy5EhFRkZq2rRppdabM2eOhgwZovr169v8mTVpw/len46RIarje0UTagMAADu6orPxbbfdpttuu80uBQwbNkynT5/WlClTlJKSoq5du2rJkiXWQdBHjhyRh0fpq3N79+7VqlWrtHTpUrvUUJ3WnR/v06MZl7wAAHAGVZrhuTap6RmeB7z+s35NPae3H4jV
wA6Vj2MCAADlq/EZni9lNpv1+uuv6/PPP9eRI0dKzbYsSWfOnLmqgmqT7Pwi7Tt1TpJ0TVPG+wAA4AxsvtvrxRdf1PTp0zVs2DBlZmYqISFBQ4cOlYeHh1544YVqKNF1bT+WKcOQIkP91SDIsbfbAwCAEjaHn48//ljvvvuunnrqKXl5eWn48OF67733NGXKFK1du7Y6anRZm49mSJK6Ng11aB0AAOAim8NPSkqKOnXqJEmqU6eOMjMzJUl/+MMfqvRUdXey5Xz46RYV6tA6AADARTaHnyZNmujkyZOSpBYtWljvuNqwYYPDZ1J2Nlsv9PwQfgAAcBo2h58777xTiYmJkqTHH39ckydPVqtWrTRy5Eg99NBDdi/QVZ3OLtCp7AKZTFKHxiGOLgcAAJxn891e//znP61/HzZsmKKjo7VmzRq1atVKgwcPtmtxrmxvSrYkqXn9QPn7eDq4GgAAcIHN4WflypXq3bu3vLxKVu3Zs6d69uyp4uJirVy5Uv369bN7ka5oT0rJQ17bRAQ5uBIAAHApmy973XjjjeXO5ZOZmakbb7zRLkXVBnvO9/y0jaj+iRQBAEDV2Rx+DMOQyWQqszw9PV2BgYF2Kao2oOcHAADnVOXLXkOHDpUkmUwmPfjgg6Xu7DKbzdq2bZt69+5t/wpdkNliaF9qyczOhB8AAJxLlcNPSEjJHUuGYSgoKEj+/v7W93x8fNSzZ0+NHTvW/hW6oCNnclVQbJGft4ea1gtwdDkAAOASVQ4/8+bNkyQ1a9ZMTz/9tAICOKlX5MKdXq3Dg+TpUfYSIQAAcBybx/z8/PPPZR5mKpU8bbV///52KcrV/ZpaEn5aNeSSFwAAzsZu4Sc/P1+//PKLXYpydb+l50iSYhowABwAAGdT5cte27Ztk1Qy5mfXrl1KSUmxvmc2m7VkyRJFRkbav0IXdOxMniSpSV3/y7QEAAA1rcrhp2vXrjKZTDKZTOVe3vL399dbb71l1+Jc1bGzuZKkKAY7AwDgdKocfg4dOiTDMBQTE6P169erQYMG1vd8fHzUsGFDeXryGIfCYotOZuVLkqLqEn4AAHA2VQ4/0dHRkiSLxVJtxdQGqVn5MgzJx9NDYXV8HF0OAAD4HZuf7XXBrl27dOTIkTKDn2+//farLsqVZeYVSZLqBnqXOxM2AABwLJvDz8GDB3XnnXdq+/btMplMMgxDkqwnerPZbN8KXUxGbkn4CfWn1wcAAGdk863uEyZMUPPmzXXq1CkFBARo586dWrlypbp3764VK1ZUQ4mu5WxuSU9YSIC3gysBAADlsbnnJykpScuXL1dYWJg8PDzk4eGh6667TtOmTdNf/vIXbd68uTrqdBkZFy57EX4AAHBKNvf8mM1mBQWVzFwcFhamEydOSCoZEL137177VueCMs/3/HDZCwAA52Rzz0/Hjh21detWNW/eXHFxcfrXv/4lHx8fvfPOO4qJiamOGl3KmZzzY34C6fkBAMAZ2Rx+nn/+eeXklDy+4aWXXtIf/vAH9e3bV/Xr19eCBQvsXqCrOX2uQJLUoI6vgysBAADlsTn8DBw40Pr3li1bas+ePTpz5ozq1q3Lrd2S0rJLwk8Y4QcAAKd0xfP8XKpevXr22EytkHaO8AMAgDOzecAzKncmp2TAc31mdwYAwCkRfuzIMAxl5ZcMeA7xZ8AzAADOiPBjRwXFFhWZS2a8Dib8AADglAg/dpR1foJDD5MU6MMT7gEAcEaEHzu6cMkr2J+HmgIA4KwIP3aUlV8sSarja5eb6AAAQDUg/NhR9vnwE+THeB8AAJwV4ceOss9f9gryo+cHAABnRfixo3MXen647AUAgNMi/NjRuYKS8BNI+AEAwGkRfuyooNgiSfLz5p8VAABnxVnajvKLzJIkP2/m+AEAwFkRfuzoYs8P4QcAAGdF+LEja8+PF/+sAAA4K87SdlRQVNLz40P4AQDAaXGWtqMiM+EHAABnx1najgouhB9P/lkBAHBWnKXt
qOj8gGdven4AAHBanKXt6MJlL296fgAAcFqcpe2okMteAAA4Pc7SdnQ2p+TBpiEBPNUdAABnRfixo7zz8/zwYFMAAJwX4ceOii0ll708PUwOrgQAAFSE8GNHZrMhSfLy4J8VAABnxVnajootJeGHnh8AAJwX4ceOzOfDj5cn4QcAAGdF+LGjCz0/HibCDwAAzorwY0eF52d49mWGZwAAnBZnaTtihmcAAJwfZ2k7KmbMDwAATo/wYyeGYVj/zpgfAACcF+HHTiwXs4+40x0AAOdF+LET8yXpx0TPDwAATovwYyeWUpe9HFgIAAColMPDz8yZM9WsWTP5+fkpLi5O69evr7R9RkaGxo0bp0aNGsnX11etW7fW999/X0PVVswoddmL9AMAgLNy6OPHFyxYoISEBM2ePVtxcXGaMWOGBg4cqL1796phw4Zl2hcWFurmm29Ww4YN9cUXXygyMlKHDx9WaGhozRf/OxceairxeAsAAJyZQ8PP9OnTNXbsWI0ePVqSNHv2bC1evFhz587VxIkTy7SfO3euzpw5ozVr1sjb21uS1KxZs5osuULF5otdP8zzAwCA83LYWbqwsFDJycmKj4+/WIyHh+Lj45WUlFTuOt9++6169eqlcePGKTw8XB07dtTLL78ss9lc4ecUFBQoKyur1Ks6FF8y4JmeHwAAnJfDwk9aWprMZrPCw8NLLQ8PD1dKSkq56xw8eFBffPGFzGazvv/+e02ePFmvvfaa/vGPf1T4OdOmTVNISIj1FRUVZdf9uODCPD/kHgAAnJtLXZ+xWCxq2LCh3nnnHcXGxmrYsGF67rnnNHv27ArXmTRpkjIzM62vo0ePVk9t5zt+GOwMAIBzc9iYn7CwMHl6eio1NbXU8tTUVEVERJS7TqNGjeTt7S1PT0/rsnbt2iklJUWFhYXy8fEps46vr698fX3tW3w5LtzqTvYBAMC5Oaznx8fHR7GxsUpMTLQus1gsSkxMVK9evcpdp0+fPtq/f78sl9xZ9euvv6pRo0blBp+adGGSQ3p+AABwbg697JWQkKB3331X77//vnbv3q1HH31UOTk51ru/Ro4cqUmTJlnbP/roozpz5owmTJigX3/9VYsXL9bLL7+scePGOWoXrC70/DDYGQAA5+bQW92HDRum06dPa8qUKUpJSVHXrl21ZMkS6yDoI0eOyMPjYj6LiorSDz/8oCeffFKdO3dWZGSkJkyYoGeffdZRu2DFmB8AAFyDybj0ceRuICsrSyEhIcrMzFRwcLDdtnvw9Dn1f+1nBfl5afsLA+22XQAAYN/zt0vd7eXMLiRI+n0AAHBuhB87udB/xhPdAQBwboQfu+FWdwAAXAHhx06sPT+OLQMAAFwG4cdOrGN+6PoBAMCpEX7shJ4fAABcA+HHTgzG/AAA4BIIP3ZycbYk0g8AAM6M8GMnFx5vwdMtAABwboQfO7k4z49j6wAAAJUj/NiZicteAAA4NcIPAABwK4QfO+OyFwAAzo3wYycX7/YCAADOjPBjJ9Z5fhxcBwAAqBzhx054qjsAAK6B8AMAANwK4QcAALgVwo+dMN4ZAADXQPgBAABuhfADAADcCuHHzrjZCwAA50b4AQAAboXwAwAA3Arhx04Mnm8BAIBLIPwAAAC3QvgBAABuhfADAADcCuHHzrjVHQAA50b4AQAAboXwAwAA3ArhBwAAuBXCDwAAcCuEHzthikMAAFwD4QcAALgVwg8AAHArhB8AAOBWCD8AAMCtEH4AAIBbIfwAAAC3QvgBAABuhfBjJ7kFZknS0TN5Dq4EAABUhvBjJ4u3n3B0CQAAoAoIP3Zye5dIR5cAAACqgPBjJ77eJf+UTesFOLgSAABQGcIPAABwK4QfAADgVgg/AADArRB+AACAWyH8AAAAt0L4AQAAboXwAwAA3ArhBwAAuBXCDwAAcCuEHwAA4FYIPwAAwK0QfgAAgFsh/AAAALdC+LGzrPwiR5cAAAAqQfixk19TsiVJGbmE
HwAAnBnhx04OpeU4ugQAAFAFhB87GdAhwtElAACAKiD82InJVPJn03oBji0EAABUyinCz8yZM9WsWTP5+fkpLi5O69evr7Dt/PnzZTKZSr38/PxqsNryncjIkyQdOZPr4EoAAEBlHB5+FixYoISEBE2dOlWbNm1Sly5dNHDgQJ06darCdYKDg3Xy5Enr6/DhwzVYcfnGf7LZ0SUAAIAqcHj4mT59usaOHavRo0erffv2mj17tgICAjR37twK1zGZTIqIiLC+wsPDa7Di8k0c1NbRJQAAgCpwaPgpLCxUcnKy4uPjrcs8PDwUHx+vpKSkCtc7d+6coqOjFRUVpTvuuEM7d+6ssG1BQYGysrJKvarDn69vod/+eZt+++dt1bJ9AABgHw4NP2lpaTKbzWV6bsLDw5WSklLuOm3atNHcuXP1zTff6KOPPpLFYlHv3r117NixcttPmzZNISEh1ldUVJTd9wMAALgOh1/2slWvXr00cuRIde3aVddff70WLVqkBg0a6O233y63/aRJk5SZmWl9HT16tIYrBgAAzsTLkR8eFhYmT09PpaamllqempqqiIiqzZvj7e2tbt26af/+/eW+7+vrK19f36uuFQAA1A4O7fnx8fFRbGysEhMTrcssFosSExPVq1evKm3DbDZr+/btatSoUXWVCQAAahGH9vxIUkJCgkaNGqXu3burR48emjFjhnJycjR69GhJ0siRIxUZGalp06ZJkl566SX17NlTLVu2VEZGhl599VUdPnxYY8aMceRuAAAAF+Hw8DNs2DCdPn1aU6ZMUUpKirp27aolS5ZYB0EfOXJEHh4XO6jOnj2rsWPHKiUlRXXr1lVsbKzWrFmj9u3bO2oXAACACzEZhmE4uoialJWVpZCQEGVmZio4ONjR5QAAgCqw5/nb5e72AgAAuBqEHwAA4FYIPwAAwK0QfgAAgFsh/AAAALdC+AEAAG6F8AMAANyKwyc5rGkXpjXKyspycCUAAKCqLpy37TE9oduFn+zsbElSVFSUgysBAAC2ys7OVkhIyFVtw+1meLZYLDpx4oSCgoJkMpnsuu2srCxFRUXp6NGjtXr2aPaz9nGXfWU/axd32U/Jffa1sv00DEPZ2dlq3LhxqcdeXQm36/nx8PBQkyZNqvUzgoODa/Uv5wXsZ+3jLvvKftYu7rKfkvvsa0X7ebU9Phcw4BkAALgVwg8AAHArhB878vX11dSpU+Xr6+voUqoV+1n7uMu+sp+1i7vsp+Q++1pT++l2A54BAIB7o+cHAAC4FcIPAABwK4QfAADgVgg/AADArRB+bDRz5kw1a9ZMfn5+iouL0/r16yttv3DhQrVt21Z+fn7q1KmTvv/++xqq9MpMmzZN1157rYKCgtSwYUMNGTJEe/furXSd+fPny2QylXr5+fnVUMVX5oUXXihTc9u2bStdx9WO5QXNmjUrs68mk0njxo0rt72rHM+VK1dq8ODBaty4sUwmk77++utS7xuGoSlTpqhRo0by9/dXfHy89u3bd9nt2vodr26V7WdRUZGeffZZderUSYGBgWrcuLFGjhypEydOVLrNK/n9rwmXO6YPPvhgmbpvueWWy27XlY6ppHK/ryaTSa+++mqF23TGY1qV80l+fr7GjRun+vXrq06dOrrrrruUmppa6Xav9Lt9KcKPDRYsWKCEhARNnTpVmzZtUpcuXTRw4ECdOnWq3PZr1qzR8OHD9fDDD2vz5s0aMmSIhgwZoh07dtRw5VX3888/a9y4cVq7dq2WLVumoqIiDRgwQDk5OZWuFxwcrJMnT1pfhw8frqGKr1yHDh1K1bxq1aoK27risbxgw4YNpfZz2bJlkqS77767wnVc4Xjm5OSoS5cumjlzZrnv/+tf/9Kbb76p2bNna926dQoMDNTAgQOVn59f4TZt/Y7XhMr2Mzc3V5s2bdLkyZO1adMmLVq0SHv37tXtt99+2e3a8vtfUy53TCXplltuKVX3p59+Wuk2Xe2YSiq1
fydPntTcuXNlMpl01113VbpdZzumVTmfPPnkk/ruu++0cOFC/fzzzzpx4oSGDh1a6Xav5LtdhoEq69GjhzFu3Djrz2az2WjcuLExbdq0ctvfc889xm233VZqWVxcnPGnP/2pWuu0p1OnThmSjJ9//rnCNvPmzTNCQkJqrig7mDp1qtGlS5cqt68Nx/KCCRMmGC1atDAsFku577vi8ZRkfPXVV9afLRaLERERYbz66qvWZRkZGYavr6/x6aefVrgdW7/jNe33+1me9evXG5KMw4cPV9jG1t9/RyhvX0eNGmXccccdNm2nNhzTO+64w+jfv3+lbVzhmP7+fJKRkWF4e3sbCxcutLbZvXu3IclISkoqdxtX+t3+PXp+qqiwsFDJycmKj4+3LvPw8FB8fLySkpLKXScpKalUe0kaOHBghe2dUWZmpiSpXr16lbY7d+6coqOjFRUVpTvuuEM7d+6sifKuyr59+9S4cWPFxMRoxIgROnLkSIVta8OxlEp+jz/66CM99NBDlT7Y1xWP56UOHTqklJSUUscsJCREcXFxFR6zK/mOO6PMzEyZTCaFhoZW2s6W339nsmLFCjVs2FBt2rTRo48+qvT09Arb1oZjmpqaqsWLF+vhhx++bFtnP6a/P58kJyerqKio1PFp27atmjZtWuHxuZLvdnkIP1WUlpYms9ms8PDwUsvDw8OVkpJS7jopKSk2tXc2FotFTzzxhPr06aOOHTtW2K5NmzaaO3euvvnmG3300UeyWCzq3bu3jh07VoPV2iYuLk7z58/XkiVLNGvWLB06dEh9+/ZVdnZ2ue1d/Vhe8PXXXysjI0MPPvhghW1c8Xj+3oXjYssxu5LvuLPJz8/Xs88+q+HDh1f68Etbf/+dxS233KIPPvhAiYmJeuWVV/Tzzz9r0KBBMpvN5bavDcf0/fffV1BQ0GUvBTn7MS3vfJKSkiIfH58yQf1y59ULbaq6Tnnc7qnuqLpx48Zpx44dl71u3KtXL/Xq1cv6c+/evdWuXTu9/fbb+vvf/17dZV6RQYMGWf/euXNnxcXFKTo6Wp9//nmV/g/LVc2ZM0eDBg1S48aNK2zjiscTJYOf77nnHhmGoVmzZlXa1lV//++9917r3zt16qTOnTurRYsWWrFihW666SYHVlZ95s6dqxEjRlz2pgNnP6ZVPZ/UFHp+qigsLEyenp5lRqGnpqYqIiKi3HUiIiJsau9Mxo8fr//973/66aef1KRJE5vW9fb2Vrdu3bR///5qqs7+QkND1bp16wprduVjecHhw4f1448/asyYMTat54rH88JxseWYXcl33FlcCD6HDx/WsmXLKu31Kc/lfv+dVUxMjMLCwiqs25WPqST98ssv2rt3r83fWcm5jmlF55OIiAgVFhYqIyOjVPvLnVcvtKnqOuUh/FSRj4+PYmNjlZiYaF1msViUmJhY6v+SL9WrV69S7SVp2bJlFbZ3BoZhaPz48frqq6+0fPlyNW/e3OZtmM1mbd++XY0aNaqGCqvHuXPndODAgQprdsVj+Xvz5s1Tw4YNddttt9m0nisez+bNmysiIqLUMcvKytK6desqPGZX8h13BheCz759+/Tjjz+qfv36Nm/jcr//zurYsWNKT0+vsG5XPaYXzJkzR7GxserSpYvN6zrDMb3c+SQ2Nlbe3t6ljs/evXt15MiRCo/PlXy3KyoOVfTZZ58Zvr6+xvz5841du3YZjzzyiBEaGmqkpKQYhmEYDzzwgDFx4kRr+9WrVxteXl7Gv//9b2P37t3G1KlTDW9vb2P79u2O2oXLevTRR42QkBBjxYoVxsmTJ62v3Nxca5vf7+eLL75o/PDDD8aBAweM5ORk49577zX8/PyMnTt3OmIXquSpp54yVqxYYRw6dMhYvXq1ER8fb4SFhRmnTp0yDKN2HMtLmc1mo2nTpsazzz5b5j1XPZ7Z2dnG5s2bjc2bNxuSjOnTpxubN2+23uX0z3/+0wgNDTW++eYbY9u2bcYd
d9xhNG/e3MjLy7Nuo3///sZbb71l/fly33FHqGw/CwsLjdtvv91o0qSJsWXLllLf2YKCAus2fr+fl/v9d5TK9jU7O9v461//aiQlJRmHDh0yfvzxR+Oaa64xWrVqZeTn51u34erH9ILMzEwjICDAmDVrVrnbcIVjWpXzyZ///GejadOmxvLly42NGzcavXr1Mnr16lVqO23atDEWLVpk/bkq3+3LIfzY6K233jKaNm1q+Pj4GD169DDWrl1rfe/66683Ro0aVar9559/brRu3drw8fExOnToYCxevLiGK7aNpHJf8+bNs7b5/X4+8cQT1n+T8PBw49ZbbzU2bdpU88XbYNiwYUajRo0MHx8fIzIy0hg2bJixf/9+6/u14Vhe6ocffjAkGXv37i3znqsez59++qnc39UL+2KxWIzJkycb4eHhhq+vr3HTTTeV2f/o6Ghj6tSppZZV9h13hMr289ChQxV+Z3/66SfrNn6/n5f7/XeUyvY1NzfXGDBggNGgQQPD29vbiI6ONsaOHVsmxLj6Mb3g7bffNvz9/Y2MjIxyt+EKx7Qq55O8vDzjscceM+rWrWsEBAQYd955p3Hy5Mky27l0nap8ty/HdH7DAAAAboExPwAAwK0QfgAAgFsh/AAAALdC+AEAAG6F8AMAANwK4QcAALgVwg8AAHArhB8AAOBWCD8Aasxvv/0mk8mkLVu2OLoUm82fP1+hoaF2bwug5hF+ANSYqKgonTx5Uh07dnR0KTYbNmyYfv31V0eXAcAOvBxdAAD3UFhYKB8fH0VERDi6FJsVFRXJ399f/v7+ji4FgB3Q8wPAZjfccIPGjx+v8ePHKyQkRGFhYZo8ebIufVRgs2bN9Pe//10jR45UcHCwHnnkkVKXvSwWi5o0aaJZs2aV2vbmzZvl4eGhw4cPS5KmT5+uTp06KTAwUFFRUXrsscd07ty5UuusXr1aN9xwgwICAlS3bl0NHDhQZ8+e1QcffKD69euroKCgVPshQ4bogQceKHffLtS4YMECXX/99fLz89PHH39c5lLW1q1bdeONNyooKEjBwcGKjY3Vxo0by93m6dOn1b17d915551lagFQ8wg/AK7I+++/Ly8vL61fv15vvPGGpk+frvfee69Um3//+9/q0qWLNm/erMmTJ5d6z8PDQ8OHD9cnn3xSavnHH3+sPn36KDo62truzTff1M6dO/X+++9r+fLleuaZZ6ztt2zZoptuuknt27dXUlKSVq1apcGDB8tsNuvuu++W2WzWt99+a21/6tQpLV68WA899FCl+zdx4kRNmDBBu3fv1sCBA8u8P2LECDVp0kQbNmxQcnKyJk6cKG9v7zLtjh49qr59+6pjx4764osv5OvrW+nnAqgBV/ysegBu6/rrrzfatWtnWCwW67Jnn33WaNeunfXn6OhoY8iQIaXWO3TokCHJ2Lx5s2EYhrF582bDZDIZhw8fNgzDMMxmsxEZGWnMmjWrws9euHChUb9+fevPw4cPN/r06VNh+0cffdQYNGiQ9efXXnvNiImJKVV7eTXOmDGj1PJ58+YZISEh1p+DgoKM+fPnl7uNC2337NljREVFGX/5y18q/DwANY+eHwBXpGfPnjKZTNafe/XqpX379slsNluXde/evdJtdO3aVe3atbP2/vz88886deqU7r77bmubH3/8UTfddJMiIyMVFBSkBx54QOnp6crNzZV0seenImPHjtXSpUt1/PhxSSV3Yj344IOlai/P5WpPSEjQmDFjFB8fr3/+8586cOBAqffz8vLUt29fDR06VG+88cZlPw9AzSH8AKg2gYGBl20zYsQIa/j55JNPdMstt6h+/fqSSsbf/OEPf1Dnzp315ZdfKjk5WTNnzpRUMoBa0mUHIXfr1k1dunTRBx98oOTkZO3cuVMPPvjgVdf+wgsvaOfOnbrtttu0fPlytW/fXl999ZX1fV9fX8XHx+t///ufNXgBcA6EHwBXZN26daV+Xrt2rVq1aiVP
T0+btnPfffdpx44dSk5O1hdffKERI0ZY30tOTpbFYtFrr72mnj17qnXr1jpx4kSp9Tt37qzExMRKP2PMmDGaP3++5s2bp/j4eEVFRdlUY0Vat26tJ598UkuXLtXQoUM1b94863seHh768MMPFRsbqxtvvLFM3QAch/AD4IocOXJECQkJ2rt3rz799FO99dZbmjBhgs3badasmXr37q2HH35YZrNZt99+u/W9li1bqqioSG+99ZYOHjyoDz/8ULNnzy61/qRJk7RhwwY99thj2rZtm/bs2aNZs2YpLS3N2ua+++7TsWPH9O677152oHNV5OXlafz48VqxYoUOHz6s1atXa8OGDWrXrl2pdp6envr444/VpUsX9e/fXykpKVf92QCuHuEHwBUZOXKk8vLy1KNHD40bN04TJkzQI488ckXbGjFihLZu3ao777yz1GWsLl26aPr06XrllVfUsWNHffzxx5o2bVqpdVu3bq2lS5dq69at6tGjh3r16qVvvvlGXl4XpzELCQnRXXfdpTp16mjIkCFXVOOlPD09lZ6erpEjR6p169a65557NGjQIL344otl2np5eenTTz9Vhw4d1L9/f506deqqPx/A1TEZxiUTcwBAFdxwww3q2rWrZsyY4ehSquymm25Shw4d9Oabbzq6FAAOxgzPAGq1s2fPasWKFVqxYoX++9//OrocAE6A8AOgVuvWrZvOnj2rV155RW3atHF0OQCcAJe9AACAW2HAMwAAcCuEHwAA4FYIPwAAwK0QfgAAgFsh/AAAALdC+AEAAG6F8AMAANwK4QcAALiV/w+aIdaL3oII1AAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 640x480 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "import pandas as pd\n",
    "df=pd.DataFrame({'xvalues': X_axi, 'yvalues': Y_axi })\n",
    " \n",
    "# plot\n",
    "plt.plot( 'xvalues', 'yvalues', data=df)\n",
    "plt.xlabel(\"privacy risk\")\n",
    "plt.ylabel(\"attack accuracy\")\n",
    "# show the graph\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2ee4f5ad-e959-4f63-bde3-78c7e25fc0e4",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "48d74a89-2c75-453a-a3b2-3fd906337030",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "025ed341-77fd-4f0e-b2cf-1ac59f0e74af",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "id": "8e1a4ee1-0c47-411a-b98b-d7713d949e09",
   "metadata": {},
   "source": [
    "### 绘制损失分布差异"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "id": "ecd477a2-9df6-4a4a-ba78-716c2d669d45",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 101,
   "id": "2abddba4-ae48-405a-beb5-c45daf231d6d",
   "metadata": {},
   "outputs": [],
   "source": [
    "idx = 31000"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 102,
   "id": "0421a068-0678-435b-86b7-9f37862cc816",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 需要使用loss_data_all，pri_risk_rank，train_keep\n",
    "dat_in = []\n",
    "dat_out = []\n",
    "for i in range(loss_data_all.shape[1]):\n",
    "    dat_in.append((loss_data_all[train_keep[:,i],i]))\n",
    "    dat_out.append((loss_data_all[~train_keep[:,i],i]))\n",
    "dat_in = np.array(dat_in)\n",
    "dat_out = np.array(dat_out)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 103,
   "id": "37c2030e-d91f-4fdf-9a5a-e246ca594916",
   "metadata": {},
   "outputs": [],
   "source": [
    "mem1 = dat_in[pri_risk_rank[idx]]\n",
    "non_mem1 = dat_out[pri_risk_rank[idx]]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 104,
   "id": "5612c786-26df-4ff0-9c42-2af62fa46b0f",
   "metadata": {},
   "outputs": [],
   "source": [
    "mem1 = mem1.reshape(mem1.shape[0], 1)\n",
    "non_mem1 = non_mem1.reshape(non_mem1.shape[0], 1)\n",
    "\n",
    "arr = np.concatenate((mem1, non_mem1), 1)\n",
    "\n",
    "df = pd.DataFrame(arr, columns=['loss','out'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 105,
   "id": "5a3fd823-c33a-4c78-8829-e7bb6410bc02",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjIAAAG1CAYAAADjkR6kAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAmCUlEQVR4nO3df3DU9Z3H8dfm52YhCZDw24REBtINipEfYZRqoXJQLFaunV6dEYv0ij1KRUxFjTUi1JqWq5jRIhbuKnbUw2ur1HGQHqZ4tICCeFSRFeEMhuFXukLYkDUh2f3eHzZ7jSQh2ezu9/sJz8fMTtnv/vi+v/tt4Ol+v7txWZZlCQAAwEBJdg8AAAAQLUIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLFS7B4g3sLhsI4fP67MzEy5XC67xwEAAN1gWZYaGho0YsQIJSV1/r5Lnw+Z48ePKy8vz+4xAABAFI4eParLLrus09v7fMhkZmZK+uyFyMrKsnkaAADQHYFAQHl5eZF/xzvT50Om7XBSVlYWIQMAgGEudloIJ/sCAABjETIAAMBYff7QEgAAdguFQmppabF7DEdJTU1VcnJyr5+HkAEAIE4sy9LJkydVX19v9yiONGDAAA0bNqxXX49CyAAAECdtETNkyBB5PB6+z+xvLMtSMBhUXV2dJGn48OFRPxchAwBAHIRCoUjE5OTk2D2O42RkZEiS6urqNGTIkKgPM3GyLwAAcdB2TozH47F5Eudqe216c/4QIQMAQBxxOKlzsXhtCBkAAGAszpEBACDBamtr5ff7E7a+3Nxc5efnJ2x9iUTIAACQQLW1tSoq8qqpKZiwdbrdHh086Ot2zEybNk0lJSWqqqqK72AxQMgAAJBAfr9fTU1Beb3PyePxxn19waBPPt88+f3+bofMSy+9pNTU1DhPFhuEDAAANvB4vMrMnGD3GB0aNGiQ3SN0GyEDfE6ij13bpS8fMwfQO39/aKmgoEB33HGHDh8+rN/85jcaOHCgHnzwQd1xxx12jymJkAHasePYtV16eswcwKXrscce049//GM98MAD+u1vf6tFixbpS1/6koqKiuwejZAB/l6ij13bJZpj5gAuXTfeeKO+//3vS5Luu+8+Pf7449q2bRshAziVk49dA0CijR8/PvJnl8ulYcOGRX5Pkt34QjwAANClz3+CyeVyKRwO2zRNe4QMAAAwFoeWAACwQTDo61PrsQshAwBAAuXm5srt9sjnm5ewdbrdHuXm5iZsfYlEyAAAkED5+fk6eNDn6N+19MYbb0T+fOTIkQtu37dvX++HihFCBgCABMvPz+erD2KEk30BAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLEIGQAAYCxCBgAAGIvvkQEAIMFqa2sd/YV4JiFkAABIoNraWnmLihRsakrYOj1ut3wHDyYsZh5++GFt2rQpId8ATMgAAJBAfr9fwaYmPef1yuvxxH19vmBQ83yf/UqEvviuDCEDAIANvB6PJmRm2j1Gh5qbm7Vs2TJt3LhRgUBAkyZN0uOPP67Jkydrw4YNWrp0qerr6yP337Rpk/7xH/9RlmVpw4YNWrFihSTJ5XJJkp555hndfvvtcZmVkAEAAO3ce++9+t3vfqdnn31Wo0aN0qpVqzRr1iwdPnz4oo/91re+pf3792vLli16/fXXJUnZ2dlxm5WQAQAAEY2NjVq7dq02bNig2bNnS5LWr1+vrVu36t///d81ePDgLh+fkZGh/v37KyUlRcOGDYv7vHz8GgAARPzv//6vWlpaNHXq1Miy1NRUlZaWyufz2ThZxwgZAADQbUlJSbIsq92ylpYWm6YhZAAAwN8ZPXq00tLStGPHjsiylpYW7dmzR8XFxRo8eLAaGhrU2NgYuf3zH7NOS0tTKBRKyLycIwMAACL69eunRYsWadmyZRo0aJDy8/O1atUqBYNB/fM//7Ms
y5LH49EDDzygJUuW6K233tKGDRvaPUdBQYFqamq0b98+XXbZZcrMzFR6enpc5iVkAACwgS8YdOx6fvrTnyocDuu2225TQ0ODJk2apD/84Q8aOHCgJOm5557TsmXLtH79et1www16+OGHdccdd0Qe/41vfEMvvfSSpk+frvr6ej5+DQBAX5GbmyuP2615CTxx1uN2Kzc3t9v3d7vdeuKJJ/TEE090ePvcuXM1d+7cdssWLlwY+XN6erp++9vfRjVrT9kaMtu3b9e//uu/au/evTpx4oRefvnldi+MZVlavny51q9fr/r6ek2dOlVr167VmDFj7BsaAIBeyM/Pl+/gQX7XUozYGjKNjY266qqr9J3vfEdf//rXL7h91apVeuKJJ/Tss8+qsLBQFRUVmjVrlg4cOCC3223DxAAA9F5+fn6fDYtEszVkZs+eHfmync+zLEtVVVV68MEHdfPNN0uSfv3rX2vo0KHatGmTbrnllkSOCgAAHMixH7+uqanRyZMnNWPGjMiy7OxsTZkyRbt27er0cc3NzQoEAu0uAACgb3JsyJw8eVKSNHTo0HbLhw4dGrmtI5WVlcrOzo5c8vLy4jonAACwj2NDJlrl5eU6e/Zs5HL06FG7RwIAXMI+/y24+H+xeG0cGzJtv2jq1KlT7ZafOnWqy19ClZ6erqysrHYXAAASLTU1VZIUTND3xZio7bVpe62i4djvkSksLNSwYcNUXV2tkpISSVIgENBbb72lRYsW2TscAAAXkZycrAEDBqiurk6S5PF45HK5bJ7KGSzLUjAYVF1dnQYMGKDk5OSon8vWkDl37pwOHz4cud72dcZtX4m8dOlSPfLIIxozZkzk49cjRoy44Et4AABworYjCG0xg/YGDBjQ5VGW7rA1ZN5++21Nnz49cr2srEySNH/+fG3YsEH33nuvGhsbdccdd6i+vl5f/OIXtWXLFr5DBgBgBJfLpeHDh2vIkCG2/oZoJ0pNTe3VOzFtbA2ZadOmdXmij8vl0sqVK7Vy5coETgUAQGwlJyfH5B9tXMixJ/sCAABcDCEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWCl2D2Cy2tpa+f1+u8dIiNzcXOXn59s9BgAA7RAyUaqtrVVRkVdNTUG7R0kIt9ujgwd9xAwAwFEImSj5/X41NQXl9T4nj8dr9zhxFQz65PPNk9/vJ2QAAI5CyPSSx+NVZuYEu8cAAOCSxMm+AADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWI4OmVAopIqKChUWFiojI0OjR4/Wj3/8Y1mWZfdoAADAAVLsHqArP/vZz7R27Vo9++yzGjdunN5++20tWLBA2dnZWrJkid3jAQAAmzk6ZHbu3Kmbb75ZX/3qVyVJBQUF+o//+A/t3r3b5skAAIATODpkrr32Wq1bt04ffvihxo4dq7/85S/685//rNWrV3f6mObmZjU3N0euBwKBRIx6yaqtrZXf77d7jJjx+XySpGDQ1637p6bmyu3Oj+dIcLi+9jMQjdzcXOXn83MAezg6ZO6//34FAgF94QtfUHJyskKhkH7yk5/o1ltv7fQxlZWVWrFiRQKnvHTV1tbKW1SkYFOT3aPEnM83r1v3S0lya1LpQWLmEtWXfwZ6wuN2y3fwIDEDWzg6ZP7zP/9Tzz//vF544QWNGzdO+/bt09KlSzVixAjNnz+/w8eU
l5errKwscj0QCCgvLy9RI19S/H6/gk1Nes7rldfjsXucmGgMBnXA51O/DK+SkrvepppwUA8GfWpp8RMyl6i++DPQU75gUPN8Pvn9fkIGtnB0yCxbtkz333+/brnlFknSlVdeqY8//liVlZWdhkx6errS09MTOeYlz+vxaEJmpt1jxESDpFZJmckeJadcZJtaEzERTNCXfgYA0zj649fBYFBJSe1HTE5OVjgctmkiAADgJI5+R+amm27ST37yE+Xn52vcuHH6n//5H61evVrf+c537B4NAAA4gKND5sknn1RFRYW+//3vq66uTiNGjND3vvc9PfTQQ3aPBgAAHMDRIZOZmamqqipVVVXZPQoAAHAgR58jAwAA0BVCBgAAGIuQAQAAxiJkAACAsQgZAABgLEIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLEIGQAAYCxCBgAAGIuQAQAAxiJkAACAsQgZAABgLEIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLFS7B4AMF0w6LN7hB5rm9nn6/3szc3NSk9P7/XzxNOJEydUX18f8+etqamRJDUGg2qI+bNHJzU1VW632+4xLim1tbXy+/12j2Gb3Nxc5efn27Z+QgaIkj98XkmSfL55do8StXnzej97kqRw70cx2gGfT612D/E3SUlJmlJaSswkSG1trbxFRQo2Ndk9im08brd8Bw/aFjOEDBClBqtVYUkr0ws0OjXH7nF6JBwKqvFTn4q9XvXzeKJ+ns2ffKKKI0e0vqBAE3Kc+Ro0BoM64PPJ7S5Qsisjps+9o7VeT7eckNtdoMwU+7c/FA4qGPSppaWFkEkQv9+vYFOTnvN65e3Fz5KpfMGg5vl88vv9hAxgqsKkDHlTMu0eo0dCkhoklXg8ysyMfnZfMChJKsrI0IRePE88NUhqlZSZkqPkGO+njyWp5YSSXRkxf+6oOOVtoUuQ1+Nx7M9AX8fJvgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwVlQhc/nll+uTTz65YHl9fb0uv/zyXg8FAADQHVGFzJEjRxQKhS5Y3tzcrGPHjvV6KAAAgO5I6cmdX3nllcif//CHPyg7OztyPRQKqbq6WgUFBTEbDgAAoCs9Cpm5c+dKklwul+bPn9/uttTUVBUUFOixxx6L2XAAAABd6VHIhMNhSVJhYaH27Nmj3NzcuAwFAADQHT0KmTY1NTWxngMAAKDHogoZSaqurlZ1dbXq6uoi79S0+dWvftXrwQAAAC4mqk8trVixQjNnzlR1dbX8fr/OnDnT7hJLx44d07x585STk6OMjAxdeeWVevvtt2O6DgAAYKao3pF5+umntWHDBt12222xnqedM2fOaOrUqZo+fbpee+01DR48WIcOHdLAgQPjul4AAGCGqELm/Pnzuvbaa2M9ywV+9rOfKS8vT88880xkWWFhYdzXCwAAzBDVoaXvfve7euGFF2I9ywVeeeUVTZo0Sd/85jc1ZMgQXX311Vq/fn2Xj2lublYgEGh3AQAAfVNU78g0NTVp3bp1ev311zV+/Hilpqa2u3316tUxGe6jjz7S2rVrVVZWpgceeEB79uzRkiVLlJaWdsH32LSprKzUihUrYrJ+AADgbFGFzLvvvquSkhJJ0v79+9vd5nK5ej1Um3A4rEmTJunRRx+VJF199dXav3+/nn766U5Dpry8XGVlZZHrgUBAeXl5MZsJAAA4R1Qhs23btljP0aHhw4eruLi43TKv16vf/e53nT4mPT1d6enp8R4NAAA4QFTnyCTK1KlTdfDgwXbLPvzwQ40aNcqmiQAAgJNE9Y7M9OnTuzyE9Mc//jHqgf7e3XffrWuvvVaPPvqo/umf
/km7d+/WunXrtG7dupg8PwAAMFtUIdN2fkyblpYW7du3T/v37+/03JVoTJ48WS+//LLKy8u1cuVKFRYWqqqqSrfeemvM1gEAAMwVVcg8/vjjHS5/+OGHde7cuV4N9Hlz5szRnDlzYvqcAACgb4jpOTLz5s3j9ywBAICEiWnI7Nq1S263O5ZPCQAA0KmoDi19/etfb3fdsiydOHFCb7/9tioqKmIyGAAAwMVEFTLZ2dntriclJamoqEgrV67UzJkzYzIYAADAxUQVMn//SxwBAADsElXItNm7d698Pp8kady4cbr66qtjMhQAAEB3RBUydXV1uuWWW/TGG29owIABkqT6+npNnz5dGzdu1ODBg2M5IwAAQIei+tTSnXfeqYaGBr3//vs6ffq0Tp8+rf379ysQCGjJkiWxnhEAAKBDUb0js2XLFr3++uvyer2RZcXFxVqzZg0n+wIAgISJ6h2ZcDis1NTUC5anpqYqHA73eigAAIDuiCpkvvzlL+uuu+7S8ePHI8uOHTumu+++WzfccEPMhgMAAOhKVCHzi1/8QoFAQAUFBRo9erRGjx6twsJCBQIBPfnkk7GeEQAAoENRnSOTl5end955R6+//ro++OADSZLX69WMGTNiOhwAAEBXevSOzB//+EcVFxcrEAjI5XLpH/7hH3TnnXfqzjvv1OTJkzVu3Dj96U9/itesAAAA7fQoZKqqqrRw4UJlZWVdcFt2dra+973vafXq1TEbDgAAoCs9Cpm//OUv+spXvtLp7TNnztTevXt7PRQAAEB39ChkTp061eHHrtukpKTor3/9a6+HAgAA6I4ehczIkSO1f//+Tm9/9913NXz48F4PBQAA0B09Cpkbb7xRFRUVampquuC2Tz/9VMuXL9ecOXNiNhwAAEBXevTx6wcffFAvvfSSxo4dqx/84AcqKiqSJH3wwQdas2aNQqGQfvSjH8VlUAAAgM/rUcgMHTpUO3fu1KJFi1ReXi7LsiRJLpdLs2bN0po1azR06NC4DAoAAPB5Pf5CvFGjRmnz5s06c+aMDh8+LMuyNGbMGA0cODAe8wEAAHQqqm/2laSBAwdq8uTJsZwFAACgR6L6XUsAAABOEPU7MvhMMOize4S4a9vGzZs3y+f7/+2tqamRJDUGg2qwZbLYawwG7R4BQDfU1tbK7/fbPUbk78R4/j2Ympoqt9sdp2c3HyETpRMnTihJks83z+5REqaioqLD5Qd8PrUmeJZ4C1uWku0eAkCHamtrVVTkVVOTc/7DI55/DyYlJWlKaSkx0wlCJkr19fUKS1qZXqDRqTl2jxNXLa2fqKnpiNLTRikl2RNZvqO1Xk+3nJDbXaDMlL7xGrS0nlZTU03kE3kAnMfv96upKSiv9zl5PF5bZwkGffL55qlfhleZf/f3Y6yEwkEFgz61tLQQMp0gZHqpMClD3pRMu8eIq/PhoIKSPCmDlJaWHVn+sSS1nFCyK0PJfeQ1CIWd8194ALrm8XiVmTnB7jEkSUnJnvj8PdjX3u6OA072BQAAxiJkAACAsQgZAABgLEIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLEIGQAAYCxCBgAAGIuQAQAAxiJkAACAsQgZAABgLEIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLEIGQAAYCxCBgAAGIuQAQAAxjIqZH7605/K5XJp6dKldo8CAAAcwJiQ2bNnj375y19q/Pjxdo8CAAAcwoiQOXfunG699VatX79eAwcOtHscAADgECl2D9Adixcv1le/+lXNmDFDjzzySJf3bW5uVnNzc+R6IBCI93iAsRqDwV49/tNPP438b0NDQyxGirnebiMAZ3N8yGzcuFHvvPOO9uzZ0637V1ZWasWKFXGeCjBb2DovSfL5fL16npq2/z1yRKlHjvRuqDgLW5aS7R4CQMw5OmSOHj2qu+66S1u3bpXb7e7WY8rLy1VW
Vha5HggElJeXF68RASNZVqskye0uUmpK/6ifJ6PlE6n5iNzuAmWm5MRqvJhqaT2tpqYaWZZl9ygA4sDRIbN3717V1dVpwoQJkWWhUEjbt2/XL37xCzU3Nys5uf1/Y6Wnpys9PT3RowJGSkryKDklM+rHu0KfHbZJdmX06nniKRTm0BLQlzk6ZG644Qa999577ZYtWLBAX/jCF3TfffddEDEAAODS4uiQyczM1BVXXNFuWb9+/ZSTk3PBcgAAcOkx4uPXAAAAHXH0OzIdeeONN+weAQAAOATvyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYzk6ZCorKzV58mRlZmZqyJAhmjt3rg4ePGj3WAAAwCEcHTL//d//rcWLF+vNN9/U1q1b1dLSopkzZ6qxsdHu0QAAgAOk2D1AV7Zs2dLu+oYNGzRkyBDt3btX119/vU1TAQAAp3B0yHze2bNnJUmDBg3q9D7Nzc1qbm6OXA8EAnGfCwCcpDEYTPi6Nm/eLJ/Pl5B11tTUSJKCwcSsrytOmOFSZ0zIhMNhLV26VFOnTtUVV1zR6f0qKyu1YsWKBE4GAM4Qts5LUsKCQpLe1GfnKFRUVCRsnW18vnkJX2dnrL+99kg8Y0Jm8eLF2r9/v/785z93eb/y8nKVlZVFrgcCAeXl5cV7PACwnWW1SpLc7iKlpvRPyDpDLZ8o3HxED6eN0Ojk7ISsszV0Vs3njyd0Ozuzo/UTPdV0RFa41dY5LmVGhMwPfvADvfrqq9q+fbsuu+yyLu+bnp6u9PT0BE0GAM6TlORRckpmQtblCn12aGl0crbGpQ9NyDrPn5eC54/Lk+RRWoK2szM1ocQdxkPHHB0ylmXpzjvv1Msvv6w33nhDhYWFdo8EAAAcxNEhs3jxYr3wwgv6/e9/r8zMTJ08eVKSlJ2drYyMDJunAwAAdnP098isXbtWZ8+e1bRp0zR8+PDI5cUXX7R7NAAA4ACOfkfGsiy7RwAAAA7m6HdkAAAAukLIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMRcgAAABjETIAAMBYhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDAAAMBYhAwAAjEXIAAAAYxEyAADAWIQMAAAwFiEDAACMZUTIrFmzRgUFBXK73ZoyZYp2795t90gAAMABHB8yL774osrKyrR8+XK98847uuqqqzRr1izV1dXZPRoAALCZ40Nm9erVWrhwoRYsWKDi4mI9/fTT8ng8+tWvfmX3aAAAwGYpdg/QlfPnz2vv3r0qLy+PLEtKStKMGTO0a9euDh/T3Nys5ubmyPWzZ89KkgKBQExnCwaDkqT3W8/oUysU0+d2mtZQQM2S
0lv8Sgk3RpZ/1PrZa9uXXoPOtrUjJm9/T7azKya8BrHa1o44bfvjua2dseM1sGM7OxPv7Q9ZTWqS1NzQIE/I/v+Pfd7Bv/1beO7cuZj/O9v2fJZldX1Hy8GOHTtmSbJ27tzZbvmyZcus0tLSDh+zfPlySxIXLly4cOHCpQ9cjh492mUrOPodmWiUl5errKwscj0cDuv06dPKycmRy+WycTIzBAIB5eXl6ejRo8rKyrJ7HMQA+7TvYZ/2LezPjlmWpYaGBo0YMaLL+zk6ZHJzc5WcnKxTp061W37q1CkNGzasw8ekp6crPT293bIBAwbEa8Q+Kysrix+oPoZ92vewT/sW9ueFsrOzL3ofR5/sm5aWpokTJ6q6ujqyLBwOq7q6Wtdcc42NkwEAACdw9DsyklRWVqb58+dr0qRJKi0tVVVVlRobG7VgwQK7RwMAADZzfMh861vf0l//+lc99NBDOnnypEpKSrRlyxYNHTrU7tH6pPT0dC1fvvyCw3MwF/u072Gf9i3sz95xWdbFPtcEAADgTI4+RwYAAKArhAwAADAWIQMAAIxFyAAAAGMRMgAAwFiEDHqlpqZG06dPV3Fxsa688ko1Ntr7C9zQe8FgUKNGjdI999xj9yjopaNHj2ratGkqLi7W+PHj9Zvf/MbukdBDr776qoqKijRmzBj927/9m93jOBIfv0avfOlLX9Ijjzyi6667TqdPn1ZWVpZSUhz/9UTowo9+9CMdPnxYeXl5+vnPf273OOiFEydO6NSpUyopKdHJkyc1ceJEffjhh+rXr5/do6EbWltbVVxcrG3btik7O1sTJ07Uzp07lZOTY/dojsI7Moja+++/r9TUVF133XWSpEGDBhExhjt06JA++OADzZ492+5REAPDhw9XSUmJJGnYsGHKzc3V6dOn7R0K3bZ7926NGzdOI0eOVP/+/TV79mz913/9l91jOQ4h04dt375dN910k0aMGCGXy6VNmzZdcJ81a9aooKBAbrdbU6ZM0e7du7v9/IcOHVL//v110003acKECXr00UdjOD0+L977U5LuueceVVZWxmhiXEwi9mmbvXv3KhQKKS8vr5dTo7t6u3+PHz+ukSNHRq6PHDlSx44dS8ToRiFk+rDGxkZdddVVWrNmTYe3v/jiiyorK9Py5cv1zjvv6KqrrtKsWbNUV1cXuU9JSYmuuOKKCy7Hjx9Xa2ur/vSnP+mpp57Srl27tHXrVm3dujVRm3fJiff+/P3vf6+xY8dq7NixidqkS16892mb06dP69vf/rbWrVsX923C/4vF/kU3WLgkSLJefvnldstKS0utxYsXR66HQiFrxIgRVmVlZbeec+fOndbMmTMj11etWmWtWrUqJvOia/HYn/fff7912WWXWaNGjbJycnKsrKwsa8WKFbEcG12Ixz61LMtqamqyrrvuOuvXv/51rEZFFKLZvzt27LDmzp0buf2uu+6ynn/++YTMaxLekblEnT9/Xnv37tWMGTMiy5KSkjRjxgzt2rWrW88xefJk1dXV6cyZMwqHw9q+fbu8Xm+8RkYXYrE/KysrdfToUR05ckQ///nPtXDhQj300EPxGhkXEYt9almWbr/9dn35y1/WbbfdFq9REYXu7N/S0lLt379fx44d07lz5/Taa69p1qxZdo3sWITMJcrv9ysUCl3wW8SHDh2qkydPdus5UlJS9Oijj+r666/X+PHjNWbMGM2ZMyce4+IiYrE/4Syx2Kc7duzQiy++qE2bNqmkpEQlJSV677334jEueqg7+zclJUWPPfaYpk+frpKSEv3whz/kE0sd4CMm6JXZs2fzCZc+6Pbbb7d7BMTAF7/4RYXDYbvHQC987Wtf09e+9jW7x3A03pG5ROXm5io5OVmnTp1qt/zUqVMaNmyYTVMhWuzPvod92rexf2OHkLlEpaWlaeLEiaquro4sC4fDqq6u1jXXXGPjZIgG+7PvYZ/2bezf2OHQUh927tw5HT58OHK9pqZG+/bt
06BBg5Sfn6+ysjLNnz9fkyZNUmlpqaqqqtTY2KgFCxbYODU6w/7se9infRv7N0Hs/tgU4mfbtm2WpAsu8+fPj9znySeftPLz8620tDSrtLTUevPNN+0bGF1if/Y97NO+jf2bGPyuJQAAYCzOkQEAAMYiZAAAgLEIGQAAYCxCBgAAGIuQAQAAxiJkAACAsQgZAABgLEIGAAAYi5AB4CjTpk3T0qVL7R4DgCEIGQAAYCxCBgAAGIuQAeBYZ86c0be//W0NHDhQHo9Hs2fP1qFDhyK3f/zxx7rppps0cOBA9evXT+PGjdPmzZsjj7311ls1ePBgZWRkaMyYMXrmmWfs2hQAcZJi9wAA0Jnbb79dhw4d0iuvvKKsrCzdd999uvHGG3XgwAGlpqZq8eLFOn/+vLZv365+/frpwIED6t+/vySpoqJCBw4c0Guvvabc3FwdPnxYn376qc1bBCDWCBkAjtQWMDt27NC1114rSXr++eeVl5enTZs26Zvf/KZqa2v1jW98Q1deeaUk6fLLL488vra2VldffbUmTZokSSooKEj4NgCIPw4tAXAkn8+nlJQUTZkyJbIsJydHRUVF8vl8kqQlS5bokUce0dSpU7V8+XK9++67kfsuWrRIGzduVElJie69917t3Lkz4dsAIP4IGQDG+u53v6uPPvpIt912m9577z1NmjRJTz75pCRp9uzZ+vjjj3X33Xfr+PHjuuGGG3TPPffYPDGAWCNkADiS1+tVa2ur3nrrrciyTz75RAcPHlRxcXFkWV5env7lX/5FL730kn74wx9q/fr1kdsGDx6s+fPn67nnnlNVVZXWrVuX0G0AEH+cIwPAkcaMGaObb75ZCxcu1C9/+UtlZmbq/vvv18iRI3XzzTdLkpYuXarZs2dr7NixOnPmjLZt2yav1ytJeuihhzRx4kSNGzdOzc3NevXVVyO3Aeg7eEcGgGM988wzmjhxoubMmaNrrrlGlmVp8+bNSk1NlSSFQiEtXrxYXq9XX/nKVzR27Fg99dRTkqS0tDSVl5dr/Pjxuv7665WcnKyNGzfauTkA4sBlWZZl9xAAAADR4B0ZAABgLEIGAAAYi5ABAADGImQAAICxCBkAAGAsQgYAABiLkAEAAMYiZAAAgLEIGQAAYCxCBgAAGIuQAQAAxiJkAACAsf4PQCpQwnVN9OIAAAAASUVORK5CYII=",
      "text/plain": [
       "<Figure size 640x480 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "import seaborn as sns\n",
    "import matplotlib.pyplot as plt\n",
    "  \n",
    "\n",
    "# Overlay two log-scale histograms from `df` to compare the loss\n",
    "# distributions labelled \"in\" (members) vs \"out\" (non-members).\n",
    "# NOTE(review): the first plot reads column \"loss\" but the second reads\n",
    "# column \"out\" -- confirm that `df` really has both columns and that\n",
    "# \"out\" holds the non-member losses; otherwise both should use \"loss\"\n",
    "# on split frames.\n",
    "sns.histplot(data=df, x=\"loss\", color=\"blue\", label=\"in\", log_scale=True)\n",
    "sns.histplot(data=df, x=\"out\", color=\"red\", label=\"out\", log_scale=True)\n",
    "\n",
    "# Labels were passed to histplot above, so legend() can pick them up.\n",
    "plt.legend() \n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "05bd3cc7-e9f6-438b-9e72-7a67d682b590",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 106,
   "id": "3501d38f-7388-44cb-8b49-3c03c82ab8c8",
   "metadata": {},
   "outputs": [],
   "source": [
    "def imshow(img):\n",
    "    \"\"\"Display an image array with matplotlib and block until shown.\n",
    "\n",
    "    Parameters\n",
    "    ----------\n",
    "    img : array-like\n",
    "        Image data accepted by ``plt.imshow`` (e.g. (H, W) or (H, W, C)\n",
    "        -- exact shape/dtype not constrained here; TODO confirm callers).\n",
    "    \"\"\"\n",
    "    # The original body copied `img` to itself and to an alias `npimg`\n",
    "    # before plotting; both assignments were no-ops and are removed.\n",
    "    plt.imshow(img)\n",
    "    plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 107,
   "id": "94652e56-d847-4409-89fa-99e1b48e52d5",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAaAAAAGdCAYAAABU0qcqAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAvuUlEQVR4nO3de3Cc9X3v8c/eV9eVdZcsycgXbIwvaRwwOiSUYNeXnjIQPB1IcqYmZWCgMlNw0yTuJBBoe0TJTEKSccwfpbiZE0NCTwyFaaBgsDhJbFI7OI4TomBHYBlb8lX322r3OX+kqBUY+H1tyT9JvF8zO2Npv/7q9zzP7n71aHc/GwqCIBAAABdY2PcCAAAfTgwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXUd8LeKdsNqujR4+qoKBAoVDI93IAAEZBEKinp0fV1dUKh9/7PGfSDaCjR4+qtrbW9zIAAOepra1NNTU173n9hA2gzZs362tf+5ra29u1dOlSffvb39bll1/+gf+voKBAkrS4rlaR95mc/92MGQnndd100xrnWklq7+x3rj3dbztjy2YyzrUZQ60kxaIR59rKsjJT7+GRrKm+te2Ie3EoZuqdSOQ413Z1njH17u0ddK4Nq8DUe2697Zes3Nx859pMJtfU+5e/es25Nq/E1vvKVZ9wrj145JCpd39vt3PtJbPnmXq3tLxpqo+G3e+fJfnut1lJioy4P64UlJaaep860+VcO9PQe2BgQJ/fePvo4/l7mZAB9P3vf18bN27Uww8/rOXLl+uhhx7S6tWr1dLSovLy8vf9v2//2S0SDjsPoGjE/cE2J+k+rCQpmRxxrk1kpuYAShr3Sdg4gOLxuHuxeQC5947Hbb1jMfd9HpZhGyUlEklTfdJQn8nYHuBiMcs+tN1WcnLdB1YiaVv3yMjwhKxDkuLG4xMzDKBk0tbbMoBycmzbmRww7ENjb0kf+DTKhLwI4etf/7puvfVWfe5zn9PChQv18MMPKzc3V//0T/80ET8OADAFjfsAGh4e1t69e7Vy5cr/+iHhsFauXKldu3a9q35oaEjd3d1jLgCA6W/cB9DJkyeVyWRUUVEx5vsVFRVqb29/V31TU5NSqdTohRcgAMCHg/f3AW3atEldXV2jl7a2Nt9LAgBcAOP+IoTS0lJFIhF1dHSM+X5HR4cqKyvfVZ9IJJRI2J7YBABMfeN+BhSPx7Vs2TLt2LFj9HvZbFY7duxQQ0PDeP84AMAUNSEvw964caPWr1+vj33sY7r88sv10EMPqa+vT5/73Ocm4scBAKagCRlAN954o06cOKF77rlH7e3t+shHPqJnn332XS9MAAB8eE1YEsKGDRu0YcOGc/7/rSczCoUCp9og6VYnSYcOH7YtJOL+5rjuE2lT66qzPCf2Xkxv5pSUTLrXV5WWmHqXV73/m4nfafHiuc61pzs7Tb17utzfFDs4UGXqnR5xfxPyqRMDpt6ZIfc3AErSb1rd0woGh21viC4vd78dnuo6Yep98Bd7nWuPnHzL1Du/0P2+GQy7v+NfknqO/MZUX17inoSRTBabeh9pdU8SSQYzTb2r8t2ffy+JuN+u+iNu9wfvr4IDAHw4MYAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeTFgUz/nKJqSQ43gczOY59z3Y6h7dIkknTvzWufadH0HxQerqZjnXlpaVmnovXHCxc23nGdun0Pb09Zrq82e4x5QosP1ONNDvHoGTzdqOfSLhHmc0MtJp6t1zxlbf8pufOdcGEds+LCq73Lk2GbVF2kTSGefaYvdkHUlSXsL9ft91vMfUO5bjHsMkSZHcQefavPKYqXf/4X7n2t2vHjD1rqmqca7trYo41w4Ouu0PzoAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAA
AF4wgAAAXkzaLLggk5GygVPt6TPu2WS/+M2bpnWkCt13UTiZNPU+cvy4c+1Axj1TS5I+8rGPOde2tr1h6j1oyPeSpIEh91ytpR+9zNT75JnTzrUzig2ZdJJOnD7lXDscuN1W35bJ2Orr69wzu5IFtqyxvJyQc21ne6epd3972rn2ik9eber9w6d/5Fwb2G6y6u11v29K0qGDQ8617cfaTL3rL1roXBuWLfPulZ//3Ln2oovOONcODw871XEGBADwggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADwYtJG8UQzcYVCbvOxsDDPuW9ukXvsiCQFkaxzbWVNnam3a1yFJJVXVJh6h+IJ59pfvHbQ1HvR4o+a6t889pZz7f4DtqikM2fc43IuTs4y9e443elcm4jmmHofP91nqi/ML3OujSVsv1eG5R5RVJyaberde9o9viU84n5fk6R589xv4/HIoKn3ySO2OCMp7lxZWZpv6pzMuO+XmSW2Y5+zNNe5tqevw7k2nHWL3+IMCADgBQMIAOAFAwgA4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAODFpM2CC0UyCocCp9rKyhLnvlnZMqG6urqdawf7Mqbevb29zrU5CVt+1KmTXc61x46fNvVeGnfPvZKk4tJC59q2I7819Q5C7jlZI6o29Y7H3PPAynLdM7UkqT3mdtt+23DWvX5kwPZ75cnOk861uTm2zLueYffb+FsnfmfqvWCOewbkRbPcHyMkKT93nqk+EXO/TwwPpUy9f/TsUefakhz3fElJuuqKcufan+0dcq4dHEw71XEGBADwYtwH0Fe/+lWFQqExlwULFoz3jwEATHET8ie4Sy+9VC+88MJ//ZDopP1LHwDAkwmZDNFoVJWVlRPRGgAwTUzIc0Cvv/66qqurNXv2bH32s5/V4cOH37N2aGhI3d3dYy4AgOlv3AfQ8uXLtXXrVj377LPasmWLWltb9YlPfEI9PT1nrW9qalIqlRq91NbWjveSAACT0LgPoLVr1+pP//RPtWTJEq1evVr/9m//ps7OTv3gBz84a/2mTZvU1dU1emlraxvvJQEAJqEJf3VAUVGRLr74Yh08ePCs1ycSCSUS7p/tDgCYHib8fUC9vb06dOiQqqqqJvpHAQCmkHEfQJ///OfV3NysN954Qz/96U/1qU99SpFIRJ/+9KfH+0cBAKawcf8T3JEjR/TpT39ap06dUllZmT7+8Y9r9+7dKisrszUKMgrkFj/y1lvuzxsNp21RPPl57hE4eTm2PyXmGdJbsoYolt/Xu9dGIrabQcjxuLxtaKDfuXbEeHxy8pPOtV1nbJFDQ4YYpoEBWwxTeZ97rIkkxWLuB7Qvz3DwJQ0bbraDsQFT77wZ7veftG0XquOY+1oSxke6krKIrb7IfSd2nnG/P0hSZ5d7fU3pTFPvtrfO/tTI2Qym3aOPBkfczm3GfQA9/vjj490SADANkQUHAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADwggEEAPBiwj+O4VzFYnGFw27zMRJxz22KBrZNjifizrV5ee5ZSZKUn++ek2WplaQewyfL1lRVm3qf6Dhhqu/v7XOuzQyPmHr3dfc61x7LHjH1zvS4Z3CdNuZ75aWHTfWFxe7HPx5LmXpXlBY614aTOabekZEC59oe44ch95xxr/1ti+1zxkIRW+ZdRYn7PozJdnxmFM5zro0maky9D7zmngU3MOL+ODs07FbLGRAAwAsGEADACwYQAMALBhAAwAsGEADACwYQAMALBhAAwAsGEADACwYQAMALBhAAwItJG8UTjUYUDrvFOaRS7tEWhYW2uJz+fvdIjjNnDNkgknJy3GNN2tvbTb1PnTrlXJuXm2vq3fbmYVP9wID7PjxxosPUuyDlHlETZDOm3n2nu5xrB9vd97ckZROBqT4n
4R71Uxy3xfzE+04714bDMVPvUMz9thWL2h6OoiH3tWSzZabeuQW2tWQzCefaglSJqfdg2j0CxxoJtXDhCufaX/z6t861mcyQUx1nQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvJm0W3MDgoMIht/nY1tbm3Le4pMi0jq4u9zyw/j73zDNJyslJOtdms7bssGjEPT8qJ+G+DkkKmaql9MiIe+2wLccsoqxzbV6BLQewJ+1+PHsCt+yrt40M2vZicNQ9Z7C3131/S1KxIR8xmXTPL5SkWNL9dtgzYrv/9PS61weBbX8XFxeb6udftMS5dk5dtan3wdY3nGtHRmxZcMq459Llxtzz7iKOd0vOgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeTNosuExmRFnHLLi+Lvf8o4EBW1ZSOOw+o9NpWwbX8HDPhKxDkmIx90MbZDKm3tFozFSflXuOXdYYNNfXO+hcmzzda+odjhU61wbRPlPvxJDxrjfgvmOyWdvxHJT77Xawu9vUuy990rm2u9e2D6MJ92yyyqqZpt6tb7jnS0pSeVHKuTbXsG5Jyst3z+rrG7Ddxg++cci59kyn+7Ecdsx05AwIAOCFeQC9/PLLuvbaa1VdXa1QKKQnn3xyzPVBEOiee+5RVVWVcnJytHLlSr3++uvjtV4AwDRhHkB9fX1aunSpNm/efNbrH3zwQX3rW9/Sww8/rFdeeUV5eXlavXq1Bgfd/1QCAJj+zM8BrV27VmvXrj3rdUEQ6KGHHtKXv/xlXXfddZKk7373u6qoqNCTTz6pm2666fxWCwCYNsb1OaDW1la1t7dr5cqVo99LpVJavny5du3addb/MzQ0pO7u7jEXAMD0N64DqL29XZJUUVEx5vsVFRWj171TU1OTUqnU6KW2tnY8lwQAmKS8vwpu06ZN6urqGr1YPl4bADB1jesAqqyslCR1dHSM+X5HR8fode+USCRUWFg45gIAmP7GdQDV19ersrJSO3bsGP1ed3e3XnnlFTU0NIznjwIATHHmV8H19vbq4MGDo1+3trZq3759Ki4uVl1dne666y793d/9nebNm6f6+np95StfUXV1ta6//vrxXDcAYIozD6A9e/bok5/85OjXGzdulCStX79eW7du1Re+8AX19fXptttuU2dnpz7+8Y/r2WefVTKZNP2cIMhIjhEuOckcQ1/TMpTJZJ1rs+6l/9nbPTIlErH1ltx7RyPGKB5jlEg47B4jkw5sOzETuEV+SNJg5xlT72zy7H82Ppv8uHsUiyQp3fHBNf/NjFz3u+pQ1n2fSFJbe6d772HbH02iQdq5NqfAPXJGkmpnusfrfOQP/sDU2/rm+fYTZ3+R1dkc6Sg19a6vc9/Okf4BU2/F3B8QC0uKnWuHhoac6swD6Oqrr1bwPo/ioVBI999/v+6//35rawDAh4j3V8EBAD6cGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvzFE8F8rISEahkFsu2NCQe5ZZ2BqqZgiPs2S7/b61e++w8VeFiGE78wsKTL1dc57eFhjy3RKGXD9JiuXFnWvzM/2m3hXJHufaqoR75pkkLUj1murnxd33y5EB930iSf+n3f34HMqfYepdEnPPAYzGbQ9HpSUlzrWzZ9ebei9ffrmp/vHHvudc++bhw6bec+tnOdeWF9mOz8Cg+325d2jEuTYaGXSq4wwIAOAFAwgA4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAODFpI3iscTUJJNJ51prjIzck0TM4nH3yBTLNkpSJuMem6GQbSNThYWm+vSAWyyHJGXS7sddkirkvl/+sMwWUfM/qtwjimbnG/a3pLzWDlt9l3uM0FDUtp2dqVzn2v8btkVZ
DaTdY37SaVuc0fWfut659n/+yZ+YeoeN2VdvvvE759r/t/MlU++eHvdIqOoSWxRP0nA8M9lh59qQYyoZZ0AAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALyZtFlwkElEo5DYfIxH3PKNs1j2bSrJl0llZsuCsLOvu6u6yNc/NM5UXJN3rk4MDpt7/K93mXLtmxHYsZyQWOdfm/8FKU+/2DsewrP/UefQXzrU56dOm3otC7tl++3NsmYQDF811rn2r/Zip98kTJ51rrVmKlscUSVqyZIlz7c92/dTU+/Rp9+NZlrLdNwO53ye6O93396Bj5iZnQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALyZtFE88HneO4gkC93gdayRHNOq+izIZW7xKOp2ekFpJCkfcf7eIJmw3gyBjizPKTbjv85m5uabeHxnoda6ddard1Lsn53Xn2s4O98gZSUrGbfswXmqoHbJFDi2U+z7/aEmVqffusHukTX5evqn3Sy+95Fx72fLLTb3fOvqWqf6Zf/1X59qcnBxT7/x89/0ybIwaG0m7ReZI0vBwt3NtenjYqY4zIACAFwwgAIAX5gH08ssv69prr1V1dbVCoZCefPLJMdfffPPNCoVCYy5r1qwZr/UCAKYJ8wDq6+vT0qVLtXnz5vesWbNmjY4dOzZ6eeyxx85rkQCA6cf8IoS1a9dq7dq171uTSCRUWVl5zosCAEx/E/Ic0M6dO1VeXq758+frjjvu0KlTp96zdmhoSN3d3WMuAIDpb9wH0Jo1a/Td735XO3bs0D/8wz+oublZa9eufc+XKDc1NSmVSo1eamtrx3tJAIBJaNzfB3TTTTeN/nvx4sVasmSJ5syZo507d2rFihXvqt+0aZM2btw4+nV3dzdDCAA+BCb8ZdizZ89WaWmpDh48eNbrE4mECgsLx1wAANPfhA+gI0eO6NSpU6qqsr2DGgAwvZn/BNfb2zvmbKa1tVX79u1TcXGxiouLdd9992ndunWqrKzUoUOH9IUvfEFz587V6tWrx3XhAICpzTyA9uzZo09+8pOjX7/9/M369eu1ZcsW7d+/X//8z/+szs5OVVdXa9WqVfrbv/1bJRIJ088Jy/30LGTIm8ox5JJJUjwec67NZm1ZcKdPdzrXZtK23hHHHD1JkrF3Nmyr7x8YcK4NyspNvQ9Gq51r64Zs687p73SuDSfeMPUenmfLa8vMme9c2/pGj6n3wQ73rLFW4z783cnfOdfGjY8Rw1m3vDFJavrff2/qffL4SVO95H48Fy6YZ+qcX1DgXBuL2x7fsobsuJrqGufagcFBpzrzALr66qsVBO+9s5977jlrSwDAhxBZcAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAAL8b984DGSywcVTjsNh9HRkac+8Yjtk1ORA1ZcIFtnkdCIefaZNyWk1VcVORcmxN3z9KTpJBh3ZI0nHbP7BowruW1spnuvVs7Tb1Let3XPXLU1vt0xrYPT/2u37m27aR7vpckvTXsltslSb3JLlPviOF4dnafsfWOuPc+fcqW7VaUP8NUX1pa4lxrzaOMGfIo49G4qXck5n47DGK57n3DbvmPnAEBALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALyYtFE8Fq6RPZIUki0CJWTobQuRkQytlV+QZ+o9c2aFc20kFJh6W6N4gqx7NMzxM6dMvffl1DnX7sm6R4lIUqS717k2+5NOU+9oXpmpfrDT/fgHEdvxHMxxPz7DYffYK0kKhtxvK4mYe+SMJEUNUTxxY5RVXtIWl5OX496/qDDf1Dtp2C9x4z7MGB4Pg8D9djWSyTjVcQYEAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADw
ggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8GLSZsGFQiHnzLGkIbcpkDEna2DIuTaRtOUwJZNx59qLLppp6n39ddc61545cdrU+7XXXjPV5+W6Z7C9/rtWU++u3n7n2iBly1873eN+7HNO2bLD4kHaVJ+OumVrSVIwaMtrG+h37x1NuN9mJSlmyA+LWsIRJYUNOWbGeDyFAvd9IkmJqPvaZ6QKTb1zDI8TxphGU66jpdY1n5MzIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAF5M2iicIAgWOMR7ZbNa5ryW2R5LC4Yh7bcSWg2GJ4pkzZ5apdzzu/rtFQUGeqfe8eXNM9SdPnXSunVlZbur95u8OO9fm5tq2syCWcK7NGGJ7JKm/+5SpPpR0z5LJDrjfZiVJI+63w7Dc94kkKeIeaRMyROtIUsRwfwsZo3jykrbtLC8tca4tKsw39Y7I/fEtZIwas8TrBIbHWTk+dnMGBADwwjSAmpqadNlll6mgoEDl5eW6/vrr1dLSMqZmcHBQjY2NKikpUX5+vtatW6eOjo5xXTQAYOozDaDm5mY1NjZq9+7dev7555VOp7Vq1Sr19fWN1tx99916+umn9cQTT6i5uVlHjx7VDTfcMO4LBwBMbabngJ599tkxX2/dulXl5eXau3evrrrqKnV1demRRx7Rtm3bdM0110iSHn30UV1yySXavXu3rrjiivFbOQBgSjuv54C6urokScXFxZKkvXv3Kp1Oa+XKlaM1CxYsUF1dnXbt2nXWHkNDQ+ru7h5zAQBMf+c8gLLZrO666y5deeWVWrRokSSpvb1d8XhcRUVFY2orKirU3t5+1j5NTU1KpVKjl9ra2nNdEgBgCjnnAdTY2KgDBw7o8ccfP68FbNq0SV1dXaOXtra28+oHAJgazul9QBs2bNAzzzyjl19+WTU1NaPfr6ys1PDwsDo7O8ecBXV0dKiysvKsvRKJhBIJ43sLAABTnukMKAgCbdiwQdu3b9eLL76o+vr6MdcvW7ZMsVhMO3bsGP1eS0uLDh8+rIaGhvFZMQBgWjCdATU2Nmrbtm166qmnVFBQMPq8TiqVUk5OjlKplG655RZt3LhRxcXFKiws1J133qmGhgZeAQcAGMM0gLZs2SJJuvrqq8d8/9FHH9XNN98sSfrGN76hcDisdevWaWhoSKtXr9Z3vvOdcVksAGD6MA0gl2y2ZDKpzZs3a/Pmzee8KEkqLStTJOKWaXXs6FHnvp2dnaZ1FBamnGtz83JMvUNh9xymgsJcU++8fPfMuxmFRabeXd1nTPXptHtOWm6BbTtHDNlXvT39pt45Cfd92DcyaOo9lLU975mbdl9LoGFT7yDqntcWThjywCQlwu45c5ZcMkkKh9yfQZjxjlfmfpBZdbZX4xYVFTrXxqK2rL54zPIwbXtdWTbsfv8JLDlzjo9tZMEBALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALw4p49juBB6+voUDrtFVkQNURVFSffIDEnq7upxru3v7zX1Lky5x6sUF5WYepeVlDnXhmWLBlmyZImpfmBgwLm27cgRU+9ojnukzWCv+zokKSb3iJq8wnxT7/yQe0SNJGXT7jEow0GfqXc4POJcGzXER0mSsu7rjkRtD0czimY4186pn23qXVhoi9WKGdZeUGB7DIrF3B8nhkdsUUlByP02rrDhfMWxljMgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBeTNguuu7dX4ZDbfCybkXLue8Xly0zriMfcs8ZafvtbU+/ftbrXDw4MmXqXlVU41w4NDJp6x2IxU/38+Rc717a0tJh6R+SefZVK2fLaNOKek5Ubt2WHBcZf/YYiaUO1rXkm454FGBgy6SQpGnd/iKkoLzf1rqqqcq7NSbrf
jyUpHrdl9VXPnOlcm0q5Z9iZhdxz/SQpMOQdWo68ay1nQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALxhAAAAvGEAAAC8YQAAALyZtFE8yGlU47DYfZ9XWOPddtPAS0zpmGiI2rlhui/l5dd/PnWvDtgQUdZ4+41ybk2OLkRkYGDDV9/f3O9dGou6xMJKUybpH8STitt5B1n2np0csUTlSJuMegSJJI1n3iJVQ4L5PJCkRdX8YyM/PM/WuqnSPy6msrDT1tkgYo3guumiWqb6iwj36KhQKmXqPjNjidaYSzoAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXkzaLLhYNOycBVdd5Z7DVFNty5sqyHfPkJqRsmWqVa5a6Vz7xhtvmHr3dfc61/b09Jh6nzhxwlS/a9cu59qwMSfLUp1O2/Lawhn3LLhIxJYzFzH+6hc15LUlk/mm3oWFhc61qaIiU+9kMulcm8kMm3oXFxc7185fMN/Uu7S01FRvudlacwCDwBgEOYVwBgQA8MI0gJqamnTZZZepoKBA5eXluv7669XS0jKm5uqrr1YoFBpzuf3228d10QCAqc80gJqbm9XY2Kjdu3fr+eefVzqd1qpVq9TX1zem7tZbb9WxY8dGLw8++OC4LhoAMPWZngN69tlnx3y9detWlZeXa+/evbrqqqtGv5+bmzuhn+0BAJj6zus5oK6uLknvfjLwe9/7nkpLS7Vo0SJt2rTpfT+QbGhoSN3d3WMuAIDp75xfBZfNZnXXXXfpyiuv1KJFi0a//5nPfEazZs1SdXW19u/fry9+8YtqaWnRD3/4w7P2aWpq0n333XeuywAATFHnPIAaGxt14MAB/fjHPx7z/dtuu23034sXL1ZVVZVWrFihQ4cOac6cOe/qs2nTJm3cuHH06+7ubtXW1p7rsgAAU8Q5DaANGzbomWee0csvv6yampr3rV2+fLkk6eDBg2cdQIlEQomE7fPaAQBTn2kABUGgO++8U9u3b9fOnTtVX1//gf9n3759kqSqqqpzWiAAYHoyDaDGxkZt27ZNTz31lAoKCtTe3i5JSqVSysnJ0aFDh7Rt2zb98R//sUpKSrR//37dfffduuqqq7RkyZIJ2QAAwNRkGkBbtmyR9Ps3m/53jz76qG6++WbF43G98MILeuihh9TX16fa2lqtW7dOX/7yl8dtwQCA6cH8J7j3U1tbq+bm5vNa0NsioUDhkFsGUmWFe25TQZ4try0acc9hisZMrRWPuq9lRqrI1PvUydPOtfMumWfqnZeXZ6qfPXu2c+2r//knW1fZIOtc29frno8nSak890w16z5JxGzvgIjH3bPmcnJzTb1jMfcbbtT4xo1w2P3+U1rmnu0mSZdeeqlzbUlJiam3LWVwYo2MjPhewoQhCw4A4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAOAFAwgA4AUDCADgBQMIAOAFAwgA4MU5fx7QRCsuKlQk4hY/MvuiWc5941HrJqedK2MRW3xHPOa+lqqKClPvAwd+5Vx7pueMqffChQtN9cPDw861Q4ODpt6RsPvvUIOZjKl3xlAfZN0jgX7f21g/4h5pk0m7729JShhuh8Uziky95y2Y71xbV1dn6p2X6x5/ZIlskqRsxn1/Sx8cU/bfhUK2x4lwyP02HpLtNm4RZN230bWWMyAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAFwwgAIAXDCAAgBcMIACAF5M2C+76a/9YyUTCqbaqosy5ryFWSZIUjSadayPhmK25Yf4XFBaYOn/sY8uca5/b8byp9949e031p0+ddq7Njtgyu4pyc51rF198sal3Iu52+5MkGbLAJCmZNPSWlJPjfjtMpVKm3pWVlRNSK0m5ee55bVYDA+65gVFjBqQ1r82Wk2a7jVvWEjE+wEUCw3YatjFMFhwA
YDJjAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALyYtFE8CxcuUJ5jzEo47D5HY/G4aR2xmHu8TiQSMfW2xINYo0Tq59Q7166NrzX13r17t6m+LdHmXGvZ35K0aMEC59pLL73U1NtyPC23QUmKGY9n2LAW6z6MG+4TgTFyaGBgwLl2cNA9WscskzGVx6K2fRiOuh+f7IhtHwYaMVVbWBKHAkNv11rOgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeMIAAAF4wgAAAXjCAAABeTNosuCAkZR1zisIRwxwNG8KPJMmQ8RUyZsGFDPlRIcs2ypZNNnfePFPvmpoaU31XV5dzrTUjLdcxL1A6l0PvfnysOYBW2ax7lpktDcz2H7LZrKl1bn6ec208mTT1Hk4PO9dmM7Z1Z4170bJfMsbeI4ZjnzZm3o0Y1m3p7VrLGRAAwAvTANqyZYuWLFmiwsJCFRYWqqGhQT/60Y9Grx8cHFRjY6NKSkqUn5+vdevWqaOjY9wXDQCY+kwDqKamRg888ID27t2rPXv26JprrtF1112nX/3qV5Kku+++W08//bSeeOIJNTc36+jRo7rhhhsmZOEAgKnN9Af3a6+9dszXf//3f68tW7Zo9+7dqqmp0SOPPKJt27bpmmuukSQ9+uijuuSSS7R7925dccUV47dqAMCUd87PAWUyGT3++OPq6+tTQ0OD9u7dq3Q6rZUrV47WLFiwQHV1ddq1a9d79hkaGlJ3d/eYCwBg+jMPoF/+8pfKz89XIpHQ7bffru3bt2vhwoVqb29XPB5XUVHRmPqKigq1t7e/Z7+mpialUqnRS21trXkjAABTj3kAzZ8/X/v27dMrr7yiO+64Q+vXr9evf/3rc17Apk2b1NXVNXppa3P/+GYAwNRlfh9QPB7X3LlzJUnLli3Tf/zHf+ib3/ymbrzxRg0PD6uzs3PMWVBHR4cqKyvfs18ikVAikbCvHAAwpZ33+4Cy2ayGhoa0bNkyxWIx7dixY/S6lpYWHT58WA0NDef7YwAA04zpDGjTpk1au3at6urq1NPTo23btmnnzp167rnnlEqldMstt2jjxo0qLi5WYWGh7rzzTjU0NPAKOADAu5gG0PHjx/Vnf/ZnOnbsmFKplJYsWaLnnntOf/RHfyRJ+sY3vqFwOKx169ZpaGhIq1ev1ne+851zWlhxWZny89xiPCwJK9GY7a+OsWjMuTZsjGOJWGJ+DLWSNDzsHlMyNDho6m2NnZkxY4ap3sIUUWPM4snIPaYksCW92G8r8bhzbdoQUSPZYpuygS3qxRKBE42739ck2/FJJG33e+vjRGbEfb9kjHFGOXnucVMjhnVItnWPjIw41/b19zvVmfbyI4888r7XJ5NJbd68WZs3b7a0BQB8CJEFBwDwggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8IIBBADwggEEAPCCAQQA8MKchj3RgiCQ5B7lIE1wFE/EEsVjm+eTJYpneDht6m2JbpH+65hOBEsUT8gYxWMRDtn2SThsjOIx3LbSI8bjaVi7tbcpiidqu28ODw8510YiUzeKJ5txj8AxR/EYjo8liqf/Px+/P+i+Hwom8tHhHBw5coQPpQOAaaCtrU01NTXvef2kG0DZbFZHjx5VQUGBQqH/+o21u7tbtbW1amtrU2FhoccVTiy2c/r4MGyjxHZON+OxnUEQqKenR9XV1e/7F5NJ9ye4cDj8vhOzsLBwWh/8t7Gd08eHYRsltnO6Od/tTKVSH1jDixAAAF4wgAAAXkyZAZRIJHTvvfcqkUj4XsqEYjunjw/DNkps53RzIbdz0r0IAQDw4TBlzoAAANMLAwgA4AUDCADgBQMIAODFlBlAmzdv1kUXXaRkMqnly5frZz/7me8ljauvfvWrCoVCYy4LFizwvazz8vLLL+vaa69VdXW1QqGQnnzyyTHXB0Gge+65R1VVVcrJ
ydHKlSv1+uuv+1nsefig7bz55pvfdWzXrFnjZ7HnqKmpSZdddpkKCgpUXl6u66+/Xi0tLWNqBgcH1djYqJKSEuXn52vdunXq6OjwtOJz47KdV1999buO5+233+5pxedmy5YtWrJkyeibTRsaGvSjH/1o9PoLdSynxAD6/ve/r40bN+ree+/Vz3/+cy1dulSrV6/W8ePHfS9tXF166aU6duzY6OXHP/6x7yWdl76+Pi1dulSbN28+6/UPPvigvvWtb+nhhx/WK6+8ory8PK1evVqDg4MXeKXn54O2U5LWrFkz5tg+9thjF3CF56+5uVmNjY3avXu3nn/+eaXTaa1atUp9fX2jNXfffbeefvppPfHEE2pubtbRo0d1ww03eFy1nct2StKtt9465ng++OCDnlZ8bmpqavTAAw9o79692rNnj6655hpdd911+tWvfiXpAh7LYAq4/PLLg8bGxtGvM5lMUF1dHTQ1NXlc1fi69957g6VLl/pexoSRFGzfvn3062w2G1RWVgZf+9rXRr/X2dkZJBKJ4LHHHvOwwvHxzu0MgiBYv359cN1113lZz0Q5fvx4IClobm4OguD3xy4WiwVPPPHEaM1rr70WSAp27drla5nn7Z3bGQRB8Id/+IfBX/7lX/pb1ASZMWNG8I//+I8X9FhO+jOg4eFh7d27VytXrhz9Xjgc1sqVK7Vr1y6PKxt/r7/+uqqrqzV79mx99rOf1eHDh30vacK0traqvb19zHFNpVJavnz5tDuukrRz506Vl5dr/vz5uuOOO3Tq1CnfSzovXV1dkqTi4mJJ0t69e5VOp8cczwULFqiurm5KH893bufbvve976m0tFSLFi3Spk2bRj9+YCrKZDJ6/PHH1dfXp4aGhgt6LCddGOk7nTx5UplMRhUVFWO+X1FRod/85jeeVjX+li9frq1bt2r+/Pk6duyY7rvvPn3iE5/QgQMHVFBQ4Ht54669vV2Sznpc375uulizZo1uuOEG1dfX69ChQ/qbv/kbrV27Vrt27VIkYvtcoMkgm83qrrvu0pVXXqlFixZJ+v3xjMfjKioqGlM7lY/n2bZTkj7zmc9o1qxZqq6u1v79+/XFL35RLS0t+uEPf+hxtXa//OUv1dDQoMHBQeXn52v79u1auHCh9u3bd8GO5aQfQB8Wa9euHf33kiVLtHz5cs2aNUs/+MEPdMstt3hcGc7XTTfdNPrvxYsXa8mSJZozZ4527typFStWeFzZuWlsbNSBAwem/HOUH+S9tvO2224b/ffixYtVVVWlFStW6NChQ5ozZ86FXuY5mz9/vvbt26euri79y7/8i9avX6/m5uYLuoZJ/ye40tJSRSKRd70Co6OjQ5WVlZ5WNfGKiop08cUX6+DBg76XMiHePnYftuMqSbNnz1ZpaemUPLYbNmzQM888o5deemnMx6ZUVlZqeHhYnZ2dY+qn6vF8r+08m+XLl0vSlDue8Xhcc+fO1bJly9TU1KSlS5fqm9/85gU9lpN+AMXjcS1btkw7duwY/V42m9WOHTvU0NDgcWUTq7e3V4cOHVJVVZXvpUyI+vp6VVZWjjmu3d3deuWVV6b1cZV+/6m/p06dmlLHNggCbdiwQdu3b9eLL76o+vr6MdcvW7ZMsVhszPFsaWnR4cOHp9Tx/KDtPJt9+/ZJ0pQ6nmeTzWY1NDR0YY/luL6kYYI8/vjjQSKRCLZu3Rr8+te/Dm677bagqKgoaG9v9720cfNXf/VXwc6dO4PW1tbgJz/5SbBy5cqgtLQ0OH78uO+lnbOenp7g1VdfDV599dVAUvD1r389ePXVV4M333wzCIIgeOCBB4KioqLgqaeeCvbv3x9cd911QX19fTAwMOB55Tbvt509PT3B5z//+WDXrl1Ba2tr8MILLwQf/ehHg3nz5gWDg4O+l+7sjjvuCFKpVLBz587g2LFjo5f+/v7Rmttvvz2oq6sLXnzxxWDPnj1BQ0ND0NDQ4HHVdh+0nQcPHgzuv//+
YM+ePUFra2vw1FNPBbNnzw6uuuoqzyu3+dKXvhQ0NzcHra2twf79+4MvfelLQSgUCv793/89CIILdyynxAAKgiD49re/HdTV1QXxeDy4/PLLg927d/te0ri68cYbg6qqqiAejwczZ84MbrzxxuDgwYO+l3VeXnrppUDSuy7r168PguD3L8X+yle+ElRUVASJRCJYsWJF0NLS4nfR5+D9trO/vz9YtWpVUFZWFsRisWDWrFnBrbfeOuV+eTrb9kkKHn300dGagYGB4C/+4i+CGTNmBLm5ucGnPvWp4NixY/4WfQ4+aDsPHz4cXHXVVUFxcXGQSCSCuXPnBn/9138ddHV1+V240Z//+Z8Hs2bNCuLxeFBWVhasWLFidPgEwYU7lnwcAwDAi0n/HBAAYHpiAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8YAABALxgAAEAvGAAAQC8+P97tlGt2lCeiQAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 640x480 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Show the sample ranked `idx` by `pri_risk_rank`.\n",
    "# NOTE(review): presumably `pri_risk_rank` orders samples by privacy\n",
    "# risk and `X_data` holds the raw images -- defined in earlier cells,\n",
    "# confirm against them.\n",
    "imshow(X_data[pri_risk_rank[idx]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 100,
   "id": "9a280b28-71e2-4ac0-be7a-cab54c5af113",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "6"
      ]
     },
     "execution_count": 100,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Display the label of the same risk-ranked sample shown above\n",
    "# (bare last expression -> rich display; presumably a class index).\n",
    "Y_data[pri_risk_rank[idx]]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "63075866-0819-4386-8e3d-ee79a873b1fd",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "opacus",
   "language": "python",
   "name": "opacus"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
