{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "5e012f20-9c85-488e-9e61-725606ca819c",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2024-06-25 17:59:49.157211: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n",
      "2024-06-25 17:59:49.183606: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n",
      "To enable the following instructions: AVX2 AVX_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n",
      "2024-06-25 17:59:49.638347: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n"
     ]
    }
   ],
   "source": [
    "from monai.utils import first, set_determinism\n",
    "from monai.transforms import (\n",
    "    AsDiscrete,\n",
    "    AsDiscreted,\n",
    "    EnsureChannelFirstd,\n",
    "    Compose,\n",
    "    CropForegroundd,\n",
    "    LoadImaged,\n",
    "    Orientationd,\n",
    "    RandCropByPosNegLabeld,\n",
    "    ScaleIntensityRanged,\n",
    "    Spacingd,\n",
    "    Invertd,\n",
    "    LoadImage\n",
    ")\n",
    "\n",
    "from monai.data import CacheDataset, DataLoader, Dataset, decollate_batch\n",
    "import torch\n",
    "import tempfile\n",
    "import shutil\n",
    "import os\n",
    "import glob\n",
    "import torch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "7e934714-5c23-4911-a3ce-1b64ead6a446",
   "metadata": {},
   "outputs": [],
   "source": [
    "from monai.transforms import (\n",
    "    AsDiscrete,\n",
    "    AsDiscreted,\n",
    "    Activations,\n",
    "    EnsureChannelFirst,\n",
    "    Compose,\n",
    "    CropForeground,\n",
    "    LoadImage,\n",
    "    Orientation,\n",
    "    ScaleIntensity,\n",
    "    RandRotate,\n",
    "    RandFlip,\n",
    "    RandZoom,\n",
    "    LoadImage,\n",
    "    Lambda,\n",
    "    Resize\n",
    ")\n",
    "import numpy as np\n",
    "from PIL import Image\n",
    "from monai.data import PILReader\n",
    "from monai.networks.nets import DenseNet121\n",
    "from monai.metrics import ROCAUCMetric\n",
    "from torch.utils.tensorboard import SummaryWriter\n",
    "import pandas as pd\n",
    "import random"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "6392f6f2-7793-49d1-8f32-7c91e32f0578",
   "metadata": {},
   "outputs": [],
   "source": [
    "# from PIL import Image\n",
    "# np.array(Image.open(train_files[0]['image'])).shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "3d02a6f6-283a-46f6-82b0-0cf8adc421ad",
   "metadata": {},
   "outputs": [],
   "source": [
    "train_transforms = Compose(\n",
    "    [\n",
    "        LoadImage(image_only=True),\n",
    "        Lambda(func=lambda x: np.expand_dims(np.dot(x[..., :3], [0.299, 0.587, 0.114]).astype(np.uint8),0) if len(x.shape) == 3 else np.expand_dims(x,0)),\n",
    "#         EnsureChannelFirst(),\n",
    "        ScaleIntensity(),\n",
    "#         RandRotate(range_x=np.pi / 12, prob=0.5, keep_size=True),\n",
    "#         RandFlip(spatial_axis=0, prob=0.5),\n",
    "#         RandZoom(min_zoom=0.9, max_zoom=1.1, prob=0.5),\n",
    "        Resize((512,512)),\n",
    "#         Lambda(func=lambda x: x[0]),\n",
    "    ]\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "b86bf485-2c37-40b8-9c6e-f7e5d833ead0",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "112120"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "root_dir = '/mnt/datawow/lyl/images/chest/NIHChest-XRay/'\n",
    "all_images = glob.glob(root_dir+'images_*/images/*.png')\n",
    "id2img = {i.split('/')[-1]:i for i in all_images}\n",
    "len(all_images)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "9c51bd45-3936-4510-8cf8-aeeb04a5fabd",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "00030128_000.png /mnt/datawow/lyl/images/chest/NIHChest-XRay/images_012/images/00030128_000.png\n",
      "00030289_000.png /mnt/datawow/lyl/images/chest/NIHChest-XRay/images_012/images/00030289_000.png\n",
      "00028974_016.png /mnt/datawow/lyl/images/chest/NIHChest-XRay/images_012/images/00028974_016.png\n",
      "00030178_011.png /mnt/datawow/lyl/images/chest/NIHChest-XRay/images_012/images/00030178_011.png\n",
      "00029474_002.png /mnt/datawow/lyl/images/chest/NIHChest-XRay/images_012/images/00029474_002.png\n",
      "00030801_000.png /mnt/datawow/lyl/images/chest/NIHChest-XRay/images_012/images/00030801_000.png\n",
      "00028628_011.png /mnt/datawow/lyl/images/chest/NIHChest-XRay/images_012/images/00028628_011.png\n",
      "00030025_001.png /mnt/datawow/lyl/images/chest/NIHChest-XRay/images_012/images/00030025_001.png\n",
      "00029189_000.png /mnt/datawow/lyl/images/chest/NIHChest-XRay/images_012/images/00029189_000.png\n",
      "00029244_006.png /mnt/datawow/lyl/images/chest/NIHChest-XRay/images_012/images/00029244_006.png\n"
     ]
    }
   ],
   "source": [
    "step=0\n",
    "for k, v in id2img.items():\n",
    "    print(k,v)\n",
    "    step+=1\n",
    "    if step==10:\n",
    "        break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "d7f342a6-e8f8-48e5-bff7-8664d2cffcd6",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "60566 25958\n"
     ]
    }
   ],
   "source": [
    "head_n = 910000\n",
    "train_val_ids = [i.strip() for i in open(root_dir+'train_val_list.txt','r').readlines()]\n",
    "random.shuffle(train_val_ids)\n",
    "\n",
    "train_val_ids = train_val_ids[:head_n]\n",
    "# 计算分割点\n",
    "split_ratio=0.7\n",
    "split_point = int(len(train_val_ids) * split_ratio)\n",
    "\n",
    "# 分割列表\n",
    "train_ids = train_val_ids[:split_point]\n",
    "val_ids = train_val_ids[split_point:]\n",
    "print(len(train_ids),len(val_ids))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "3a53fce0-b224-4ca2-b0c3-9d5aeb9d2886",
   "metadata": {},
   "outputs": [],
   "source": [
    "df = pd.read_csv(root_dir+'Data_Entry_2017.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "f13babe0-b09a-41a7-acec-8f756d97e998",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Image Index</th>\n",
       "      <th>Finding Labels</th>\n",
       "      <th>Follow-up #</th>\n",
       "      <th>Patient ID</th>\n",
       "      <th>Patient Age</th>\n",
       "      <th>Patient Gender</th>\n",
       "      <th>View Position</th>\n",
       "      <th>OriginalImage[Width</th>\n",
       "      <th>Height]</th>\n",
       "      <th>OriginalImagePixelSpacing[x</th>\n",
       "      <th>y]</th>\n",
       "      <th>Unnamed: 11</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>00000001_000.png</td>\n",
       "      <td>Cardiomegaly</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>58</td>\n",
       "      <td>M</td>\n",
       "      <td>PA</td>\n",
       "      <td>2682</td>\n",
       "      <td>2749</td>\n",
       "      <td>0.143</td>\n",
       "      <td>0.143</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>00000001_001.png</td>\n",
       "      <td>Cardiomegaly|Emphysema</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "      <td>58</td>\n",
       "      <td>M</td>\n",
       "      <td>PA</td>\n",
       "      <td>2894</td>\n",
       "      <td>2729</td>\n",
       "      <td>0.143</td>\n",
       "      <td>0.143</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>00000001_002.png</td>\n",
       "      <td>Cardiomegaly|Effusion</td>\n",
       "      <td>2</td>\n",
       "      <td>1</td>\n",
       "      <td>58</td>\n",
       "      <td>M</td>\n",
       "      <td>PA</td>\n",
       "      <td>2500</td>\n",
       "      <td>2048</td>\n",
       "      <td>0.168</td>\n",
       "      <td>0.168</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>00000002_000.png</td>\n",
       "      <td>No Finding</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "      <td>81</td>\n",
       "      <td>M</td>\n",
       "      <td>PA</td>\n",
       "      <td>2500</td>\n",
       "      <td>2048</td>\n",
       "      <td>0.171</td>\n",
       "      <td>0.171</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>00000003_000.png</td>\n",
       "      <td>Hernia</td>\n",
       "      <td>0</td>\n",
       "      <td>3</td>\n",
       "      <td>81</td>\n",
       "      <td>F</td>\n",
       "      <td>PA</td>\n",
       "      <td>2582</td>\n",
       "      <td>2991</td>\n",
       "      <td>0.143</td>\n",
       "      <td>0.143</td>\n",
       "      <td>NaN</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "        Image Index          Finding Labels  Follow-up #  Patient ID  \\\n",
       "0  00000001_000.png            Cardiomegaly            0           1   \n",
       "1  00000001_001.png  Cardiomegaly|Emphysema            1           1   \n",
       "2  00000001_002.png   Cardiomegaly|Effusion            2           1   \n",
       "3  00000002_000.png              No Finding            0           2   \n",
       "4  00000003_000.png                  Hernia            0           3   \n",
       "\n",
       "   Patient Age Patient Gender View Position  OriginalImage[Width  Height]  \\\n",
       "0           58              M            PA                 2682     2749   \n",
       "1           58              M            PA                 2894     2729   \n",
       "2           58              M            PA                 2500     2048   \n",
       "3           81              M            PA                 2500     2048   \n",
       "4           81              F            PA                 2582     2991   \n",
       "\n",
       "   OriginalImagePixelSpacing[x     y]  Unnamed: 11  \n",
       "0                        0.143  0.143          NaN  \n",
       "1                        0.143  0.143          NaN  \n",
       "2                        0.168  0.168          NaN  \n",
       "3                        0.171  0.171          NaN  \n",
       "4                        0.143  0.143          NaN  "
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "6e0e6f3d-922f-4597-9851-04b7c664c79d",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "00000001_000.png [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n",
      "00000001_001.png [0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n",
      "00000001_002.png [0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n",
      "00000002_000.png [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0]\n",
      "00000003_000.png [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0]\n",
      "00000003_001.png [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0]\n",
      "00000003_002.png [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0]\n",
      "00000003_003.png [0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0]\n",
      "00000003_004.png [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0]\n",
      "00000003_005.png [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0]\n"
     ]
    }
   ],
   "source": [
    "multi_hot_df = df['Finding Labels'].str.get_dummies(sep='|')\n",
    "\n",
    "# 合并 ID 和多热编码\n",
    "multi_hot_df = df[['Image Index']].join(multi_hot_df)\n",
    "\n",
    "# 转换为字典\n",
    "id2label = multi_hot_df.set_index('Image Index').T.to_dict('list')\n",
    "step=0\n",
    "for k, v in id2label.items():\n",
    "    print(k,v)\n",
    "    step+=1\n",
    "    if step==10:\n",
    "        break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "c9a28a04-6474-4fab-94d1-12ac7b823603",
   "metadata": {},
   "outputs": [],
   "source": [
    "train_files = [{'image':id2img[i],'label':id2label[i]} for i in train_ids]\n",
    "val_files = [{'image':id2img[i],'label':id2label[i]} for i in val_ids]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "3bc66ca2-e125-4ba8-8473-3b396ce579c4",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'image': '/mnt/datawow/lyl/images/chest/NIHChest-XRay/images_007/images/00014052_007.png',\n",
       "  'label': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0]},\n",
       " {'image': '/mnt/datawow/lyl/images/chest/NIHChest-XRay/images_006/images/00011876_000.png',\n",
       "  'label': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0]},\n",
       " {'image': '/mnt/datawow/lyl/images/chest/NIHChest-XRay/images_005/images/00009689_002.png',\n",
       "  'label': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0]},\n",
       " {'image': '/mnt/datawow/lyl/images/chest/NIHChest-XRay/images_009/images/00020297_001.png',\n",
       "  'label': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0]},\n",
       " {'image': '/mnt/datawow/lyl/images/chest/NIHChest-XRay/images_003/images/00004833_042.png',\n",
       "  'label': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0]}]"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_files[:5]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "bf104487-ec0c-4772-ba15-ff6c25c58957",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([10, 1, 512, 512]) torch.Size([10, 15])\n",
      "torch.Size([10, 1, 512, 512]) torch.Size([10, 15])\n",
      "torch.Size([10, 1, 512, 512]) torch.Size([10, 15])\n",
      "torch.Size([10, 1, 512, 512]) torch.Size([10, 15])\n",
      "torch.Size([10, 1, 512, 512]) torch.Size([10, 15])\n",
      "torch.Size([10, 1, 512, 512]) torch.Size([10, 15])\n",
      "torch.Size([10, 1, 512, 512]) torch.Size([10, 15])\n",
      "torch.Size([10, 1, 512, 512]) torch.Size([10, 15])\n",
      "torch.Size([10, 1, 512, 512]) torch.Size([10, 15])\n",
      "torch.Size([10, 1, 512, 512]) torch.Size([10, 15])\n"
     ]
    }
   ],
   "source": [
    "class NIHChestXrayDataset(torch.utils.data.Dataset):\n",
    "    def __init__(self, data, transforms):\n",
    "        self.image_files = data\n",
    "        self.transforms = transforms\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.image_files)\n",
    "\n",
    "    def __getitem__(self, index):\n",
    "        return self.transforms(self.image_files[index]['image']),np.array(self.image_files[index]['label'],np.float32), self.image_files[index]\n",
    "\n",
    "\n",
    "train_ds = NIHChestXrayDataset(train_files,train_transforms)\n",
    "train_loader = DataLoader(train_ds, batch_size=10, shuffle=True, num_workers=10)\n",
    "\n",
    "val_ds = NIHChestXrayDataset(val_files,train_transforms)\n",
    "val_loader = DataLoader(val_ds, batch_size=10, shuffle=False, num_workers=10)\n",
    "\n",
    "step=0\n",
    "for x,y,_ in val_loader:\n",
    "    step+=1\n",
    "    print(x.shape, y.shape)\n",
    "    if step==10:\n",
    "        break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "5037953b-f5af-4649-9ebb-12f35d855850",
   "metadata": {},
   "outputs": [],
   "source": [
    "device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
    "model = DenseNet121(spatial_dims=2, in_channels=1, out_channels=15).to(device)\n",
    "loss_function = torch.nn.BCEWithLogitsLoss()\n",
    "optimizer = torch.optim.Adam(model.parameters(), 1e-5)\n",
    "max_epochs = 4\n",
    "val_interval = 1\n",
    "auc_metric = ROCAUCMetric()\n",
    "y_pred_trans = Compose([Activations(sigmoid=True)])\n",
    "# y_trans = Compose([AsDiscrete(to_onehot=False)])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "06b8e21a-8115-4335-a0b8-43d86b90c674",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "NIHChestXray_DenseNet121\n"
     ]
    }
   ],
   "source": [
    "dataset_name = 'NIHChestXray'\n",
    "model_name = f'{dataset_name}_{model.__class__.__name__}'\n",
    "print(model_name)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "99da10d2-f306-415a-b50a-a9fd399d64ae",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "----------\n",
      "epoch 1/4\n",
      "1/6056, train_loss: 0.7029\n",
      "2/6056, train_loss: 0.7052\n",
      "3/6056, train_loss: 0.6997\n",
      "4/6056, train_loss: 0.6956\n",
      "5/6056, train_loss: 0.6949\n",
      "6/6056, train_loss: 0.6946\n",
      "7/6056, train_loss: 0.6912\n",
      "8/6056, train_loss: 0.6934\n",
      "9/6056, train_loss: 0.6868\n",
      "10/6056, train_loss: 0.6840\n",
      "11/6056, train_loss: 0.6846\n",
      "12/6056, train_loss: 0.6752\n",
      "13/6056, train_loss: 0.6749\n",
      "14/6056, train_loss: 0.6742\n",
      "15/6056, train_loss: 0.6734\n",
      "16/6056, train_loss: 0.6696\n",
      "17/6056, train_loss: 0.6746\n",
      "18/6056, train_loss: 0.6654\n",
      "19/6056, train_loss: 0.6627\n",
      "20/6056, train_loss: 0.6604\n",
      "21/6056, train_loss: 0.6611\n",
      "22/6056, train_loss: 0.6522\n",
      "23/6056, train_loss: 0.6507\n",
      "24/6056, train_loss: 0.6525\n",
      "25/6056, train_loss: 0.6546\n",
      "26/6056, train_loss: 0.6478\n",
      "27/6056, train_loss: 0.6530\n",
      "28/6056, train_loss: 0.6496\n",
      "29/6056, train_loss: 0.6522\n",
      "30/6056, train_loss: 0.6467\n",
      "31/6056, train_loss: 0.6400\n",
      "32/6056, train_loss: 0.6399\n",
      "56/6056, train_loss: 0.5816\n",
      "57/6056, train_loss: 0.5854\n",
      "58/6056, train_loss: 0.5814\n",
      "59/6056, train_loss: 0.5961\n",
      "60/6056, train_loss: 0.5831\n",
      "61/6056, train_loss: 0.5846\n",
      "62/6056, train_loss: 0.5856\n",
      "63/6056, train_loss: 0.5682\n",
      "64/6056, train_loss: 0.5756\n",
      "65/6056, train_loss: 0.5785\n",
      "66/6056, train_loss: 0.5584\n",
      "67/6056, train_loss: 0.5620\n",
      "68/6056, train_loss: 0.5639\n",
      "69/6056, train_loss: 0.5677\n",
      "70/6056, train_loss: 0.5771\n",
      "71/6056, train_loss: 0.5738\n",
      "72/6056, train_loss: 0.5662\n",
      "73/6056, train_loss: 0.5688\n",
      "74/6056, train_loss: 0.5572\n",
      "75/6056, train_loss: 0.5647\n",
      "76/6056, train_loss: 0.5530\n",
      "77/6056, train_loss: 0.5646\n",
      "78/6056, train_loss: 0.5345\n",
      "79/6056, train_loss: 0.5492\n",
      "80/6056, train_loss: 0.5610\n",
      "81/6056, train_loss: 0.5535\n",
      "82/6056, train_loss: 0.5450\n",
      "83/6056, train_loss: 0.5464\n",
      "84/6056, train_loss: 0.5494\n",
      "85/6056, train_loss: 0.5408\n",
      "86/6056, train_loss: 0.5274\n",
      "87/6056, train_loss: 0.5473\n",
      "88/6056, train_loss: 0.5486\n",
      "89/6056, train_loss: 0.5317\n",
      "90/6056, train_loss: 0.5434\n",
      "91/6056, train_loss: 0.5226\n",
      "92/6056, train_loss: 0.5410\n",
      "93/6056, train_loss: 0.5234\n",
      "94/6056, train_loss: 0.5337\n",
      "95/6056, train_loss: 0.5321\n",
      "96/6056, train_loss: 0.5076\n",
      "97/6056, train_loss: 0.5188\n",
      "98/6056, train_loss: 0.5050\n",
      "99/6056, train_loss: 0.5124\n",
      "100/6056, train_loss: 0.5553\n",
      "101/6056, train_loss: 0.5400\n",
      "102/6056, train_loss: 0.5049\n",
      "103/6056, train_loss: 0.5272\n",
      "104/6056, train_loss: 0.5113\n",
      "105/6056, train_loss: 0.5041\n",
      "106/6056, train_loss: 0.5269\n",
      "107/6056, train_loss: 0.4964\n",
      "108/6056, train_loss: 0.5236\n",
      "109/6056, train_loss: 0.5324\n",
      "110/6056, train_loss: 0.5008\n",
      "111/6056, train_loss: 0.5041\n",
      "112/6056, train_loss: 0.4913\n",
      "113/6056, train_loss: 0.5168\n",
      "114/6056, train_loss: 0.5016\n",
      "115/6056, train_loss: 0.4993\n",
      "116/6056, train_loss: 0.4749\n",
      "117/6056, train_loss: 0.4882\n",
      "118/6056, train_loss: 0.4927\n",
      "119/6056, train_loss: 0.5084\n",
      "120/6056, train_loss: 0.5029\n",
      "121/6056, train_loss: 0.4950\n",
      "122/6056, train_loss: 0.4948\n",
      "123/6056, train_loss: 0.4751\n",
      "124/6056, train_loss: 0.4737\n",
      "125/6056, train_loss: 0.5073\n",
      "126/6056, train_loss: 0.4925\n",
      "127/6056, train_loss: 0.4814\n",
      "128/6056, train_loss: 0.4922\n",
      "129/6056, train_loss: 0.4916\n",
      "130/6056, train_loss: 0.4859\n",
      "131/6056, train_loss: 0.4746\n",
      "132/6056, train_loss: 0.4831\n",
      "133/6056, train_loss: 0.4797\n",
      "134/6056, train_loss: 0.4685\n",
      "135/6056, train_loss: 0.4887\n",
      "136/6056, train_loss: 0.4934\n",
      "137/6056, train_loss: 0.4837\n",
      "138/6056, train_loss: 0.4654\n",
      "139/6056, train_loss: 0.4656\n",
      "140/6056, train_loss: 0.4974\n",
      "141/6056, train_loss: 0.4544\n",
      "142/6056, train_loss: 0.4374\n",
      "143/6056, train_loss: 0.4580\n",
      "144/6056, train_loss: 0.4675\n",
      "145/6056, train_loss: 0.4469\n",
      "146/6056, train_loss: 0.4643\n",
      "147/6056, train_loss: 0.4670\n",
      "148/6056, train_loss: 0.4530\n",
      "149/6056, train_loss: 0.4827\n",
      "150/6056, train_loss: 0.4645\n",
      "151/6056, train_loss: 0.4365\n",
      "152/6056, train_loss: 0.4563\n",
      "153/6056, train_loss: 0.4532\n",
      "154/6056, train_loss: 0.4360\n",
      "155/6056, train_loss: 0.4470\n",
      "156/6056, train_loss: 0.4371\n",
      "157/6056, train_loss: 0.4291\n",
      "158/6056, train_loss: 0.4366\n",
      "159/6056, train_loss: 0.4466\n",
      "160/6056, train_loss: 0.4487\n",
      "161/6056, train_loss: 0.4861\n",
      "162/6056, train_loss: 0.4244\n",
      "163/6056, train_loss: 0.4446\n",
      "164/6056, train_loss: 0.4572\n",
      "165/6056, train_loss: 0.4410\n",
      "166/6056, train_loss: 0.4495\n",
      "167/6056, train_loss: 0.4330\n",
      "168/6056, train_loss: 0.4448\n",
      "169/6056, train_loss: 0.4392\n",
      "170/6056, train_loss: 0.4376\n",
      "171/6056, train_loss: 0.4425\n",
      "172/6056, train_loss: 0.4228\n",
      "173/6056, train_loss: 0.4401\n",
      "174/6056, train_loss: 0.4886\n",
      "175/6056, train_loss: 0.4714\n",
      "176/6056, train_loss: 0.4223\n",
      "177/6056, train_loss: 0.4492\n",
      "178/6056, train_loss: 0.4042\n",
      "179/6056, train_loss: 0.4133\n",
      "180/6056, train_loss: 0.4491\n",
      "181/6056, train_loss: 0.4047\n",
      "182/6056, train_loss: 0.4109\n",
      "183/6056, train_loss: 0.4006\n",
      "184/6056, train_loss: 0.4474\n",
      "185/6056, train_loss: 0.4162\n",
      "186/6056, train_loss: 0.4111\n",
      "187/6056, train_loss: 0.4025\n",
      "188/6056, train_loss: 0.3983\n",
      "189/6056, train_loss: 0.3929\n",
      "190/6056, train_loss: 0.3986\n",
      "191/6056, train_loss: 0.4193\n",
      "192/6056, train_loss: 0.4411\n",
      "193/6056, train_loss: 0.4247\n",
      "194/6056, train_loss: 0.4112\n",
      "195/6056, train_loss: 0.4307\n",
      "196/6056, train_loss: 0.4290\n",
      "197/6056, train_loss: 0.4011\n",
      "198/6056, train_loss: 0.4269\n",
      "199/6056, train_loss: 0.4255\n",
      "200/6056, train_loss: 0.4275\n",
      "201/6056, train_loss: 0.4047\n",
      "202/6056, train_loss: 0.3952\n",
      "203/6056, train_loss: 0.4253\n",
      "204/6056, train_loss: 0.4060\n",
      "205/6056, train_loss: 0.4375\n",
      "206/6056, train_loss: 0.4046\n",
      "207/6056, train_loss: 0.4554\n",
      "208/6056, train_loss: 0.4068\n",
      "209/6056, train_loss: 0.4056\n",
      "210/6056, train_loss: 0.4065\n",
      "211/6056, train_loss: 0.3955\n",
      "212/6056, train_loss: 0.4190\n",
      "213/6056, train_loss: 0.4120\n",
      "214/6056, train_loss: 0.3636\n",
      "215/6056, train_loss: 0.4230\n",
      "216/6056, train_loss: 0.3712\n",
      "217/6056, train_loss: 0.4098\n",
      "218/6056, train_loss: 0.4193\n",
      "219/6056, train_loss: 0.4372\n",
      "220/6056, train_loss: 0.3596\n",
      "221/6056, train_loss: 0.4031\n",
      "222/6056, train_loss: 0.4117\n",
      "223/6056, train_loss: 0.3835\n",
      "224/6056, train_loss: 0.4051\n",
      "225/6056, train_loss: 0.3678\n",
      "226/6056, train_loss: 0.3675\n",
      "227/6056, train_loss: 0.4078\n",
      "228/6056, train_loss: 0.4267\n",
      "229/6056, train_loss: 0.3814\n",
      "230/6056, train_loss: 0.4207\n",
      "231/6056, train_loss: 0.4405\n",
      "232/6056, train_loss: 0.3564\n",
      "233/6056, train_loss: 0.3417\n",
      "234/6056, train_loss: 0.3855\n",
      "235/6056, train_loss: 0.3891\n",
      "236/6056, train_loss: 0.4214\n",
      "237/6056, train_loss: 0.4326\n",
      "238/6056, train_loss: 0.4145\n",
      "239/6056, train_loss: 0.4279\n",
      "240/6056, train_loss: 0.3478\n",
      "241/6056, train_loss: 0.3545\n",
      "242/6056, train_loss: 0.3837\n",
      "243/6056, train_loss: 0.3826\n",
      "244/6056, train_loss: 0.3833\n",
      "245/6056, train_loss: 0.3669\n",
      "246/6056, train_loss: 0.4022\n",
      "247/6056, train_loss: 0.4225\n",
      "248/6056, train_loss: 0.3934\n",
      "249/6056, train_loss: 0.3758\n",
      "250/6056, train_loss: 0.3391\n",
      "251/6056, train_loss: 0.3694\n",
      "252/6056, train_loss: 0.3543\n",
      "253/6056, train_loss: 0.3371\n",
      "254/6056, train_loss: 0.3544\n",
      "255/6056, train_loss: 0.3689\n",
      "256/6056, train_loss: 0.3676\n",
      "257/6056, train_loss: 0.3632\n",
      "258/6056, train_loss: 0.3498\n",
      "259/6056, train_loss: 0.3552\n",
      "260/6056, train_loss: 0.3486\n",
      "261/6056, train_loss: 0.3674\n",
      "262/6056, train_loss: 0.3817\n",
      "263/6056, train_loss: 0.3485\n",
      "264/6056, train_loss: 0.3528\n",
      "265/6056, train_loss: 0.3530\n",
      "266/6056, train_loss: 0.3520\n",
      "267/6056, train_loss: 0.3390\n",
      "268/6056, train_loss: 0.4053\n",
      "269/6056, train_loss: 0.3621\n",
      "270/6056, train_loss: 0.3424\n",
      "271/6056, train_loss: 0.3873\n",
      "272/6056, train_loss: 0.3484\n",
      "273/6056, train_loss: 0.3690\n",
      "274/6056, train_loss: 0.3106\n",
      "275/6056, train_loss: 0.3478\n",
      "276/6056, train_loss: 0.3639\n",
      "277/6056, train_loss: 0.3286\n",
      "278/6056, train_loss: 0.3690\n",
      "279/6056, train_loss: 0.3557\n",
      "280/6056, train_loss: 0.3957\n",
      "281/6056, train_loss: 0.3746\n",
      "282/6056, train_loss: 0.3880\n",
      "283/6056, train_loss: 0.4187\n",
      "284/6056, train_loss: 0.3355\n",
      "285/6056, train_loss: 0.3782\n",
      "286/6056, train_loss: 0.3535\n",
      "287/6056, train_loss: 0.3633\n",
      "288/6056, train_loss: 0.3363\n",
      "289/6056, train_loss: 0.4002\n",
      "290/6056, train_loss: 0.3420\n",
      "291/6056, train_loss: 0.3309\n",
      "292/6056, train_loss: 0.3543\n",
      "293/6056, train_loss: 0.3581\n",
      "294/6056, train_loss: 0.3107\n",
      "295/6056, train_loss: 0.3447\n",
      "296/6056, train_loss: 0.3463\n",
      "297/6056, train_loss: 0.3677\n",
      "298/6056, train_loss: 0.3465\n",
      "299/6056, train_loss: 0.3049\n",
      "300/6056, train_loss: 0.3528\n",
      "301/6056, train_loss: 0.3215\n",
      "302/6056, train_loss: 0.3725\n",
      "303/6056, train_loss: 0.3588\n",
      "304/6056, train_loss: 0.3219\n",
      "305/6056, train_loss: 0.3794\n",
      "306/6056, train_loss: 0.3403\n",
      "307/6056, train_loss: 0.3275\n",
      "308/6056, train_loss: 0.3132\n",
      "309/6056, train_loss: 0.3571\n",
      "310/6056, train_loss: 0.3646\n",
      "311/6056, train_loss: 0.3567\n",
      "312/6056, train_loss: 0.3468\n",
      "313/6056, train_loss: 0.3660\n",
      "314/6056, train_loss: 0.3545\n",
      "315/6056, train_loss: 0.3175\n",
      "316/6056, train_loss: 0.3193\n",
      "317/6056, train_loss: 0.3052\n",
      "318/6056, train_loss: 0.3170\n",
      "319/6056, train_loss: 0.3524\n",
      "320/6056, train_loss: 0.3615\n",
      "321/6056, train_loss: 0.3452\n",
      "322/6056, train_loss: 0.3111\n",
      "323/6056, train_loss: 0.3167\n",
      "324/6056, train_loss: 0.3184\n",
      "325/6056, train_loss: 0.3330\n",
      "326/6056, train_loss: 0.3175\n",
      "327/6056, train_loss: 0.3135\n",
      "328/6056, train_loss: 0.3447\n",
      "329/6056, train_loss: 0.3117\n",
      "330/6056, train_loss: 0.3348\n",
      "331/6056, train_loss: 0.3218\n",
      "332/6056, train_loss: 0.3119\n",
      "333/6056, train_loss: 0.3006\n",
      "334/6056, train_loss: 0.3622\n",
      "335/6056, train_loss: 0.3377\n",
      "336/6056, train_loss: 0.3138\n",
      "337/6056, train_loss: 0.3073\n",
      "338/6056, train_loss: 0.3683\n",
      "339/6056, train_loss: 0.3701\n",
      "340/6056, train_loss: 0.3255\n",
      "341/6056, train_loss: 0.3321\n",
      "342/6056, train_loss: 0.3196\n",
      "343/6056, train_loss: 0.3251\n",
      "344/6056, train_loss: 0.3312\n",
      "345/6056, train_loss: 0.3187\n",
      "346/6056, train_loss: 0.3921\n",
      "347/6056, train_loss: 0.3075\n",
      "348/6056, train_loss: 0.3364\n",
      "349/6056, train_loss: 0.3514\n",
      "350/6056, train_loss: 0.3583\n",
      "351/6056, train_loss: 0.3225\n",
      "352/6056, train_loss: 0.3131\n",
      "353/6056, train_loss: 0.3466\n",
      "354/6056, train_loss: 0.3053\n",
      "355/6056, train_loss: 0.3189\n",
      "356/6056, train_loss: 0.3199\n",
      "357/6056, train_loss: 0.3313\n",
      "358/6056, train_loss: 0.2721\n",
      "359/6056, train_loss: 0.3188\n",
      "360/6056, train_loss: 0.3293\n",
      "361/6056, train_loss: 0.3065\n",
      "362/6056, train_loss: 0.2804\n",
      "363/6056, train_loss: 0.2976\n",
      "364/6056, train_loss: 0.3457\n",
      "365/6056, train_loss: 0.2671\n",
      "366/6056, train_loss: 0.2679\n",
      "367/6056, train_loss: 0.3683\n",
      "368/6056, train_loss: 0.3263\n",
      "369/6056, train_loss: 0.3739\n",
      "370/6056, train_loss: 0.2870\n",
      "371/6056, train_loss: 0.2882\n",
      "372/6056, train_loss: 0.3200\n",
      "373/6056, train_loss: 0.2754\n",
      "374/6056, train_loss: 0.2736\n",
      "375/6056, train_loss: 0.3140\n",
      "376/6056, train_loss: 0.3103\n",
      "377/6056, train_loss: 0.3529\n",
      "378/6056, train_loss: 0.3259\n",
      "379/6056, train_loss: 0.2911\n",
      "380/6056, train_loss: 0.2906\n",
      "381/6056, train_loss: 0.3066\n",
      "382/6056, train_loss: 0.3233\n",
      "383/6056, train_loss: 0.2681\n",
      "384/6056, train_loss: 0.3599\n",
      "385/6056, train_loss: 0.3091\n",
      "386/6056, train_loss: 0.3337\n",
      "387/6056, train_loss: 0.3400\n",
      "388/6056, train_loss: 0.3164\n",
      "389/6056, train_loss: 0.2989\n",
      "390/6056, train_loss: 0.2792\n",
      "391/6056, train_loss: 0.3427\n",
      "392/6056, train_loss: 0.2735\n",
      "393/6056, train_loss: 0.2724\n",
      "394/6056, train_loss: 0.2796\n",
      "395/6056, train_loss: 0.2839\n",
      "396/6056, train_loss: 0.2802\n",
      "397/6056, train_loss: 0.3289\n",
      "398/6056, train_loss: 0.2849\n",
      "399/6056, train_loss: 0.2686\n",
      "400/6056, train_loss: 0.2481\n",
      "401/6056, train_loss: 0.3133\n",
      "402/6056, train_loss: 0.2672\n",
      "403/6056, train_loss: 0.3218\n",
      "404/6056, train_loss: 0.3044\n",
      "405/6056, train_loss: 0.2861\n",
      "406/6056, train_loss: 0.2889\n",
      "407/6056, train_loss: 0.2588\n",
      "408/6056, train_loss: 0.3131\n",
      "409/6056, train_loss: 0.2862\n",
      "410/6056, train_loss: 0.2582\n",
      "411/6056, train_loss: 0.2909\n",
      "412/6056, train_loss: 0.3081\n",
      "413/6056, train_loss: 0.2933\n",
      "414/6056, train_loss: 0.2744\n",
      "415/6056, train_loss: 0.3066\n",
      "416/6056, train_loss: 0.3212\n",
      "417/6056, train_loss: 0.2867\n",
      "418/6056, train_loss: 0.2770\n",
      "419/6056, train_loss: 0.2934\n",
      "420/6056, train_loss: 0.2545\n",
      "421/6056, train_loss: 0.2706\n",
      "422/6056, train_loss: 0.2722\n",
      "423/6056, train_loss: 0.3333\n",
      "424/6056, train_loss: 0.3307\n",
      "425/6056, train_loss: 0.2860\n",
      "426/6056, train_loss: 0.2745\n",
      "427/6056, train_loss: 0.3360\n",
      "428/6056, train_loss: 0.2716\n",
      "429/6056, train_loss: 0.2691\n",
      "430/6056, train_loss: 0.2972\n",
      "431/6056, train_loss: 0.2476\n",
      "432/6056, train_loss: 0.2939\n",
      "433/6056, train_loss: 0.2605\n",
      "434/6056, train_loss: 0.3363\n",
      "435/6056, train_loss: 0.2360\n",
      "436/6056, train_loss: 0.2880\n",
      "437/6056, train_loss: 0.3076\n",
      "438/6056, train_loss: 0.3182\n",
      "439/6056, train_loss: 0.2799\n",
      "440/6056, train_loss: 0.3380\n",
      "441/6056, train_loss: 0.3290\n",
      "442/6056, train_loss: 0.3147\n",
      "443/6056, train_loss: 0.2837\n",
      "444/6056, train_loss: 0.2969\n",
      "445/6056, train_loss: 0.2320\n",
      "446/6056, train_loss: 0.2800\n",
      "447/6056, train_loss: 0.2543\n",
      "448/6056, train_loss: 0.2758\n",
      "449/6056, train_loss: 0.2521\n",
      "450/6056, train_loss: 0.2967\n",
      "451/6056, train_loss: 0.2513\n",
      "452/6056, train_loss: 0.3051\n",
      "453/6056, train_loss: 0.2520\n",
      "454/6056, train_loss: 0.2949\n",
      "455/6056, train_loss: 0.3124\n",
      "456/6056, train_loss: 0.2782\n",
      "457/6056, train_loss: 0.2845\n",
      "458/6056, train_loss: 0.2444\n",
      "459/6056, train_loss: 0.3250\n",
      "460/6056, train_loss: 0.3159\n",
      "461/6056, train_loss: 0.2824\n",
      "462/6056, train_loss: 0.3136\n",
      "463/6056, train_loss: 0.2987\n",
      "464/6056, train_loss: 0.2366\n",
      "465/6056, train_loss: 0.2647\n",
      "466/6056, train_loss: 0.2733\n",
      "467/6056, train_loss: 0.3010\n",
      "468/6056, train_loss: 0.2624\n",
      "469/6056, train_loss: 0.2388\n",
      "470/6056, train_loss: 0.2808\n",
      "471/6056, train_loss: 0.3257\n",
      "472/6056, train_loss: 0.2578\n",
      "473/6056, train_loss: 0.2476\n",
      "474/6056, train_loss: 0.2607\n",
      "475/6056, train_loss: 0.2668\n",
      "494/6056, train_loss: 0.2711\n",
      "495/6056, train_loss: 0.2651\n",
      "496/6056, train_loss: 0.2774\n",
      "497/6056, train_loss: 0.2338\n",
      "498/6056, train_loss: 0.2854\n",
      "499/6056, train_loss: 0.2357\n",
      "500/6056, train_loss: 0.2832\n",
      "501/6056, train_loss: 0.2739\n",
      "502/6056, train_loss: 0.2764\n",
      "503/6056, train_loss: 0.2685\n",
      "504/6056, train_loss: 0.2727\n",
      "505/6056, train_loss: 0.3164\n",
      "506/6056, train_loss: 0.3092\n",
      "507/6056, train_loss: 0.2467\n",
      "508/6056, train_loss: 0.2942\n",
      "509/6056, train_loss: 0.3269\n",
      "510/6056, train_loss: 0.2460\n",
      "511/6056, train_loss: 0.2459\n",
      "512/6056, train_loss: 0.3670\n",
      "513/6056, train_loss: 0.2542\n",
      "514/6056, train_loss: 0.3069\n",
      "515/6056, train_loss: 0.2898\n",
      "516/6056, train_loss: 0.3105\n",
      "517/6056, train_loss: 0.2743\n",
      "518/6056, train_loss: 0.2814\n",
      "519/6056, train_loss: 0.2378\n",
      "520/6056, train_loss: 0.2192\n",
      "521/6056, train_loss: 0.2235\n",
      "522/6056, train_loss: 0.2553\n",
      "523/6056, train_loss: 0.2280\n",
      "524/6056, train_loss: 0.2324\n",
      "525/6056, train_loss: 0.2844\n",
      "526/6056, train_loss: 0.2945\n",
      "527/6056, train_loss: 0.3081\n",
      "528/6056, train_loss: 0.2794\n",
      "529/6056, train_loss: 0.2785\n",
      "530/6056, train_loss: 0.2827\n",
      "531/6056, train_loss: 0.2422\n",
      "532/6056, train_loss: 0.2435\n",
      "533/6056, train_loss: 0.2311\n",
      "534/6056, train_loss: 0.3002\n",
      "535/6056, train_loss: 0.2344\n",
      "536/6056, train_loss: 0.2652\n",
      "537/6056, train_loss: 0.2416\n",
      "538/6056, train_loss: 0.2940\n",
      "539/6056, train_loss: 0.2852\n",
      "540/6056, train_loss: 0.2866\n",
      "541/6056, train_loss: 0.2893\n",
      "542/6056, train_loss: 0.3185\n",
      "543/6056, train_loss: 0.3288\n",
      "544/6056, train_loss: 0.2250\n",
      "545/6056, train_loss: 0.2758\n",
      "546/6056, train_loss: 0.2738\n",
      "547/6056, train_loss: 0.2356\n",
      "548/6056, train_loss: 0.2570\n",
      "549/6056, train_loss: 0.2454\n",
      "550/6056, train_loss: 0.2380\n",
      "551/6056, train_loss: 0.2161\n",
      "552/6056, train_loss: 0.2174\n",
      "553/6056, train_loss: 0.2904\n",
      "554/6056, train_loss: 0.2284\n",
      "555/6056, train_loss: 0.2711\n",
      "556/6056, train_loss: 0.2269\n",
      "557/6056, train_loss: 0.2957\n",
      "558/6056, train_loss: 0.2914\n",
      "559/6056, train_loss: 0.2664\n",
      "560/6056, train_loss: 0.2564\n",
      "561/6056, train_loss: 0.2980\n",
      "562/6056, train_loss: 0.2623\n",
      "563/6056, train_loss: 0.3218\n",
      "564/6056, train_loss: 0.2644\n",
      "565/6056, train_loss: 0.2760\n",
      "566/6056, train_loss: 0.2691\n",
      "567/6056, train_loss: 0.2070\n",
      "568/6056, train_loss: 0.2902\n",
      "569/6056, train_loss: 0.2213\n",
      "570/6056, train_loss: 0.2251\n",
      "571/6056, train_loss: 0.2258\n",
      "572/6056, train_loss: 0.2309\n",
      "573/6056, train_loss: 0.2568\n",
      "574/6056, train_loss: 0.2241\n",
      "575/6056, train_loss: 0.2193\n",
      "576/6056, train_loss: 0.2812\n",
      "577/6056, train_loss: 0.2703\n",
      "578/6056, train_loss: 0.2535\n",
      "579/6056, train_loss: 0.2755\n",
      "580/6056, train_loss: 0.2556\n",
      "581/6056, train_loss: 0.2271\n",
      "582/6056, train_loss: 0.2249\n",
      "583/6056, train_loss: 0.2232\n",
      "584/6056, train_loss: 0.2811\n",
      "585/6056, train_loss: 0.2253\n",
      "586/6056, train_loss: 0.2296\n",
      "587/6056, train_loss: 0.2190\n",
      "588/6056, train_loss: 0.2231\n",
      "589/6056, train_loss: 0.2500\n",
      "590/6056, train_loss: 0.2434\n",
      "591/6056, train_loss: 0.2930\n",
      "592/6056, train_loss: 0.2907\n",
      "593/6056, train_loss: 0.3113\n",
      "594/6056, train_loss: 0.2162\n",
      "595/6056, train_loss: 0.2963\n",
      "596/6056, train_loss: 0.2555\n",
      "597/6056, train_loss: 0.2707\n",
      "598/6056, train_loss: 0.2800\n",
      "599/6056, train_loss: 0.2671\n",
      "600/6056, train_loss: 0.2609\n",
      "601/6056, train_loss: 0.2427\n",
      "602/6056, train_loss: 0.2168\n",
      "603/6056, train_loss: 0.2229\n",
      "604/6056, train_loss: 0.2146\n",
      "605/6056, train_loss: 0.2753\n",
      "606/6056, train_loss: 0.2480\n",
      "607/6056, train_loss: 0.2136\n",
      "608/6056, train_loss: 0.2797\n",
      "609/6056, train_loss: 0.3344\n",
      "610/6056, train_loss: 0.2543\n",
      "611/6056, train_loss: 0.2940\n",
      "612/6056, train_loss: 0.2891\n",
      "613/6056, train_loss: 0.3300\n",
      "614/6056, train_loss: 0.2747\n",
      "615/6056, train_loss: 0.2337\n",
      "616/6056, train_loss: 0.2199\n",
      "617/6056, train_loss: 0.2457\n",
      "618/6056, train_loss: 0.2991\n",
      "619/6056, train_loss: 0.2044\n",
      "620/6056, train_loss: 0.2280\n",
      "621/6056, train_loss: 0.2718\n",
      "622/6056, train_loss: 0.2253\n",
      "623/6056, train_loss: 0.2174\n",
      "624/6056, train_loss: 0.1933\n",
      "625/6056, train_loss: 0.2408\n",
      "626/6056, train_loss: 0.2590\n",
      "627/6056, train_loss: 0.2888\n",
      "628/6056, train_loss: 0.1955\n",
      "629/6056, train_loss: 0.2817\n",
      "630/6056, train_loss: 0.2137\n",
      "631/6056, train_loss: 0.2187\n",
      "632/6056, train_loss: 0.2562\n",
      "633/6056, train_loss: 0.3291\n",
      "634/6056, train_loss: 0.2777\n",
      "635/6056, train_loss: 0.2648\n",
      "636/6056, train_loss: 0.2178\n",
      "637/6056, train_loss: 0.2851\n",
      "638/6056, train_loss: 0.2487\n",
      "639/6056, train_loss: 0.2518\n",
      "640/6056, train_loss: 0.2073\n",
      "641/6056, train_loss: 0.1979\n",
      "642/6056, train_loss: 0.1779\n",
      "643/6056, train_loss: 0.3454\n",
      "644/6056, train_loss: 0.2975\n",
      "645/6056, train_loss: 0.2612\n",
      "646/6056, train_loss: 0.3307\n",
      "647/6056, train_loss: 0.2543\n",
      "648/6056, train_loss: 0.2439\n",
      "649/6056, train_loss: 0.2690\n",
      "650/6056, train_loss: 0.2414\n",
      "651/6056, train_loss: 0.2236\n",
      "652/6056, train_loss: 0.2554\n",
      "653/6056, train_loss: 0.1923\n",
      "654/6056, train_loss: 0.2727\n",
      "655/6056, train_loss: 0.2909\n",
      "656/6056, train_loss: 0.2428\n",
      "657/6056, train_loss: 0.2467\n",
      "658/6056, train_loss: 0.3065\n",
      "659/6056, train_loss: 0.2831\n",
      "660/6056, train_loss: 0.2778\n",
      "661/6056, train_loss: 0.2495\n",
      "662/6056, train_loss: 0.2546\n",
      "663/6056, train_loss: 0.2573\n",
      "664/6056, train_loss: 0.2080\n",
      "665/6056, train_loss: 0.2397\n",
      "666/6056, train_loss: 0.2557\n",
      "667/6056, train_loss: 0.2598\n",
      "668/6056, train_loss: 0.2473\n",
      "669/6056, train_loss: 0.2389\n",
      "670/6056, train_loss: 0.2063\n",
      "671/6056, train_loss: 0.1932\n",
      "672/6056, train_loss: 0.2159\n",
      "673/6056, train_loss: 0.2183\n",
      "674/6056, train_loss: 0.2277\n",
      "675/6056, train_loss: 0.2476\n",
      "676/6056, train_loss: 0.2143\n",
      "677/6056, train_loss: 0.2001\n",
      "678/6056, train_loss: 0.2526\n",
      "679/6056, train_loss: 0.3520\n",
      "680/6056, train_loss: 0.2306\n",
      "681/6056, train_loss: 0.2279\n",
      "682/6056, train_loss: 0.2927\n",
      "683/6056, train_loss: 0.2240\n",
      "684/6056, train_loss: 0.2266\n",
      "685/6056, train_loss: 0.2432\n",
      "686/6056, train_loss: 0.1882\n",
      "687/6056, train_loss: 0.1929\n",
      "688/6056, train_loss: 0.2616\n",
      "689/6056, train_loss: 0.2436\n",
      "690/6056, train_loss: 0.3182\n",
      "691/6056, train_loss: 0.2132\n",
      "692/6056, train_loss: 0.2375\n",
      "693/6056, train_loss: 0.2253\n",
      "694/6056, train_loss: 0.2424\n",
      "695/6056, train_loss: 0.2403\n",
      "696/6056, train_loss: 0.3226\n",
      "697/6056, train_loss: 0.2762\n",
      "698/6056, train_loss: 0.1967\n",
      "699/6056, train_loss: 0.2810\n",
      "700/6056, train_loss: 0.2155\n",
      "701/6056, train_loss: 0.2426\n",
      "702/6056, train_loss: 0.2540\n",
      "703/6056, train_loss: 0.2519\n",
      "704/6056, train_loss: 0.2907\n",
      "705/6056, train_loss: 0.2486\n",
      "706/6056, train_loss: 0.2487\n",
      "707/6056, train_loss: 0.3071\n",
      "708/6056, train_loss: 0.2206\n",
      "709/6056, train_loss: 0.1902\n",
      "710/6056, train_loss: 0.1731\n",
      "711/6056, train_loss: 0.2743\n",
      "712/6056, train_loss: 0.2395\n",
      "713/6056, train_loss: 0.2671\n",
      "714/6056, train_loss: 0.2131\n",
      "715/6056, train_loss: 0.1971\n",
      "716/6056, train_loss: 0.2443\n",
      "717/6056, train_loss: 0.2095\n",
      "718/6056, train_loss: 0.2084\n",
      "719/6056, train_loss: 0.2540\n",
      "720/6056, train_loss: 0.2705\n",
      "721/6056, train_loss: 0.2924\n",
      "722/6056, train_loss: 0.2997\n",
      "723/6056, train_loss: 0.2047\n",
      "724/6056, train_loss: 0.1998\n",
      "725/6056, train_loss: 0.2211\n",
      "726/6056, train_loss: 0.2302\n",
      "727/6056, train_loss: 0.1771\n",
      "728/6056, train_loss: 0.2347\n",
      "729/6056, train_loss: 0.2788\n",
      "730/6056, train_loss: 0.2446\n",
      "731/6056, train_loss: 0.2779\n",
      "732/6056, train_loss: 0.2577\n",
      "733/6056, train_loss: 0.2289\n",
      "734/6056, train_loss: 0.2965\n",
      "735/6056, train_loss: 0.2744\n",
      "736/6056, train_loss: 0.2430\n",
      "737/6056, train_loss: 0.2451\n",
      "738/6056, train_loss: 0.2187\n",
      "739/6056, train_loss: 0.2051\n",
      "740/6056, train_loss: 0.2559\n",
      "741/6056, train_loss: 0.2412\n",
      "742/6056, train_loss: 0.2174\n",
      "743/6056, train_loss: 0.1905\n",
      "744/6056, train_loss: 0.2914\n",
      "745/6056, train_loss: 0.2082\n",
      "746/6056, train_loss: 0.2773\n",
      "747/6056, train_loss: 0.2175\n",
      "748/6056, train_loss: 0.1758\n",
      "749/6056, train_loss: 0.2026\n",
      "750/6056, train_loss: 0.2212\n",
      "751/6056, train_loss: 0.2002\n",
      "752/6056, train_loss: 0.2920\n",
      "753/6056, train_loss: 0.2569\n",
      "754/6056, train_loss: 0.2480\n",
      "755/6056, train_loss: 0.1958\n",
      "756/6056, train_loss: 0.2070\n",
      "757/6056, train_loss: 0.3121\n",
      "758/6056, train_loss: 0.2624\n",
      "759/6056, train_loss: 0.3125\n",
      "760/6056, train_loss: 0.1967\n",
      "761/6056, train_loss: 0.3276\n",
      "762/6056, train_loss: 0.1777\n",
      "763/6056, train_loss: 0.2316\n",
      "764/6056, train_loss: 0.1904\n",
      "765/6056, train_loss: 0.1784\n",
      "766/6056, train_loss: 0.2029\n",
      "767/6056, train_loss: 0.2189\n",
      "768/6056, train_loss: 0.2740\n",
      "769/6056, train_loss: 0.2062\n",
      "770/6056, train_loss: 0.1719\n",
      "771/6056, train_loss: 0.2275\n",
      "772/6056, train_loss: 0.2630\n",
      "773/6056, train_loss: 0.1852\n",
      "774/6056, train_loss: 0.2325\n",
      "775/6056, train_loss: 0.2577\n",
      "776/6056, train_loss: 0.2074\n",
      "777/6056, train_loss: 0.1973\n",
      "778/6056, train_loss: 0.2404\n",
      "779/6056, train_loss: 0.1974\n",
      "780/6056, train_loss: 0.2301\n",
      "781/6056, train_loss: 0.1944\n",
      "782/6056, train_loss: 0.2304\n",
      "783/6056, train_loss: 0.1748\n",
      "784/6056, train_loss: 0.2093\n",
      "785/6056, train_loss: 0.1888\n",
      "786/6056, train_loss: 0.2099\n",
      "787/6056, train_loss: 0.1850\n",
      "788/6056, train_loss: 0.1754\n",
      "789/6056, train_loss: 0.2052\n",
      "790/6056, train_loss: 0.2401\n",
      "791/6056, train_loss: 0.2269\n",
      "792/6056, train_loss: 0.2238\n",
      "793/6056, train_loss: 0.1751\n",
      "794/6056, train_loss: 0.1759\n",
      "795/6056, train_loss: 0.2323\n",
      "796/6056, train_loss: 0.2486\n",
      "797/6056, train_loss: 0.2125\n",
      "798/6056, train_loss: 0.3298\n",
      "799/6056, train_loss: 0.1728\n",
      "800/6056, train_loss: 0.1995\n",
      "801/6056, train_loss: 0.1860\n",
      "802/6056, train_loss: 0.2353\n",
      "803/6056, train_loss: 0.2449\n",
      "804/6056, train_loss: 0.1719\n",
      "805/6056, train_loss: 0.2030\n",
      "806/6056, train_loss: 0.1834\n",
      "807/6056, train_loss: 0.2489\n",
      "808/6056, train_loss: 0.3294\n",
      "809/6056, train_loss: 0.1894\n",
      "810/6056, train_loss: 0.1951\n",
      "811/6056, train_loss: 0.2136\n",
      "812/6056, train_loss: 0.2820\n",
      "813/6056, train_loss: 0.2647\n",
      "814/6056, train_loss: 0.1526\n",
      "815/6056, train_loss: 0.1889\n",
      "816/6056, train_loss: 0.2116\n",
      "817/6056, train_loss: 0.2215\n",
      "818/6056, train_loss: 0.1988\n",
      "819/6056, train_loss: 0.2913\n",
      "820/6056, train_loss: 0.2539\n",
      "821/6056, train_loss: 0.1728\n",
      "822/6056, train_loss: 0.2151\n",
      "823/6056, train_loss: 0.2594\n",
      "824/6056, train_loss: 0.1851\n",
      "825/6056, train_loss: 0.1381\n",
      "826/6056, train_loss: 0.2681\n",
      "827/6056, train_loss: 0.2382\n",
      "828/6056, train_loss: 0.1640\n",
      "829/6056, train_loss: 0.2276\n",
      "830/6056, train_loss: 0.2277\n",
      "831/6056, train_loss: 0.2333\n",
      "832/6056, train_loss: 0.1834\n",
      "833/6056, train_loss: 0.1853\n",
      "834/6056, train_loss: 0.2395\n",
      "835/6056, train_loss: 0.1882\n",
      "836/6056, train_loss: 0.2433\n",
      "837/6056, train_loss: 0.2774\n",
      "838/6056, train_loss: 0.2319\n",
      "839/6056, train_loss: 0.2911\n",
      "840/6056, train_loss: 0.2017\n",
      "841/6056, train_loss: 0.1888\n",
      "842/6056, train_loss: 0.3070\n",
      "843/6056, train_loss: 0.2013\n",
      "844/6056, train_loss: 0.1694\n",
      "845/6056, train_loss: 0.1655\n",
      "846/6056, train_loss: 0.2627\n",
      "847/6056, train_loss: 0.2846\n",
      "848/6056, train_loss: 0.2976\n",
      "849/6056, train_loss: 0.2418\n",
      "850/6056, train_loss: 0.2243\n",
      "851/6056, train_loss: 0.2778\n",
      "852/6056, train_loss: 0.1955\n",
      "853/6056, train_loss: 0.2865\n",
      "854/6056, train_loss: 0.2201\n",
      "855/6056, train_loss: 0.2293\n",
      "856/6056, train_loss: 0.1941\n",
      "857/6056, train_loss: 0.1846\n",
      "858/6056, train_loss: 0.1986\n",
      "859/6056, train_loss: 0.2430\n",
      "860/6056, train_loss: 0.2504\n",
      "861/6056, train_loss: 0.1981\n",
      "862/6056, train_loss: 0.2562\n",
      "863/6056, train_loss: 0.1329\n",
      "864/6056, train_loss: 0.1785\n",
      "865/6056, train_loss: 0.1880\n",
      "866/6056, train_loss: 0.2112\n",
      "867/6056, train_loss: 0.2951\n",
      "868/6056, train_loss: 0.2407\n",
      "869/6056, train_loss: 0.2359\n",
      "870/6056, train_loss: 0.1956\n",
      "871/6056, train_loss: 0.2173\n",
      "872/6056, train_loss: 0.2324\n",
      "873/6056, train_loss: 0.2217\n",
      "874/6056, train_loss: 0.1796\n",
      "875/6056, train_loss: 0.1709\n",
      "876/6056, train_loss: 0.2156\n",
      "877/6056, train_loss: 0.1807\n",
      "878/6056, train_loss: 0.2032\n",
      "879/6056, train_loss: 0.1970\n",
      "880/6056, train_loss: 0.2178\n",
      "881/6056, train_loss: 0.1762\n",
      "882/6056, train_loss: 0.2745\n",
      "883/6056, train_loss: 0.1790\n",
      "884/6056, train_loss: 0.2179\n",
      "885/6056, train_loss: 0.2871\n",
      "886/6056, train_loss: 0.1625\n",
      "887/6056, train_loss: 0.1755\n",
      "888/6056, train_loss: 0.2614\n",
      "889/6056, train_loss: 0.2921\n",
      "890/6056, train_loss: 0.1738\n",
      "891/6056, train_loss: 0.2366\n",
      "892/6056, train_loss: 0.1967\n",
      "893/6056, train_loss: 0.2729\n",
      "894/6056, train_loss: 0.2072\n",
      "895/6056, train_loss: 0.2058\n",
      "896/6056, train_loss: 0.1755\n",
      "897/6056, train_loss: 0.1610\n",
      "898/6056, train_loss: 0.2198\n",
      "899/6056, train_loss: 0.2351\n",
      "900/6056, train_loss: 0.2099\n",
      "901/6056, train_loss: 0.2332\n",
      "902/6056, train_loss: 0.2946\n",
      "903/6056, train_loss: 0.1683\n",
      "904/6056, train_loss: 0.1566\n",
      "905/6056, train_loss: 0.2265\n",
      "906/6056, train_loss: 0.1536\n",
      "907/6056, train_loss: 0.1756\n",
      "908/6056, train_loss: 0.2760\n",
      "909/6056, train_loss: 0.2178\n",
      "910/6056, train_loss: 0.2155\n",
      "911/6056, train_loss: 0.1510\n",
      "912/6056, train_loss: 0.1933\n",
      "913/6056, train_loss: 0.2125\n",
      "914/6056, train_loss: 0.2301\n",
      "915/6056, train_loss: 0.1830\n",
      "916/6056, train_loss: 0.2983\n",
      "917/6056, train_loss: 0.2592\n",
      "918/6056, train_loss: 0.2243\n",
      "919/6056, train_loss: 0.1399\n",
      "920/6056, train_loss: 0.2162\n",
      "921/6056, train_loss: 0.1790\n",
      "922/6056, train_loss: 0.2720\n",
      "923/6056, train_loss: 0.2171\n",
      "924/6056, train_loss: 0.1883\n",
      "925/6056, train_loss: 0.2615\n",
      "926/6056, train_loss: 0.1873\n",
      "927/6056, train_loss: 0.1575\n",
      "928/6056, train_loss: 0.2186\n",
      "929/6056, train_loss: 0.1644\n",
      "930/6056, train_loss: 0.1856\n",
      "931/6056, train_loss: 0.3024\n",
      "932/6056, train_loss: 0.1787\n",
      "933/6056, train_loss: 0.2032\n",
      "934/6056, train_loss: 0.2057\n",
      "935/6056, train_loss: 0.2568\n",
      "936/6056, train_loss: 0.1760\n",
      "937/6056, train_loss: 0.1810\n",
      "938/6056, train_loss: 0.2328\n",
      "939/6056, train_loss: 0.2255\n",
      "940/6056, train_loss: 0.1846\n",
      "941/6056, train_loss: 0.2025\n",
      "942/6056, train_loss: 0.2149\n",
      "943/6056, train_loss: 0.3006\n",
      "944/6056, train_loss: 0.1903\n",
      "945/6056, train_loss: 0.1703\n",
      "946/6056, train_loss: 0.1795\n",
      "947/6056, train_loss: 0.2362\n",
      "948/6056, train_loss: 0.1745\n",
      "949/6056, train_loss: 0.1516\n",
      "950/6056, train_loss: 0.1566\n",
      "951/6056, train_loss: 0.1485\n",
      "952/6056, train_loss: 0.1625\n",
      "953/6056, train_loss: 0.2469\n",
      "954/6056, train_loss: 0.1713\n",
      "955/6056, train_loss: 0.1986\n",
      "956/6056, train_loss: 0.2140\n",
      "957/6056, train_loss: 0.2747\n",
      "958/6056, train_loss: 0.1547\n",
      "959/6056, train_loss: 0.1807\n",
      "960/6056, train_loss: 0.2363\n",
      "961/6056, train_loss: 0.2324\n",
      "962/6056, train_loss: 0.1782\n",
      "963/6056, train_loss: 0.3114\n",
      "964/6056, train_loss: 0.2723\n",
      "965/6056, train_loss: 0.2346\n",
      "966/6056, train_loss: 0.1656\n",
      "967/6056, train_loss: 0.1959\n",
      "968/6056, train_loss: 0.2114\n",
      "969/6056, train_loss: 0.1568\n",
      "970/6056, train_loss: 0.2761\n",
      "971/6056, train_loss: 0.1257\n",
      "972/6056, train_loss: 0.2605\n",
      "973/6056, train_loss: 0.2090\n",
      "974/6056, train_loss: 0.2528\n",
      "975/6056, train_loss: 0.2295\n",
      "976/6056, train_loss: 0.1655\n",
      "977/6056, train_loss: 0.1842\n",
      "978/6056, train_loss: 0.2551\n",
      "979/6056, train_loss: 0.2655\n",
      "980/6056, train_loss: 0.1868\n",
      "981/6056, train_loss: 0.3012\n",
      "982/6056, train_loss: 0.2979\n",
      "983/6056, train_loss: 0.3393\n",
      "984/6056, train_loss: 0.2065\n",
      "985/6056, train_loss: 0.3597\n",
      "986/6056, train_loss: 0.1997\n",
      "987/6056, train_loss: 0.2133\n",
      "988/6056, train_loss: 0.1929\n",
      "989/6056, train_loss: 0.2532\n",
      "990/6056, train_loss: 0.2221\n",
      "991/6056, train_loss: 0.2249\n",
      "992/6056, train_loss: 0.1862\n",
      "993/6056, train_loss: 0.1814\n",
      "994/6056, train_loss: 0.1569\n",
      "995/6056, train_loss: 0.3160\n",
      "996/6056, train_loss: 0.2527\n",
      "997/6056, train_loss: 0.2455\n",
      "998/6056, train_loss: 0.2522\n",
      "999/6056, train_loss: 0.1678\n",
      "1000/6056, train_loss: 0.1698\n",
      "1001/6056, train_loss: 0.1597\n",
      "1002/6056, train_loss: 0.2089\n",
      "1003/6056, train_loss: 0.1411\n",
      "1004/6056, train_loss: 0.3242\n",
      "1005/6056, train_loss: 0.2641\n",
      "1006/6056, train_loss: 0.1432\n",
      "1007/6056, train_loss: 0.1888\n",
      "1008/6056, train_loss: 0.1894\n",
      "1009/6056, train_loss: 0.1370\n",
      "1010/6056, train_loss: 0.2838\n",
      "1011/6056, train_loss: 0.1751\n",
      "1012/6056, train_loss: 0.1761\n",
      "1013/6056, train_loss: 0.1936\n",
      "1014/6056, train_loss: 0.1616\n",
      "1015/6056, train_loss: 0.1689\n",
      "1016/6056, train_loss: 0.2854\n",
      "1017/6056, train_loss: 0.3725\n",
      "1018/6056, train_loss: 0.2449\n",
      "1019/6056, train_loss: 0.1349\n",
      "1020/6056, train_loss: 0.2070\n",
      "1021/6056, train_loss: 0.1814\n",
      "1022/6056, train_loss: 0.3310\n",
      "1023/6056, train_loss: 0.1685\n",
      "1024/6056, train_loss: 0.2129\n",
      "1025/6056, train_loss: 0.2580\n",
      "1026/6056, train_loss: 0.1730\n",
      "1027/6056, train_loss: 0.2961\n",
      "1028/6056, train_loss: 0.1641\n",
      "1029/6056, train_loss: 0.2147\n",
      "1030/6056, train_loss: 0.2086\n",
      "1031/6056, train_loss: 0.2484\n",
      "1032/6056, train_loss: 0.1986\n",
      "1033/6056, train_loss: 0.2496\n",
      "1034/6056, train_loss: 0.1298\n",
      "1035/6056, train_loss: 0.1765\n",
      "1036/6056, train_loss: 0.2105\n",
      "1037/6056, train_loss: 0.2541\n",
      "1038/6056, train_loss: 0.2004\n",
      "1039/6056, train_loss: 0.1975\n",
      "1040/6056, train_loss: 0.1933\n",
      "1041/6056, train_loss: 0.2529\n",
      "1042/6056, train_loss: 0.1861\n",
      "1043/6056, train_loss: 0.2459\n",
      "1044/6056, train_loss: 0.1509\n",
      "1045/6056, train_loss: 0.2302\n",
      "1046/6056, train_loss: 0.2236\n",
      "1047/6056, train_loss: 0.2430\n",
      "1048/6056, train_loss: 0.1822\n",
      "1049/6056, train_loss: 0.3124\n",
      "1050/6056, train_loss: 0.1984\n",
      "1051/6056, train_loss: 0.1681\n",
      "1052/6056, train_loss: 0.2454\n",
      "1053/6056, train_loss: 0.2687\n",
      "1054/6056, train_loss: 0.2024\n",
      "1055/6056, train_loss: 0.2817\n",
      "1056/6056, train_loss: 0.2993\n",
      "1057/6056, train_loss: 0.2515\n",
      "1058/6056, train_loss: 0.2376\n",
      "1059/6056, train_loss: 0.3035\n",
      "1060/6056, train_loss: 0.2309\n",
      "1061/6056, train_loss: 0.1724\n",
      "1062/6056, train_loss: 0.1549\n",
      "1063/6056, train_loss: 0.2394\n",
      "1064/6056, train_loss: 0.2217\n",
      "1065/6056, train_loss: 0.1867\n",
      "1066/6056, train_loss: 0.1792\n",
      "1067/6056, train_loss: 0.2338\n",
      "1068/6056, train_loss: 0.2273\n",
      "1069/6056, train_loss: 0.1783\n",
      "1070/6056, train_loss: 0.1953\n",
      "1071/6056, train_loss: 0.1350\n",
      "1072/6056, train_loss: 0.2663\n",
      "1073/6056, train_loss: 0.2947\n",
      "1074/6056, train_loss: 0.3570\n",
      "1075/6056, train_loss: 0.1729\n",
      "1076/6056, train_loss: 0.2688\n",
      "1077/6056, train_loss: 0.2738\n",
      "1078/6056, train_loss: 0.2433\n",
      "1079/6056, train_loss: 0.1985\n",
      "1080/6056, train_loss: 0.2485\n",
      "1081/6056, train_loss: 0.1803\n",
      "1082/6056, train_loss: 0.1777\n",
      "1083/6056, train_loss: 0.2401\n",
      "1084/6056, train_loss: 0.2610\n",
      "1085/6056, train_loss: 0.2438\n",
      "1086/6056, train_loss: 0.1889\n",
      "1087/6056, train_loss: 0.1613\n",
      "1088/6056, train_loss: 0.1525\n",
      "1089/6056, train_loss: 0.1494\n",
      "1090/6056, train_loss: 0.2953\n",
      "1091/6056, train_loss: 0.1678\n",
      "1092/6056, train_loss: 0.1960\n",
      "1093/6056, train_loss: 0.2575\n",
      "1094/6056, train_loss: 0.2420\n",
      "1095/6056, train_loss: 0.2025\n",
      "1096/6056, train_loss: 0.3418\n",
      "1097/6056, train_loss: 0.1596\n",
      "1098/6056, train_loss: 0.2047\n",
      "1099/6056, train_loss: 0.2397\n",
      "1100/6056, train_loss: 0.1751\n",
      "1101/6056, train_loss: 0.2676\n",
      "1102/6056, train_loss: 0.1953\n",
      "1103/6056, train_loss: 0.2327\n",
      "1104/6056, train_loss: 0.3099\n",
      "1105/6056, train_loss: 0.2603\n",
      "1106/6056, train_loss: 0.1722\n",
      "1107/6056, train_loss: 0.1518\n",
      "1108/6056, train_loss: 0.1766\n",
      "1109/6056, train_loss: 0.1349\n",
      "1110/6056, train_loss: 0.1847\n",
      "1111/6056, train_loss: 0.2004\n",
      "1112/6056, train_loss: 0.1953\n",
      "1113/6056, train_loss: 0.2381\n",
      "1114/6056, train_loss: 0.3023\n",
      "1115/6056, train_loss: 0.1648\n",
      "1116/6056, train_loss: 0.2040\n",
      "1117/6056, train_loss: 0.1891\n",
      "1118/6056, train_loss: 0.2232\n",
      "1119/6056, train_loss: 0.2704\n",
      "1120/6056, train_loss: 0.2432\n",
      "1121/6056, train_loss: 0.2109\n",
      "1122/6056, train_loss: 0.1827\n",
      "1123/6056, train_loss: 0.2398\n",
      "1124/6056, train_loss: 0.1507\n",
      "1125/6056, train_loss: 0.1689\n",
      "1126/6056, train_loss: 0.1645\n",
      "1127/6056, train_loss: 0.1958\n",
      "1128/6056, train_loss: 0.1467\n",
      "1129/6056, train_loss: 0.1647\n",
      "1130/6056, train_loss: 0.2393\n",
      "1131/6056, train_loss: 0.1287\n",
      "1132/6056, train_loss: 0.1660\n",
      "1133/6056, train_loss: 0.1757\n",
      "1134/6056, train_loss: 0.1756\n",
      "1135/6056, train_loss: 0.1361\n",
      "1136/6056, train_loss: 0.1110\n",
      "1137/6056, train_loss: 0.1764\n",
      "1138/6056, train_loss: 0.1765\n",
      "1139/6056, train_loss: 0.2459\n",
      "1140/6056, train_loss: 0.1608\n",
      "1141/6056, train_loss: 0.2091\n",
      "1142/6056, train_loss: 0.2198\n",
      "1143/6056, train_loss: 0.1777\n",
      "1144/6056, train_loss: 0.1704\n",
      "1145/6056, train_loss: 0.1230\n",
      "1146/6056, train_loss: 0.1868\n",
      "1147/6056, train_loss: 0.2592\n",
      "1148/6056, train_loss: 0.2160\n",
      "1149/6056, train_loss: 0.2473\n",
      "1150/6056, train_loss: 0.2197\n",
      "1151/6056, train_loss: 0.1687\n",
      "1152/6056, train_loss: 0.2368\n",
      "1153/6056, train_loss: 0.2103\n",
      "1154/6056, train_loss: 0.3160\n",
      "1155/6056, train_loss: 0.1956\n",
      "1156/6056, train_loss: 0.2303\n",
      "1157/6056, train_loss: 0.2532\n",
      "1158/6056, train_loss: 0.2733\n",
      "1159/6056, train_loss: 0.2271\n",
      "1160/6056, train_loss: 0.2604\n",
      "1161/6056, train_loss: 0.2627\n",
      "1162/6056, train_loss: 0.2509\n",
      "1163/6056, train_loss: 0.2261\n",
      "1164/6056, train_loss: 0.2244\n",
      "1165/6056, train_loss: 0.1643\n",
      "1166/6056, train_loss: 0.1434\n",
      "1167/6056, train_loss: 0.1254\n",
      "1168/6056, train_loss: 0.1946\n",
      "1169/6056, train_loss: 0.2104\n",
      "1170/6056, train_loss: 0.2073\n",
      "1171/6056, train_loss: 0.2828\n",
      "1172/6056, train_loss: 0.1755\n",
      "1173/6056, train_loss: 0.1811\n",
      "1174/6056, train_loss: 0.2161\n",
      "1175/6056, train_loss: 0.2242\n",
      "1176/6056, train_loss: 0.1249\n",
      "1177/6056, train_loss: 0.2020\n",
      "1178/6056, train_loss: 0.1885\n",
      "1179/6056, train_loss: 0.2011\n",
      "1180/6056, train_loss: 0.1875\n",
      "1181/6056, train_loss: 0.1953\n",
      "1182/6056, train_loss: 0.2725\n",
      "1183/6056, train_loss: 0.2838\n",
      "1184/6056, train_loss: 0.1980\n",
      "1185/6056, train_loss: 0.1653\n",
      "1186/6056, train_loss: 0.2701\n",
      "1187/6056, train_loss: 0.2053\n",
      "1188/6056, train_loss: 0.1917\n",
      "1189/6056, train_loss: 0.2801\n",
      "1190/6056, train_loss: 0.1529\n",
      "1191/6056, train_loss: 0.2925\n",
      "1192/6056, train_loss: 0.2430\n",
      "1193/6056, train_loss: 0.1785\n",
      "1194/6056, train_loss: 0.1733\n",
      "1195/6056, train_loss: 0.2303\n",
      "1196/6056, train_loss: 0.2769\n",
      "1197/6056, train_loss: 0.1749\n",
      "1198/6056, train_loss: 0.1901\n",
      "1199/6056, train_loss: 0.1593\n",
      "1200/6056, train_loss: 0.3868\n",
      "1201/6056, train_loss: 0.1866\n",
      "1202/6056, train_loss: 0.1578\n",
      "1203/6056, train_loss: 0.1735\n",
      "1204/6056, train_loss: 0.2864\n",
      "1205/6056, train_loss: 0.1836\n",
      "1206/6056, train_loss: 0.2225\n",
      "1207/6056, train_loss: 0.1828\n",
      "1208/6056, train_loss: 0.1306\n",
      "1209/6056, train_loss: 0.1389\n",
      "1210/6056, train_loss: 0.2547\n",
      "1211/6056, train_loss: 0.2477\n",
      "1212/6056, train_loss: 0.2189\n",
      "1213/6056, train_loss: 0.2503\n",
      "1214/6056, train_loss: 0.2035\n",
      "1215/6056, train_loss: 0.2320\n",
      "1216/6056, train_loss: 0.2877\n",
      "1217/6056, train_loss: 0.2133\n",
      "1218/6056, train_loss: 0.1641\n",
      "1219/6056, train_loss: 0.1272\n",
      "1220/6056, train_loss: 0.2284\n",
      "1221/6056, train_loss: 0.1602\n",
      "1222/6056, train_loss: 0.2138\n",
      "1223/6056, train_loss: 0.1525\n",
      "1224/6056, train_loss: 0.2646\n",
      "1225/6056, train_loss: 0.2210\n",
      "1226/6056, train_loss: 0.2014\n",
      "1227/6056, train_loss: 0.1869\n",
      "1228/6056, train_loss: 0.2276\n",
      "1229/6056, train_loss: 0.2455\n",
      "1230/6056, train_loss: 0.2271\n",
      "1231/6056, train_loss: 0.2828\n",
      "1232/6056, train_loss: 0.2299\n",
      "1233/6056, train_loss: 0.1975\n",
      "1234/6056, train_loss: 0.2073\n",
      "1235/6056, train_loss: 0.3336\n",
      "1236/6056, train_loss: 0.2020\n",
      "1237/6056, train_loss: 0.1752\n",
      "1238/6056, train_loss: 0.2206\n",
      "1239/6056, train_loss: 0.1390\n",
      "1240/6056, train_loss: 0.2542\n",
      "1241/6056, train_loss: 0.2232\n",
      "1242/6056, train_loss: 0.2104\n",
      "1243/6056, train_loss: 0.1884\n",
      "1244/6056, train_loss: 0.1748\n",
      "1245/6056, train_loss: 0.1485\n",
      "1246/6056, train_loss: 0.2003\n",
      "1247/6056, train_loss: 0.1940\n",
      "1248/6056, train_loss: 0.1903\n",
      "1249/6056, train_loss: 0.2621\n",
      "1250/6056, train_loss: 0.2883\n",
      "1251/6056, train_loss: 0.1905\n",
      "1252/6056, train_loss: 0.2358\n",
      "1253/6056, train_loss: 0.2906\n",
      "1254/6056, train_loss: 0.2610\n",
      "1255/6056, train_loss: 0.2237\n",
      "1256/6056, train_loss: 0.1503\n",
      "1257/6056, train_loss: 0.1910\n",
      "1258/6056, train_loss: 0.2383\n",
      "1259/6056, train_loss: 0.2423\n",
      "1260/6056, train_loss: 0.3122\n",
      "1261/6056, train_loss: 0.2465\n",
      "1262/6056, train_loss: 0.2338\n",
      "1263/6056, train_loss: 0.2032\n",
      "1264/6056, train_loss: 0.2158\n",
      "1265/6056, train_loss: 0.1860\n",
      "1266/6056, train_loss: 0.1322\n",
      "1267/6056, train_loss: 0.1998\n",
      "1268/6056, train_loss: 0.1470\n",
      "1269/6056, train_loss: 0.3354\n",
      "1270/6056, train_loss: 0.2151\n",
      "1271/6056, train_loss: 0.2514\n",
      "1272/6056, train_loss: 0.2039\n",
      "1273/6056, train_loss: 0.2394\n",
      "1274/6056, train_loss: 0.1465\n",
      "1275/6056, train_loss: 0.2546\n",
      "1276/6056, train_loss: 0.2492\n",
      "1277/6056, train_loss: 0.1821\n",
      "1278/6056, train_loss: 0.1752\n",
      "1279/6056, train_loss: 0.1901\n",
      "1280/6056, train_loss: 0.3093\n",
      "1281/6056, train_loss: 0.2612\n",
      "1282/6056, train_loss: 0.1644\n",
      "1283/6056, train_loss: 0.2333\n",
      "1284/6056, train_loss: 0.2762\n",
      "1285/6056, train_loss: 0.1561\n",
      "1286/6056, train_loss: 0.1594\n",
      "1287/6056, train_loss: 0.2050\n",
      "1288/6056, train_loss: 0.1953\n",
      "1289/6056, train_loss: 0.2889\n",
      "1290/6056, train_loss: 0.1687\n",
      "1291/6056, train_loss: 0.2725\n",
      "1292/6056, train_loss: 0.1891\n",
      "1293/6056, train_loss: 0.1965\n",
      "1294/6056, train_loss: 0.2326\n",
      "1295/6056, train_loss: 0.1611\n",
      "1296/6056, train_loss: 0.2405\n",
      "1297/6056, train_loss: 0.2030\n",
      "1298/6056, train_loss: 0.2168\n",
      "1299/6056, train_loss: 0.2472\n",
      "1300/6056, train_loss: 0.1509\n",
      "1301/6056, train_loss: 0.2211\n",
      "1302/6056, train_loss: 0.1719\n",
      "1303/6056, train_loss: 0.1586\n",
      "1304/6056, train_loss: 0.1660\n",
      "1305/6056, train_loss: 0.2480\n",
      "1306/6056, train_loss: 0.2170\n",
      "1307/6056, train_loss: 0.2160\n",
      "1308/6056, train_loss: 0.1797\n",
      "1309/6056, train_loss: 0.1821\n",
      "1310/6056, train_loss: 0.1897\n",
      "1311/6056, train_loss: 0.2388\n",
      "1312/6056, train_loss: 0.2590\n",
      "1313/6056, train_loss: 0.1832\n",
      "1314/6056, train_loss: 0.2963\n",
      "1315/6056, train_loss: 0.1815\n",
      "1316/6056, train_loss: 0.1845\n",
      "1317/6056, train_loss: 0.2296\n",
      "1318/6056, train_loss: 0.2178\n",
      "1319/6056, train_loss: 0.1904\n",
      "1320/6056, train_loss: 0.2627\n",
      "1321/6056, train_loss: 0.1745\n",
      "1322/6056, train_loss: 0.1678\n",
      "1323/6056, train_loss: 0.3270\n",
      "1324/6056, train_loss: 0.2258\n",
      "1325/6056, train_loss: 0.2256\n",
      "1326/6056, train_loss: 0.1651\n",
      "1327/6056, train_loss: 0.1960\n",
      "1328/6056, train_loss: 0.2064\n",
      "1329/6056, train_loss: 0.2145\n",
      "1330/6056, train_loss: 0.2052\n",
      "1331/6056, train_loss: 0.1682\n",
      "1332/6056, train_loss: 0.1467\n",
      "1333/6056, train_loss: 0.2593\n",
      "1334/6056, train_loss: 0.1492\n",
      "1335/6056, train_loss: 0.2120\n",
      "1336/6056, train_loss: 0.1905\n",
      "1337/6056, train_loss: 0.2092\n",
      "1338/6056, train_loss: 0.2103\n",
      "1339/6056, train_loss: 0.1699\n",
      "1340/6056, train_loss: 0.1882\n",
      "1341/6056, train_loss: 0.1670\n",
      "1342/6056, train_loss: 0.2116\n",
      "1343/6056, train_loss: 0.2012\n",
      "1344/6056, train_loss: 0.1705\n",
      "1345/6056, train_loss: 0.2805\n",
      "1346/6056, train_loss: 0.2189\n",
      "1347/6056, train_loss: 0.1778\n",
      "1348/6056, train_loss: 0.2843\n",
      "1349/6056, train_loss: 0.1777\n",
      "1350/6056, train_loss: 0.1525\n",
      "1351/6056, train_loss: 0.2092\n",
      "1352/6056, train_loss: 0.2108\n",
      "1353/6056, train_loss: 0.2445\n",
      "1354/6056, train_loss: 0.1710\n",
      "1355/6056, train_loss: 0.1974\n",
      "1356/6056, train_loss: 0.3216\n",
      "1357/6056, train_loss: 0.1684\n",
      "1358/6056, train_loss: 0.1874\n",
      "1359/6056, train_loss: 0.1620\n",
      "1360/6056, train_loss: 0.2111\n",
      "1361/6056, train_loss: 0.2902\n",
      "1362/6056, train_loss: 0.1954\n",
      "1363/6056, train_loss: 0.2215\n",
      "1364/6056, train_loss: 0.1711\n",
      "1365/6056, train_loss: 0.1232\n",
      "1366/6056, train_loss: 0.1942\n",
      "1367/6056, train_loss: 0.2137\n",
      "1368/6056, train_loss: 0.1156\n",
      "1369/6056, train_loss: 0.1493\n",
      "1370/6056, train_loss: 0.2316\n",
      "1371/6056, train_loss: 0.1807\n",
      "1372/6056, train_loss: 0.1414\n",
      "1373/6056, train_loss: 0.2716\n",
      "1374/6056, train_loss: 0.1479\n",
      "1375/6056, train_loss: 0.1595\n",
      "1376/6056, train_loss: 0.2002\n",
      "1377/6056, train_loss: 0.2647\n",
      "1378/6056, train_loss: 0.2188\n",
      "1379/6056, train_loss: 0.1354\n",
      "1380/6056, train_loss: 0.2083\n",
      "1381/6056, train_loss: 0.2286\n",
      "1382/6056, train_loss: 0.2141\n",
      "1383/6056, train_loss: 0.2064\n",
      "1384/6056, train_loss: 0.1600\n",
      "1385/6056, train_loss: 0.2845\n",
      "1386/6056, train_loss: 0.1672\n",
      "1387/6056, train_loss: 0.1656\n",
      "1388/6056, train_loss: 0.2387\n",
      "1389/6056, train_loss: 0.1429\n",
      "1390/6056, train_loss: 0.1851\n",
      "1391/6056, train_loss: 0.1396\n",
      "1392/6056, train_loss: 0.1423\n",
      "1393/6056, train_loss: 0.1658\n",
      "1394/6056, train_loss: 0.2693\n",
      "1395/6056, train_loss: 0.1825\n",
      "1396/6056, train_loss: 0.2811\n",
      "1397/6056, train_loss: 0.1936\n",
      "1398/6056, train_loss: 0.1930\n",
      "1399/6056, train_loss: 0.3152\n",
      "1400/6056, train_loss: 0.1873\n",
      "1401/6056, train_loss: 0.1519\n",
      "1402/6056, train_loss: 0.1978\n",
      "1403/6056, train_loss: 0.1799\n",
      "1404/6056, train_loss: 0.1989\n",
      "1405/6056, train_loss: 0.1436\n",
      "1406/6056, train_loss: 0.1554\n",
      "1407/6056, train_loss: 0.1018\n",
      "1408/6056, train_loss: 0.2199\n",
      "1409/6056, train_loss: 0.2580\n",
      "1410/6056, train_loss: 0.1988\n",
      "1411/6056, train_loss: 0.2177\n",
      "1412/6056, train_loss: 0.1321\n",
      "1413/6056, train_loss: 0.2097\n",
      "1414/6056, train_loss: 0.2749\n",
      "1415/6056, train_loss: 0.2202\n",
      "1416/6056, train_loss: 0.2438\n",
      "1417/6056, train_loss: 0.1818\n",
      "1418/6056, train_loss: 0.1653\n",
      "1419/6056, train_loss: 0.1792\n",
      "1420/6056, train_loss: 0.1752\n",
      "1421/6056, train_loss: 0.1447\n",
      "1422/6056, train_loss: 0.1547\n",
      "1423/6056, train_loss: 0.1988\n",
      "1424/6056, train_loss: 0.1522\n",
      "1425/6056, train_loss: 0.2851\n",
      "1426/6056, train_loss: 0.1131\n",
      "1427/6056, train_loss: 0.1998\n",
      "1428/6056, train_loss: 0.1603\n",
      "1429/6056, train_loss: 0.1525\n",
      "1430/6056, train_loss: 0.2462\n",
      "1431/6056, train_loss: 0.2447\n",
      "1432/6056, train_loss: 0.2016\n",
      "1433/6056, train_loss: 0.2191\n",
      "1434/6056, train_loss: 0.2168\n",
      "1435/6056, train_loss: 0.1751\n",
      "1436/6056, train_loss: 0.3127\n",
      "1437/6056, train_loss: 0.1959\n",
      "1438/6056, train_loss: 0.2148\n",
      "1439/6056, train_loss: 0.2731\n",
      "1440/6056, train_loss: 0.2572\n",
      "1441/6056, train_loss: 0.1427\n",
      "1442/6056, train_loss: 0.2724\n",
      "1443/6056, train_loss: 0.1939\n",
      "1444/6056, train_loss: 0.1986\n",
      "1445/6056, train_loss: 0.2109\n",
      "1446/6056, train_loss: 0.2767\n",
      "1447/6056, train_loss: 0.2239\n",
      "1448/6056, train_loss: 0.1681\n",
      "1449/6056, train_loss: 0.2245\n",
      "1450/6056, train_loss: 0.2171\n",
      "1451/6056, train_loss: 0.1835\n",
      "1452/6056, train_loss: 0.1935\n",
      "1453/6056, train_loss: 0.2586\n",
      "1454/6056, train_loss: 0.1728\n",
      "1455/6056, train_loss: 0.2220\n",
      "1456/6056, train_loss: 0.2267\n",
      "1457/6056, train_loss: 0.2316\n",
      "1458/6056, train_loss: 0.2226\n",
      "1459/6056, train_loss: 0.2459\n",
      "1460/6056, train_loss: 0.1834\n",
      "1461/6056, train_loss: 0.1876\n",
      "1462/6056, train_loss: 0.1622\n",
      "1463/6056, train_loss: 0.2639\n",
      "1464/6056, train_loss: 0.2370\n",
      "1465/6056, train_loss: 0.3021\n",
      "1466/6056, train_loss: 0.2475\n",
      "1467/6056, train_loss: 0.1834\n",
      "1468/6056, train_loss: 0.3443\n",
      "1469/6056, train_loss: 0.1999\n",
      "1470/6056, train_loss: 0.2432\n",
      "1471/6056, train_loss: 0.2190\n",
      "1472/6056, train_loss: 0.1945\n",
      "1473/6056, train_loss: 0.1784\n",
      "1474/6056, train_loss: 0.2380\n",
      "1475/6056, train_loss: 0.1728\n",
      "1476/6056, train_loss: 0.1468\n",
      "1477/6056, train_loss: 0.1391\n",
      "1478/6056, train_loss: 0.1515\n",
      "1479/6056, train_loss: 0.2352\n",
      "1480/6056, train_loss: 0.1545\n",
      "1481/6056, train_loss: 0.1697\n",
      "1482/6056, train_loss: 0.2119\n",
      "1483/6056, train_loss: 0.2151\n",
      "1484/6056, train_loss: 0.1549\n",
      "1485/6056, train_loss: 0.2278\n",
      "1486/6056, train_loss: 0.2110\n",
      "1487/6056, train_loss: 0.2699\n",
      "1488/6056, train_loss: 0.2606\n",
      "1489/6056, train_loss: 0.1521\n",
      "1490/6056, train_loss: 0.3157\n",
      "1491/6056, train_loss: 0.1681\n",
      "1492/6056, train_loss: 0.2008\n",
      "1493/6056, train_loss: 0.1697\n",
      "1494/6056, train_loss: 0.2312\n",
      "1495/6056, train_loss: 0.2055\n",
      "1496/6056, train_loss: 0.1571\n",
      "1497/6056, train_loss: 0.2276\n",
      "1498/6056, train_loss: 0.1554\n",
      "1499/6056, train_loss: 0.2638\n",
      "1500/6056, train_loss: 0.1236\n",
      "1501/6056, train_loss: 0.2013\n",
      "1502/6056, train_loss: 0.1789\n",
      "1503/6056, train_loss: 0.2093\n",
      "1504/6056, train_loss: 0.2493\n",
      "1505/6056, train_loss: 0.2948\n",
      "1506/6056, train_loss: 0.1873\n",
      "1507/6056, train_loss: 0.1962\n",
      "1508/6056, train_loss: 0.1192\n",
      "1509/6056, train_loss: 0.1974\n",
      "1510/6056, train_loss: 0.0997\n",
      "1511/6056, train_loss: 0.2519\n",
      "1512/6056, train_loss: 0.1606\n",
      "1513/6056, train_loss: 0.2350\n",
      "1514/6056, train_loss: 0.2690\n",
      "1515/6056, train_loss: 0.2253\n",
      "1516/6056, train_loss: 0.1704\n",
      "1517/6056, train_loss: 0.1615\n",
      "1518/6056, train_loss: 0.3284\n",
      "1519/6056, train_loss: 0.1445\n",
      "1520/6056, train_loss: 0.1915\n",
      "1521/6056, train_loss: 0.1663\n",
      "1522/6056, train_loss: 0.1728\n",
      "1523/6056, train_loss: 0.1212\n",
      "1524/6056, train_loss: 0.1133\n",
      "1525/6056, train_loss: 0.1356\n",
      "1526/6056, train_loss: 0.1779\n",
      "1527/6056, train_loss: 0.2567\n",
      "1528/6056, train_loss: 0.2380\n",
      "1529/6056, train_loss: 0.1446\n",
      "1530/6056, train_loss: 0.1392\n",
      "1531/6056, train_loss: 0.2276\n",
      "1532/6056, train_loss: 0.1666\n",
      "1533/6056, train_loss: 0.2427\n",
      "1534/6056, train_loss: 0.1541\n",
      "1535/6056, train_loss: 0.1482\n",
      "1536/6056, train_loss: 0.1576\n",
      "1537/6056, train_loss: 0.2135\n",
      "1538/6056, train_loss: 0.1742\n",
      "1539/6056, train_loss: 0.1240\n",
      "1540/6056, train_loss: 0.1516\n",
      "1541/6056, train_loss: 0.1731\n",
      "1542/6056, train_loss: 0.1451\n",
      "1543/6056, train_loss: 0.2342\n",
      "1544/6056, train_loss: 0.1681\n",
      "1545/6056, train_loss: 0.1893\n",
      "1546/6056, train_loss: 0.1972\n",
      "1547/6056, train_loss: 0.2843\n",
      "1548/6056, train_loss: 0.2068\n",
      "1549/6056, train_loss: 0.2278\n",
      "1550/6056, train_loss: 0.1279\n",
      "1551/6056, train_loss: 0.1940\n",
      "1552/6056, train_loss: 0.1179\n",
      "1553/6056, train_loss: 0.2177\n",
      "1554/6056, train_loss: 0.2390\n",
      "1555/6056, train_loss: 0.2323\n",
      "1556/6056, train_loss: 0.1309\n",
      "1557/6056, train_loss: 0.2155\n",
      "1558/6056, train_loss: 0.2156\n",
      "1559/6056, train_loss: 0.1273\n",
      "1560/6056, train_loss: 0.1968\n",
      "1561/6056, train_loss: 0.1576\n",
      "1562/6056, train_loss: 0.2081\n",
      "1563/6056, train_loss: 0.2348\n",
      "1564/6056, train_loss: 0.2076\n",
      "1565/6056, train_loss: 0.1654\n",
      "1566/6056, train_loss: 0.1840\n",
      "1567/6056, train_loss: 0.2266\n",
      "1568/6056, train_loss: 0.1799\n",
      "1569/6056, train_loss: 0.1764\n",
      "1570/6056, train_loss: 0.2134\n",
      "1571/6056, train_loss: 0.2094\n",
      "1572/6056, train_loss: 0.1783\n",
      "1573/6056, train_loss: 0.2010\n",
      "1574/6056, train_loss: 0.1122\n",
      "1575/6056, train_loss: 0.2905\n",
      "1576/6056, train_loss: 0.2600\n",
      "1577/6056, train_loss: 0.1476\n",
      "1578/6056, train_loss: 0.2073\n",
      "1579/6056, train_loss: 0.2154\n",
      "1580/6056, train_loss: 0.1883\n",
      "1581/6056, train_loss: 0.2018\n",
      "1582/6056, train_loss: 0.1736\n",
      "1583/6056, train_loss: 0.1566\n",
      "1584/6056, train_loss: 0.1325\n",
      "1585/6056, train_loss: 0.2944\n",
      "1586/6056, train_loss: 0.2399\n",
      "1587/6056, train_loss: 0.1516\n",
      "1588/6056, train_loss: 0.2276\n",
      "1589/6056, train_loss: 0.2609\n",
      "1590/6056, train_loss: 0.1283\n",
      "1591/6056, train_loss: 0.2231\n",
      "1592/6056, train_loss: 0.2133\n",
      "1593/6056, train_loss: 0.1856\n",
      "1594/6056, train_loss: 0.2184\n",
      "1595/6056, train_loss: 0.2394\n",
      "1596/6056, train_loss: 0.2206\n",
      "1597/6056, train_loss: 0.2920\n",
      "1598/6056, train_loss: 0.2647\n",
      "1599/6056, train_loss: 0.1403\n",
      "1600/6056, train_loss: 0.1780\n",
      "1601/6056, train_loss: 0.2248\n",
      "1602/6056, train_loss: 0.2194\n",
      "1603/6056, train_loss: 0.2348\n",
      "1604/6056, train_loss: 0.2335\n",
      "1605/6056, train_loss: 0.1564\n",
      "1606/6056, train_loss: 0.1714\n",
      "1607/6056, train_loss: 0.2063\n",
      "1608/6056, train_loss: 0.1661\n",
      "1609/6056, train_loss: 0.2018\n",
      "1610/6056, train_loss: 0.2844\n",
      "1611/6056, train_loss: 0.1868\n",
      "1612/6056, train_loss: 0.1130\n",
      "1613/6056, train_loss: 0.2340\n",
      "1614/6056, train_loss: 0.1813\n",
      "1615/6056, train_loss: 0.1636\n",
      "1616/6056, train_loss: 0.1294\n",
      "1617/6056, train_loss: 0.1375\n",
      "1618/6056, train_loss: 0.2552\n",
      "1619/6056, train_loss: 0.2200\n",
      "1620/6056, train_loss: 0.1335\n",
      "1621/6056, train_loss: 0.1670\n",
      "1622/6056, train_loss: 0.2124\n",
      "1623/6056, train_loss: 0.2286\n",
      "1624/6056, train_loss: 0.1604\n",
      "1625/6056, train_loss: 0.2277\n",
      "1626/6056, train_loss: 0.1342\n",
      "1627/6056, train_loss: 0.1661\n",
      "1628/6056, train_loss: 0.2699\n",
      "1629/6056, train_loss: 0.2303\n",
      "1630/6056, train_loss: 0.2375\n",
      "1631/6056, train_loss: 0.1852\n",
      "1632/6056, train_loss: 0.2250\n",
      "1633/6056, train_loss: 0.1545\n",
      "1634/6056, train_loss: 0.1393\n",
      "1635/6056, train_loss: 0.2558\n",
      "1636/6056, train_loss: 0.1034\n",
      "1637/6056, train_loss: 0.2975\n",
      "1638/6056, train_loss: 0.1915\n",
      "1639/6056, train_loss: 0.2249\n",
      "1640/6056, train_loss: 0.1757\n",
      "1641/6056, train_loss: 0.1846\n",
      "1642/6056, train_loss: 0.1200\n",
      "1643/6056, train_loss: 0.1718\n",
      "1644/6056, train_loss: 0.1331\n",
      "1645/6056, train_loss: 0.1299\n",
      "1646/6056, train_loss: 0.2301\n",
      "1647/6056, train_loss: 0.1180\n",
      "1648/6056, train_loss: 0.2360\n",
      "1649/6056, train_loss: 0.1910\n",
      "1650/6056, train_loss: 0.1869\n",
      "1651/6056, train_loss: 0.1416\n",
      "1652/6056, train_loss: 0.1658\n",
      "1653/6056, train_loss: 0.2310\n",
      "1654/6056, train_loss: 0.2584\n",
      "1655/6056, train_loss: 0.3314\n",
      "1656/6056, train_loss: 0.1781\n",
      "1657/6056, train_loss: 0.2974\n",
      "1658/6056, train_loss: 0.2050\n",
      "1659/6056, train_loss: 0.1867\n",
      "1660/6056, train_loss: 0.2124\n",
      "1661/6056, train_loss: 0.1862\n",
      "1662/6056, train_loss: 0.0938\n",
      "1663/6056, train_loss: 0.2044\n",
      "1664/6056, train_loss: 0.2662\n",
      "1665/6056, train_loss: 0.1568\n",
      "1666/6056, train_loss: 0.1833\n",
      "1667/6056, train_loss: 0.1504\n",
      "1668/6056, train_loss: 0.1841\n",
      "1669/6056, train_loss: 0.1417\n",
      "1670/6056, train_loss: 0.1854\n",
      "1671/6056, train_loss: 0.1756\n",
      "1672/6056, train_loss: 0.1341\n",
      "1673/6056, train_loss: 0.1734\n",
      "1674/6056, train_loss: 0.2585\n",
      "1675/6056, train_loss: 0.2928\n",
      "1676/6056, train_loss: 0.2175\n",
      "1677/6056, train_loss: 0.1542\n",
      "1678/6056, train_loss: 0.1658\n",
      "1679/6056, train_loss: 0.1658\n",
      "1680/6056, train_loss: 0.2587\n",
      "1681/6056, train_loss: 0.1610\n",
      "1682/6056, train_loss: 0.1495\n",
      "1683/6056, train_loss: 0.1418\n",
      "1684/6056, train_loss: 0.1072\n",
      "1685/6056, train_loss: 0.1914\n",
      "1686/6056, train_loss: 0.1800\n",
      "1687/6056, train_loss: 0.1232\n",
      "1688/6056, train_loss: 0.2018\n",
      "1689/6056, train_loss: 0.2190\n",
      "1690/6056, train_loss: 0.2008\n",
      "1691/6056, train_loss: 0.2863\n",
      "1692/6056, train_loss: 0.2188\n",
      "1693/6056, train_loss: 0.3452\n",
      "1694/6056, train_loss: 0.1914\n",
      "1695/6056, train_loss: 0.2132\n",
      "1696/6056, train_loss: 0.1287\n",
      "1697/6056, train_loss: 0.1634\n",
      "1698/6056, train_loss: 0.1131\n",
      "1699/6056, train_loss: 0.1669\n",
      "1700/6056, train_loss: 0.1443\n",
      "1701/6056, train_loss: 0.2220\n",
      "1702/6056, train_loss: 0.1834\n",
      "1703/6056, train_loss: 0.2017\n",
      "1704/6056, train_loss: 0.1437\n",
      "1705/6056, train_loss: 0.1658\n",
      "1706/6056, train_loss: 0.2562\n",
      "1707/6056, train_loss: 0.1981\n",
      "1708/6056, train_loss: 0.1639\n",
      "1709/6056, train_loss: 0.1363\n",
      "1710/6056, train_loss: 0.2924\n",
      "1711/6056, train_loss: 0.3010\n",
      "1712/6056, train_loss: 0.2204\n",
      "1713/6056, train_loss: 0.1437\n",
      "1714/6056, train_loss: 0.2787\n",
      "1715/6056, train_loss: 0.1497\n",
      "1716/6056, train_loss: 0.2857\n",
      "1717/6056, train_loss: 0.1526\n",
      "1718/6056, train_loss: 0.1310\n",
      "1719/6056, train_loss: 0.1786\n",
      "1720/6056, train_loss: 0.1776\n",
      "1721/6056, train_loss: 0.1695\n",
      "1722/6056, train_loss: 0.1974\n",
      "1723/6056, train_loss: 0.1901\n",
      "1724/6056, train_loss: 0.1378\n",
      "1725/6056, train_loss: 0.2585\n",
      "1726/6056, train_loss: 0.2065\n",
      "1727/6056, train_loss: 0.1520\n",
      "1728/6056, train_loss: 0.2288\n",
      "1729/6056, train_loss: 0.1215\n",
      "1730/6056, train_loss: 0.1430\n",
      "1731/6056, train_loss: 0.1815\n",
      "1732/6056, train_loss: 0.1402\n",
      "1733/6056, train_loss: 0.3047\n",
      "1734/6056, train_loss: 0.1474\n",
      "1735/6056, train_loss: 0.2813\n",
      "1736/6056, train_loss: 0.2720\n",
      "1737/6056, train_loss: 0.1887\n",
      "1738/6056, train_loss: 0.1757\n",
      "1739/6056, train_loss: 0.1848\n",
      "1740/6056, train_loss: 0.1436\n",
      "1741/6056, train_loss: 0.1353\n",
      "1742/6056, train_loss: 0.1758\n",
      "1743/6056, train_loss: 0.2088\n",
      "1744/6056, train_loss: 0.2167\n",
      "1745/6056, train_loss: 0.1505\n",
      "1746/6056, train_loss: 0.2636\n",
      "1747/6056, train_loss: 0.1996\n",
      "1748/6056, train_loss: 0.2905\n",
      "1749/6056, train_loss: 0.2856\n",
      "1750/6056, train_loss: 0.1860\n",
      "1751/6056, train_loss: 0.1623\n",
      "1752/6056, train_loss: 0.1979\n",
      "1753/6056, train_loss: 0.1686\n",
      "1754/6056, train_loss: 0.1844\n",
      "1755/6056, train_loss: 0.1300\n",
      "1756/6056, train_loss: 0.1825\n",
      "1757/6056, train_loss: 0.3311\n",
      "1758/6056, train_loss: 0.2799\n",
      "1759/6056, train_loss: 0.2082\n",
      "1760/6056, train_loss: 0.1210\n",
      "1761/6056, train_loss: 0.2785\n",
      "1762/6056, train_loss: 0.2042\n",
      "1763/6056, train_loss: 0.1605\n",
      "1764/6056, train_loss: 0.1223\n",
      "1765/6056, train_loss: 0.2302\n",
      "1766/6056, train_loss: 0.1746\n",
      "1767/6056, train_loss: 0.1758\n",
      "1768/6056, train_loss: 0.1961\n",
      "1769/6056, train_loss: 0.1422\n",
      "1770/6056, train_loss: 0.2435\n",
      "1771/6056, train_loss: 0.2326\n",
      "1772/6056, train_loss: 0.1495\n",
      "1773/6056, train_loss: 0.2049\n",
      "1774/6056, train_loss: 0.1549\n",
      "1775/6056, train_loss: 0.1928\n",
      "1776/6056, train_loss: 0.3829\n",
      "1777/6056, train_loss: 0.2216\n",
      "1778/6056, train_loss: 0.2168\n",
      "1779/6056, train_loss: 0.2252\n",
      "1780/6056, train_loss: 0.1493\n",
      "1781/6056, train_loss: 0.1583\n",
      "1782/6056, train_loss: 0.1515\n",
      "1783/6056, train_loss: 0.1710\n",
      "1784/6056, train_loss: 0.1805\n",
      "1785/6056, train_loss: 0.2739\n",
      "1786/6056, train_loss: 0.1779\n",
      "1787/6056, train_loss: 0.1792\n",
      "1788/6056, train_loss: 0.1577\n",
      "1789/6056, train_loss: 0.1662\n",
      "1790/6056, train_loss: 0.1451\n",
      "1791/6056, train_loss: 0.2510\n",
      "1792/6056, train_loss: 0.3223\n",
      "1793/6056, train_loss: 0.2125\n",
      "1794/6056, train_loss: 0.2665\n",
      "1795/6056, train_loss: 0.2418\n",
      "1796/6056, train_loss: 0.2238\n",
      "1797/6056, train_loss: 0.2125\n",
      "1798/6056, train_loss: 0.1901\n",
      "1799/6056, train_loss: 0.2398\n",
      "1800/6056, train_loss: 0.1716\n",
      "1801/6056, train_loss: 0.2175\n",
      "1802/6056, train_loss: 0.1978\n",
      "1803/6056, train_loss: 0.2001\n",
      "1804/6056, train_loss: 0.1273\n",
      "1805/6056, train_loss: 0.1755\n",
      "1806/6056, train_loss: 0.1745\n",
      "1807/6056, train_loss: 0.2312\n",
      "1808/6056, train_loss: 0.2239\n",
      "1809/6056, train_loss: 0.2225\n",
      "1810/6056, train_loss: 0.1379\n",
      "1811/6056, train_loss: 0.2291\n",
      "1812/6056, train_loss: 0.2003\n",
      "1813/6056, train_loss: 0.3017\n",
      "1814/6056, train_loss: 0.1455\n",
      "1815/6056, train_loss: 0.3545\n",
      "1816/6056, train_loss: 0.1980\n",
      "1817/6056, train_loss: 0.1762\n",
      "1818/6056, train_loss: 0.2011\n",
      "1819/6056, train_loss: 0.1558\n",
      "1820/6056, train_loss: 0.1965\n",
      "1821/6056, train_loss: 0.1033\n",
      "1822/6056, train_loss: 0.3043\n",
      "1823/6056, train_loss: 0.1639\n",
      "1824/6056, train_loss: 0.1963\n",
      "1825/6056, train_loss: 0.1622\n",
      "1826/6056, train_loss: 0.2966\n",
      "1827/6056, train_loss: 0.1930\n",
      "1828/6056, train_loss: 0.1839\n",
      "1829/6056, train_loss: 0.1525\n",
      "1830/6056, train_loss: 0.2463\n",
      "1831/6056, train_loss: 0.1374\n",
      "1832/6056, train_loss: 0.1457\n",
      "1833/6056, train_loss: 0.1897\n",
      "1834/6056, train_loss: 0.2593\n",
      "1835/6056, train_loss: 0.1895\n",
      "1836/6056, train_loss: 0.1762\n",
      "1837/6056, train_loss: 0.1949\n",
      "1838/6056, train_loss: 0.2067\n",
      "1839/6056, train_loss: 0.1968\n",
      "1840/6056, train_loss: 0.1559\n",
      "1841/6056, train_loss: 0.2144\n",
      "1842/6056, train_loss: 0.2666\n",
      "1843/6056, train_loss: 0.1382\n",
      "1844/6056, train_loss: 0.2010\n",
      "1845/6056, train_loss: 0.1399\n",
      "1846/6056, train_loss: 0.2162\n",
      "1847/6056, train_loss: 0.1621\n",
      "1848/6056, train_loss: 0.2160\n",
      "1849/6056, train_loss: 0.1521\n",
      "1850/6056, train_loss: 0.2031\n",
      "1851/6056, train_loss: 0.1783\n",
      "1852/6056, train_loss: 0.1305\n",
      "1853/6056, train_loss: 0.1568\n",
      "1854/6056, train_loss: 0.1615\n",
      "1855/6056, train_loss: 0.2021\n",
      "1856/6056, train_loss: 0.2027\n",
      "1857/6056, train_loss: 0.1784\n",
      "1858/6056, train_loss: 0.1911\n",
      "1859/6056, train_loss: 0.1451\n",
      "1860/6056, train_loss: 0.1278\n",
      "1861/6056, train_loss: 0.1880\n",
      "1862/6056, train_loss: 0.2494\n",
      "1863/6056, train_loss: 0.2376\n",
      "1864/6056, train_loss: 0.2044\n",
      "1865/6056, train_loss: 0.2628\n",
      "1866/6056, train_loss: 0.3232\n",
      "1867/6056, train_loss: 0.1630\n",
      "1868/6056, train_loss: 0.1947\n",
      "1869/6056, train_loss: 0.2240\n",
      "1870/6056, train_loss: 0.1672\n",
      "1871/6056, train_loss: 0.1790\n",
      "1872/6056, train_loss: 0.1805\n",
      "1873/6056, train_loss: 0.2398\n",
      "1874/6056, train_loss: 0.1592\n",
      "1875/6056, train_loss: 0.1223\n",
      "1876/6056, train_loss: 0.1635\n",
      "1877/6056, train_loss: 0.2514\n",
      "1878/6056, train_loss: 0.2395\n",
      "1879/6056, train_loss: 0.1898\n",
      "1880/6056, train_loss: 0.1931\n",
      "1881/6056, train_loss: 0.1550\n",
      "1882/6056, train_loss: 0.2597\n",
      "1883/6056, train_loss: 0.2509\n",
      "1884/6056, train_loss: 0.2379\n",
      "1885/6056, train_loss: 0.1406\n",
      "1886/6056, train_loss: 0.2610\n",
      "1887/6056, train_loss: 0.1319\n",
      "1888/6056, train_loss: 0.1649\n",
      "1889/6056, train_loss: 0.1966\n",
      "1890/6056, train_loss: 0.1763\n",
      "1891/6056, train_loss: 0.1479\n",
      "1892/6056, train_loss: 0.1998\n",
      "1893/6056, train_loss: 0.2126\n",
      "1894/6056, train_loss: 0.1818\n",
      "1895/6056, train_loss: 0.3175\n",
      "1896/6056, train_loss: 0.2417\n",
      "1897/6056, train_loss: 0.1940\n",
      "1898/6056, train_loss: 0.1882\n",
      "1899/6056, train_loss: 0.2223\n",
      "1900/6056, train_loss: 0.1437\n",
      "1901/6056, train_loss: 0.2543\n",
      "1902/6056, train_loss: 0.1492\n",
      "1903/6056, train_loss: 0.1975\n",
      "1904/6056, train_loss: 0.1636\n",
      "1905/6056, train_loss: 0.2208\n",
      "1906/6056, train_loss: 0.2071\n",
      "1907/6056, train_loss: 0.2502\n",
      "1908/6056, train_loss: 0.1326\n",
      "1909/6056, train_loss: 0.1251\n",
      "1910/6056, train_loss: 0.2509\n",
      "1911/6056, train_loss: 0.1431\n",
      "1912/6056, train_loss: 0.1519\n",
      "1913/6056, train_loss: 0.2186\n",
      "1914/6056, train_loss: 0.1786\n",
      "1915/6056, train_loss: 0.1694\n",
      "1916/6056, train_loss: 0.1840\n",
      "1917/6056, train_loss: 0.2119\n",
      "1918/6056, train_loss: 0.2846\n",
      "1919/6056, train_loss: 0.2777\n",
      "1920/6056, train_loss: 0.2090\n",
      "1921/6056, train_loss: 0.1281\n",
      "1922/6056, train_loss: 0.2234\n",
      "1923/6056, train_loss: 0.2608\n",
      "1924/6056, train_loss: 0.2684\n",
      "1925/6056, train_loss: 0.1348\n",
      "1926/6056, train_loss: 0.2277\n",
      "1927/6056, train_loss: 0.1849\n",
      "1928/6056, train_loss: 0.1245\n",
      "1929/6056, train_loss: 0.1250\n",
      "1930/6056, train_loss: 0.1489\n",
      "1931/6056, train_loss: 0.1714\n",
      "1932/6056, train_loss: 0.2181\n",
      "1933/6056, train_loss: 0.1310\n",
      "1934/6056, train_loss: 0.1819\n",
      "1935/6056, train_loss: 0.1637\n",
      "1936/6056, train_loss: 0.1765\n",
      "1937/6056, train_loss: 0.1950\n",
      "1938/6056, train_loss: 0.2832\n",
      "1939/6056, train_loss: 0.2095\n",
      "1940/6056, train_loss: 0.1816\n",
      "1941/6056, train_loss: 0.1728\n",
      "1942/6056, train_loss: 0.2469\n",
      "1943/6056, train_loss: 0.2572\n",
      "1944/6056, train_loss: 0.0959\n",
      "1945/6056, train_loss: 0.2400\n",
      "1946/6056, train_loss: 0.2113\n",
      "1947/6056, train_loss: 0.2620\n",
      "1948/6056, train_loss: 0.2006\n",
      "1949/6056, train_loss: 0.1988\n",
      "1950/6056, train_loss: 0.2114\n",
      "1951/6056, train_loss: 0.1514\n",
      "1952/6056, train_loss: 0.2584\n",
      "1953/6056, train_loss: 0.1651\n",
      "1954/6056, train_loss: 0.0998\n",
      "1955/6056, train_loss: 0.2059\n",
      "1956/6056, train_loss: 0.2631\n",
      "1957/6056, train_loss: 0.1493\n",
      "1958/6056, train_loss: 0.2042\n",
      "1959/6056, train_loss: 0.2037\n",
      "1960/6056, train_loss: 0.1915\n",
      "1961/6056, train_loss: 0.1724\n",
      "1962/6056, train_loss: 0.2068\n",
      "1963/6056, train_loss: 0.2637\n",
      "1964/6056, train_loss: 0.1431\n",
      "1965/6056, train_loss: 0.1949\n",
      "1966/6056, train_loss: 0.1472\n",
      "1967/6056, train_loss: 0.2069\n",
      "1968/6056, train_loss: 0.1723\n",
      "1969/6056, train_loss: 0.1282\n",
      "1970/6056, train_loss: 0.2200\n",
      "1971/6056, train_loss: 0.1975\n",
      "1972/6056, train_loss: 0.2095\n",
      "1973/6056, train_loss: 0.2517\n",
      "1974/6056, train_loss: 0.1992\n",
      "1975/6056, train_loss: 0.2050\n",
      "1976/6056, train_loss: 0.3009\n",
      "1977/6056, train_loss: 0.1290\n",
      "1978/6056, train_loss: 0.2326\n",
      "1979/6056, train_loss: 0.1839\n",
      "1980/6056, train_loss: 0.2240\n",
      "1981/6056, train_loss: 0.1794\n",
      "1982/6056, train_loss: 0.2994\n",
      "1983/6056, train_loss: 0.2084\n",
      "1984/6056, train_loss: 0.1821\n",
      "1985/6056, train_loss: 0.1334\n",
      "1986/6056, train_loss: 0.1788\n",
      "1987/6056, train_loss: 0.1944\n",
      "1988/6056, train_loss: 0.1878\n",
      "1989/6056, train_loss: 0.1262\n",
      "1990/6056, train_loss: 0.2051\n",
      "1991/6056, train_loss: 0.2414\n",
      "1992/6056, train_loss: 0.2612\n",
      "1993/6056, train_loss: 0.2051\n",
      "1994/6056, train_loss: 0.3262\n",
      "1995/6056, train_loss: 0.2648\n",
      "1996/6056, train_loss: 0.1547\n",
      "1997/6056, train_loss: 0.2585\n",
      "1998/6056, train_loss: 0.1372\n",
      "1999/6056, train_loss: 0.2217\n",
      "2000/6056, train_loss: 0.1742\n",
      "2001/6056, train_loss: 0.1796\n",
      "2002/6056, train_loss: 0.1649\n",
      "2003/6056, train_loss: 0.2142\n",
      "2004/6056, train_loss: 0.1747\n",
      "2005/6056, train_loss: 0.1802\n",
      "2006/6056, train_loss: 0.2261\n",
      "2007/6056, train_loss: 0.2003\n",
      "2008/6056, train_loss: 0.2375\n",
      "2009/6056, train_loss: 0.1656\n",
      "2010/6056, train_loss: 0.1068\n",
      "2011/6056, train_loss: 0.1636\n",
      "2012/6056, train_loss: 0.1606\n",
      "2013/6056, train_loss: 0.1239\n",
      "2032/6056, train_loss: 0.1750\n",
      "2033/6056, train_loss: 0.1377\n",
      "2034/6056, train_loss: 0.1696\n",
      "2035/6056, train_loss: 0.1535\n",
      "2036/6056, train_loss: 0.1820\n",
      "2037/6056, train_loss: 0.2346\n",
      "2038/6056, train_loss: 0.1698\n",
      "2039/6056, train_loss: 0.1997\n",
      "2040/6056, train_loss: 0.1852\n",
      "2041/6056, train_loss: 0.2083\n",
      "2042/6056, train_loss: 0.1789\n",
      "2043/6056, train_loss: 0.1506\n",
      "2044/6056, train_loss: 0.1852\n",
      "2045/6056, train_loss: 0.1870\n",
      "2046/6056, train_loss: 0.1719\n",
      "2047/6056, train_loss: 0.2269\n",
      "2048/6056, train_loss: 0.1881\n",
      "2049/6056, train_loss: 0.1814\n",
      "2050/6056, train_loss: 0.1780\n",
      "2051/6056, train_loss: 0.1758\n",
      "2052/6056, train_loss: 0.1896\n",
      "2053/6056, train_loss: 0.1398\n",
      "2054/6056, train_loss: 0.2276\n",
      "2055/6056, train_loss: 0.1564\n",
      "2056/6056, train_loss: 0.1349\n",
      "2057/6056, train_loss: 0.2040\n",
      "2058/6056, train_loss: 0.1792\n",
      "2059/6056, train_loss: 0.1270\n",
      "2060/6056, train_loss: 0.2254\n",
      "2061/6056, train_loss: 0.1527\n",
      "2062/6056, train_loss: 0.2260\n",
      "2063/6056, train_loss: 0.2574\n",
      "2064/6056, train_loss: 0.2042\n",
      "2065/6056, train_loss: 0.1358\n",
      "2066/6056, train_loss: 0.2356\n",
      "2067/6056, train_loss: 0.1247\n",
      "2068/6056, train_loss: 0.1803\n",
      "2069/6056, train_loss: 0.2136\n",
      "2070/6056, train_loss: 0.1596\n",
      "2071/6056, train_loss: 0.1655\n",
      "2072/6056, train_loss: 0.1631\n",
      "2073/6056, train_loss: 0.2369\n",
      "2074/6056, train_loss: 0.1617\n",
      "2075/6056, train_loss: 0.1337\n",
      "2076/6056, train_loss: 0.1504\n",
      "2077/6056, train_loss: 0.2034\n",
      "2078/6056, train_loss: 0.2628\n",
      "2079/6056, train_loss: 0.1974\n",
      "2080/6056, train_loss: 0.1754\n",
      "2081/6056, train_loss: 0.2130\n",
      "2082/6056, train_loss: 0.2028\n",
      "2083/6056, train_loss: 0.1354\n",
      "2084/6056, train_loss: 0.2215\n",
      "2085/6056, train_loss: 0.1553\n",
      "2086/6056, train_loss: 0.3663\n",
      "2087/6056, train_loss: 0.2720\n",
      "2088/6056, train_loss: 0.2056\n",
      "2089/6056, train_loss: 0.2237\n",
      "2090/6056, train_loss: 0.1986\n",
      "2091/6056, train_loss: 0.2117\n",
      "2092/6056, train_loss: 0.2097\n",
      "2093/6056, train_loss: 0.2357\n",
      "2094/6056, train_loss: 0.1379\n",
      "2095/6056, train_loss: 0.2418\n",
      "2096/6056, train_loss: 0.1903\n",
      "2097/6056, train_loss: 0.2242\n",
      "2098/6056, train_loss: 0.2406\n",
      "2099/6056, train_loss: 0.2643\n",
      "2100/6056, train_loss: 0.1219\n",
      "2101/6056, train_loss: 0.1902\n",
      "2102/6056, train_loss: 0.2213\n",
      "2103/6056, train_loss: 0.1543\n",
      "2104/6056, train_loss: 0.2175\n",
      "2105/6056, train_loss: 0.1714\n",
      "2106/6056, train_loss: 0.1962\n",
      "2107/6056, train_loss: 0.2220\n",
      "2108/6056, train_loss: 0.2563\n",
      "2109/6056, train_loss: 0.3142\n",
      "2110/6056, train_loss: 0.2075\n",
      "2111/6056, train_loss: 0.1673\n",
      "2112/6056, train_loss: 0.1858\n",
      "2113/6056, train_loss: 0.1842\n",
      "2114/6056, train_loss: 0.1898\n",
      "2115/6056, train_loss: 0.1787\n",
      "2116/6056, train_loss: 0.1438\n",
      "2117/6056, train_loss: 0.2221\n",
      "2118/6056, train_loss: 0.1357\n",
      "2119/6056, train_loss: 0.1991\n",
      "2120/6056, train_loss: 0.1400\n",
      "2121/6056, train_loss: 0.3222\n",
      "2122/6056, train_loss: 0.1175\n",
      "2123/6056, train_loss: 0.2075\n",
      "2124/6056, train_loss: 0.2037\n",
      "2125/6056, train_loss: 0.1766\n",
      "2126/6056, train_loss: 0.1727\n",
      "2127/6056, train_loss: 0.2327\n",
      "2128/6056, train_loss: 0.1873\n",
      "2129/6056, train_loss: 0.1738\n",
      "2130/6056, train_loss: 0.2068\n",
      "2131/6056, train_loss: 0.0909\n",
      "2132/6056, train_loss: 0.1842\n",
      "2133/6056, train_loss: 0.1896\n",
      "2134/6056, train_loss: 0.2565\n",
      "2135/6056, train_loss: 0.1903\n",
      "2136/6056, train_loss: 0.2482\n",
      "2137/6056, train_loss: 0.2358\n",
      "2138/6056, train_loss: 0.1354\n",
      "2139/6056, train_loss: 0.2642\n",
      "2140/6056, train_loss: 0.2111\n",
      "2141/6056, train_loss: 0.2284\n",
      "2142/6056, train_loss: 0.1210\n",
      "2143/6056, train_loss: 0.1956\n",
      "2144/6056, train_loss: 0.2031\n",
      "2145/6056, train_loss: 0.2011\n",
      "2146/6056, train_loss: 0.1840\n",
      "2147/6056, train_loss: 0.1402\n",
      "2148/6056, train_loss: 0.1597\n",
      "2149/6056, train_loss: 0.1428\n",
      "2150/6056, train_loss: 0.3140\n",
      "2151/6056, train_loss: 0.2044\n",
      "2152/6056, train_loss: 0.1698\n",
      "2153/6056, train_loss: 0.1397\n",
      "2154/6056, train_loss: 0.1172\n",
      "2155/6056, train_loss: 0.1745\n",
      "2156/6056, train_loss: 0.1130\n",
      "2157/6056, train_loss: 0.1877\n",
      "2158/6056, train_loss: 0.1368\n",
      "2159/6056, train_loss: 0.1473\n",
      "2160/6056, train_loss: 0.1945\n",
      "2161/6056, train_loss: 0.1692\n",
      "2162/6056, train_loss: 0.2040\n",
      "2163/6056, train_loss: 0.1307\n",
      "2164/6056, train_loss: 0.1256\n",
      "2165/6056, train_loss: 0.1697\n",
      "2166/6056, train_loss: 0.1707\n",
      "2167/6056, train_loss: 0.2240\n",
      "2168/6056, train_loss: 0.2405\n",
      "2169/6056, train_loss: 0.2002\n",
      "2170/6056, train_loss: 0.1053\n",
      "2171/6056, train_loss: 0.2461\n",
      "2172/6056, train_loss: 0.1527\n",
      "2173/6056, train_loss: 0.1837\n",
      "2174/6056, train_loss: 0.1621\n",
      "2175/6056, train_loss: 0.2203\n",
      "2176/6056, train_loss: 0.0873\n",
      "2177/6056, train_loss: 0.1945\n",
      "2178/6056, train_loss: 0.0931\n",
      "2179/6056, train_loss: 0.1599\n",
      "2180/6056, train_loss: 0.1433\n",
      "2181/6056, train_loss: 0.1704\n",
      "2182/6056, train_loss: 0.1987\n",
      "2183/6056, train_loss: 0.1869\n",
      "2184/6056, train_loss: 0.1773\n",
      "2185/6056, train_loss: 0.2216\n",
      "2186/6056, train_loss: 0.2235\n",
      "2187/6056, train_loss: 0.1719\n",
      "2188/6056, train_loss: 0.2354\n",
      "2189/6056, train_loss: 0.1881\n",
      "2190/6056, train_loss: 0.1547\n",
      "2191/6056, train_loss: 0.2102\n",
      "2192/6056, train_loss: 0.2157\n",
      "2193/6056, train_loss: 0.1698\n",
      "2194/6056, train_loss: 0.1325\n",
      "2195/6056, train_loss: 0.2085\n",
      "2196/6056, train_loss: 0.1722\n",
      "2197/6056, train_loss: 0.2209\n",
      "2198/6056, train_loss: 0.2677\n",
      "2199/6056, train_loss: 0.2660\n",
      "2200/6056, train_loss: 0.2321\n",
      "2201/6056, train_loss: 0.2557\n",
      "2202/6056, train_loss: 0.2361\n",
      "2203/6056, train_loss: 0.1739\n",
      "2204/6056, train_loss: 0.1534\n",
      "2205/6056, train_loss: 0.2550\n",
      "2206/6056, train_loss: 0.1122\n",
      "2207/6056, train_loss: 0.2043\n",
      "2208/6056, train_loss: 0.1346\n",
      "2209/6056, train_loss: 0.2098\n",
      "2210/6056, train_loss: 0.2176\n",
      "2211/6056, train_loss: 0.1389\n",
      "2212/6056, train_loss: 0.1365\n",
      "2213/6056, train_loss: 0.2482\n",
      "2214/6056, train_loss: 0.1980\n",
      "2215/6056, train_loss: 0.2658\n",
      "2216/6056, train_loss: 0.2121\n",
      "2217/6056, train_loss: 0.2082\n",
      "2218/6056, train_loss: 0.1661\n",
      "2219/6056, train_loss: 0.1882\n",
      "2220/6056, train_loss: 0.2934\n",
      "2221/6056, train_loss: 0.1496\n",
      "2222/6056, train_loss: 0.1052\n",
      "2223/6056, train_loss: 0.1644\n",
      "2224/6056, train_loss: 0.1617\n",
      "2225/6056, train_loss: 0.1587\n",
      "2226/6056, train_loss: 0.2383\n",
      "2227/6056, train_loss: 0.1868\n",
      "2228/6056, train_loss: 0.1509\n",
      "2229/6056, train_loss: 0.1535\n",
      "2230/6056, train_loss: 0.2471\n",
      "2231/6056, train_loss: 0.2111\n",
      "2232/6056, train_loss: 0.2142\n",
      "2233/6056, train_loss: 0.1906\n",
      "2234/6056, train_loss: 0.2053\n",
      "2235/6056, train_loss: 0.1908\n",
      "2236/6056, train_loss: 0.2678\n",
      "2237/6056, train_loss: 0.1060\n",
      "2238/6056, train_loss: 0.1789\n",
      "2239/6056, train_loss: 0.1677\n",
      "2240/6056, train_loss: 0.2415\n",
      "2241/6056, train_loss: 0.1603\n",
      "2242/6056, train_loss: 0.1981\n",
      "2243/6056, train_loss: 0.2270\n",
      "2244/6056, train_loss: 0.1592\n",
      "2245/6056, train_loss: 0.2231\n",
      "2246/6056, train_loss: 0.1744\n",
      "2247/6056, train_loss: 0.2466\n",
      "2248/6056, train_loss: 0.1532\n",
      "2249/6056, train_loss: 0.1299\n",
      "2250/6056, train_loss: 0.1284\n",
      "2251/6056, train_loss: 0.2343\n",
      "2252/6056, train_loss: 0.1444\n",
      "2253/6056, train_loss: 0.2263\n",
      "2254/6056, train_loss: 0.2245\n",
      "2255/6056, train_loss: 0.2909\n",
      "2256/6056, train_loss: 0.1179\n",
      "2257/6056, train_loss: 0.1607\n",
      "2258/6056, train_loss: 0.2415\n",
      "2259/6056, train_loss: 0.1580\n",
      "2260/6056, train_loss: 0.2260\n",
      "2261/6056, train_loss: 0.1983\n",
      "2262/6056, train_loss: 0.1394\n",
      "2263/6056, train_loss: 0.1330\n",
      "2264/6056, train_loss: 0.1460\n",
      "2265/6056, train_loss: 0.1644\n",
      "2266/6056, train_loss: 0.1755\n",
      "2267/6056, train_loss: 0.2314\n",
      "2268/6056, train_loss: 0.1566\n",
      "2269/6056, train_loss: 0.2047\n",
      "2270/6056, train_loss: 0.2653\n",
      "2271/6056, train_loss: 0.1501\n",
      "2272/6056, train_loss: 0.1395\n",
      "2273/6056, train_loss: 0.2426\n",
      "2274/6056, train_loss: 0.1334\n",
      "2275/6056, train_loss: 0.1867\n",
      "2276/6056, train_loss: 0.2342\n",
      "2277/6056, train_loss: 0.2138\n",
      "2278/6056, train_loss: 0.1209\n",
      "2279/6056, train_loss: 0.2344\n",
      "2280/6056, train_loss: 0.1475\n",
      "2281/6056, train_loss: 0.2788\n",
      "2282/6056, train_loss: 0.2170\n",
      "2283/6056, train_loss: 0.2680\n",
      "2284/6056, train_loss: 0.2367\n",
      "2285/6056, train_loss: 0.2087\n",
      "2286/6056, train_loss: 0.1659\n",
      "2287/6056, train_loss: 0.1342\n",
      "2288/6056, train_loss: 0.1593\n",
      "2289/6056, train_loss: 0.1886\n",
      "2290/6056, train_loss: 0.2310\n",
      "2291/6056, train_loss: 0.1585\n",
      "2292/6056, train_loss: 0.2055\n",
      "2293/6056, train_loss: 0.1600\n",
      "2294/6056, train_loss: 0.2344\n",
      "2295/6056, train_loss: 0.1368\n",
      "2296/6056, train_loss: 0.1818\n",
      "2297/6056, train_loss: 0.1465\n",
      "2298/6056, train_loss: 0.1404\n",
      "2299/6056, train_loss: 0.2835\n",
      "2300/6056, train_loss: 0.1952\n",
      "2301/6056, train_loss: 0.2105\n",
      "2302/6056, train_loss: 0.1099\n",
      "2303/6056, train_loss: 0.1871\n",
      "2304/6056, train_loss: 0.1255\n",
      "2305/6056, train_loss: 0.1612\n",
      "2306/6056, train_loss: 0.1588\n",
      "2307/6056, train_loss: 0.2008\n",
      "2308/6056, train_loss: 0.1500\n",
      "2309/6056, train_loss: 0.2031\n",
      "2310/6056, train_loss: 0.1855\n",
      "2311/6056, train_loss: 0.2109\n",
      "2312/6056, train_loss: 0.1658\n",
      "2313/6056, train_loss: 0.2293\n",
      "2314/6056, train_loss: 0.2247\n",
      "2315/6056, train_loss: 0.1384\n",
      "2316/6056, train_loss: 0.2335\n",
      "2317/6056, train_loss: 0.1580\n",
      "2318/6056, train_loss: 0.1772\n",
      "2319/6056, train_loss: 0.1862\n",
      "2320/6056, train_loss: 0.2822\n",
      "2321/6056, train_loss: 0.1761\n",
      "2322/6056, train_loss: 0.1195\n",
      "2323/6056, train_loss: 0.1072\n",
      "2324/6056, train_loss: 0.3046\n",
      "2325/6056, train_loss: 0.1369\n",
      "2326/6056, train_loss: 0.2241\n",
      "2327/6056, train_loss: 0.2116\n",
      "2328/6056, train_loss: 0.2472\n",
      "2329/6056, train_loss: 0.2641\n",
      "2330/6056, train_loss: 0.2122\n",
      "2331/6056, train_loss: 0.2013\n",
      "2332/6056, train_loss: 0.2426\n",
      "2333/6056, train_loss: 0.2595\n",
      "2334/6056, train_loss: 0.1616\n",
      "2335/6056, train_loss: 0.1428\n",
      "2336/6056, train_loss: 0.1095\n",
      "2337/6056, train_loss: 0.2256\n",
      "2338/6056, train_loss: 0.2020\n",
      "2339/6056, train_loss: 0.2163\n",
      "2340/6056, train_loss: 0.1661\n",
      "2341/6056, train_loss: 0.2786\n",
      "2342/6056, train_loss: 0.2890\n",
      "2343/6056, train_loss: 0.2079\n",
      "2344/6056, train_loss: 0.2785\n",
      "2345/6056, train_loss: 0.2257\n",
      "2346/6056, train_loss: 0.3199\n",
      "2347/6056, train_loss: 0.1608\n",
      "2348/6056, train_loss: 0.1333\n",
      "2349/6056, train_loss: 0.1863\n",
      "2350/6056, train_loss: 0.2016\n",
      "2351/6056, train_loss: 0.1830\n",
      "2352/6056, train_loss: 0.1988\n",
      "2353/6056, train_loss: 0.2893\n",
      "2354/6056, train_loss: 0.3311\n",
      "2355/6056, train_loss: 0.1337\n",
      "2356/6056, train_loss: 0.2276\n",
      "2357/6056, train_loss: 0.1306\n",
      "2358/6056, train_loss: 0.1598\n",
      "2359/6056, train_loss: 0.1525\n",
      "2360/6056, train_loss: 0.1846\n",
      "2361/6056, train_loss: 0.2944\n",
      "2362/6056, train_loss: 0.1335\n",
      "2363/6056, train_loss: 0.2444\n",
      "2364/6056, train_loss: 0.0846\n",
      "2365/6056, train_loss: 0.1553\n",
      "2366/6056, train_loss: 0.0860\n",
      "2367/6056, train_loss: 0.2323\n",
      "2368/6056, train_loss: 0.1866\n",
      "2369/6056, train_loss: 0.1560\n",
      "2370/6056, train_loss: 0.2001\n",
      "2371/6056, train_loss: 0.2183\n",
      "2372/6056, train_loss: 0.1399\n",
      "2373/6056, train_loss: 0.2194\n",
      "2374/6056, train_loss: 0.1851\n",
      "2375/6056, train_loss: 0.2297\n",
      "2376/6056, train_loss: 0.1502\n",
      "2377/6056, train_loss: 0.1601\n",
      "2378/6056, train_loss: 0.1835\n",
      "2379/6056, train_loss: 0.1032\n",
      "2380/6056, train_loss: 0.1326\n",
      "2381/6056, train_loss: 0.1706\n",
      "2382/6056, train_loss: 0.2163\n",
      "2383/6056, train_loss: 0.1813\n",
      "2384/6056, train_loss: 0.1698\n",
      "2385/6056, train_loss: 0.1649\n",
      "2386/6056, train_loss: 0.2209\n",
      "2387/6056, train_loss: 0.1750\n",
      "2388/6056, train_loss: 0.1434\n",
      "2389/6056, train_loss: 0.1979\n",
      "2390/6056, train_loss: 0.1370\n",
      "2391/6056, train_loss: 0.2398\n",
      "2392/6056, train_loss: 0.2031\n",
      "2393/6056, train_loss: 0.1850\n",
      "2394/6056, train_loss: 0.2010\n",
      "2395/6056, train_loss: 0.1158\n",
      "2396/6056, train_loss: 0.2389\n",
      "2397/6056, train_loss: 0.1651\n",
      "2398/6056, train_loss: 0.3222\n",
      "2399/6056, train_loss: 0.1388\n",
      "2400/6056, train_loss: 0.2309\n",
      "2401/6056, train_loss: 0.2353\n",
      "2402/6056, train_loss: 0.1779\n",
      "2403/6056, train_loss: 0.1722\n",
      "2404/6056, train_loss: 0.1899\n",
      "2405/6056, train_loss: 0.1365\n",
      "2406/6056, train_loss: 0.1936\n",
      "2407/6056, train_loss: 0.2850\n",
      "2408/6056, train_loss: 0.1061\n",
      "2409/6056, train_loss: 0.1343\n",
      "2410/6056, train_loss: 0.2488\n",
      "2411/6056, train_loss: 0.1938\n",
      "2412/6056, train_loss: 0.2409\n",
      "2413/6056, train_loss: 0.2074\n",
      "2414/6056, train_loss: 0.1507\n",
      "2415/6056, train_loss: 0.1462\n",
      "2416/6056, train_loss: 0.2223\n",
      "2417/6056, train_loss: 0.1478\n",
      "2418/6056, train_loss: 0.2377\n",
      "2419/6056, train_loss: 0.1521\n",
      "2420/6056, train_loss: 0.2491\n",
      "2421/6056, train_loss: 0.1362\n",
      "2422/6056, train_loss: 0.3300\n",
      "2423/6056, train_loss: 0.1520\n",
      "2424/6056, train_loss: 0.2157\n",
      "2425/6056, train_loss: 0.2015\n",
      "2426/6056, train_loss: 0.1489\n",
      "2427/6056, train_loss: 0.1766\n",
      "2428/6056, train_loss: 0.1534\n",
      "2429/6056, train_loss: 0.1817\n",
      "2430/6056, train_loss: 0.2067\n",
      "2431/6056, train_loss: 0.2647\n",
      "2432/6056, train_loss: 0.1793\n",
      "2433/6056, train_loss: 0.2440\n",
      "2434/6056, train_loss: 0.2458\n",
      "2435/6056, train_loss: 0.2857\n",
      "2436/6056, train_loss: 0.1392\n",
      "2437/6056, train_loss: 0.2277\n",
      "2438/6056, train_loss: 0.1317\n",
      "2439/6056, train_loss: 0.1901\n",
      "2440/6056, train_loss: 0.1709\n",
      "2441/6056, train_loss: 0.2388\n",
      "2442/6056, train_loss: 0.2276\n",
      "2443/6056, train_loss: 0.2662\n",
      "2444/6056, train_loss: 0.1358\n",
      "2445/6056, train_loss: 0.1162\n",
      "2446/6056, train_loss: 0.1900\n",
      "2447/6056, train_loss: 0.2413\n",
      "2448/6056, train_loss: 0.2291\n",
      "2449/6056, train_loss: 0.1348\n",
      "2450/6056, train_loss: 0.1386\n",
      "2451/6056, train_loss: 0.1546\n",
      "2452/6056, train_loss: 0.1926\n",
      "2453/6056, train_loss: 0.2584\n",
      "2454/6056, train_loss: 0.2662\n",
      "2455/6056, train_loss: 0.2319\n",
      "2456/6056, train_loss: 0.1424\n",
      "2457/6056, train_loss: 0.2421\n",
      "2458/6056, train_loss: 0.1748\n",
      "2459/6056, train_loss: 0.1826\n",
      "2460/6056, train_loss: 0.1548\n",
      "2461/6056, train_loss: 0.2216\n",
      "2462/6056, train_loss: 0.2520\n",
      "2463/6056, train_loss: 0.2140\n",
      "2464/6056, train_loss: 0.1487\n",
      "2465/6056, train_loss: 0.2083\n",
      "2466/6056, train_loss: 0.1813\n",
      "2467/6056, train_loss: 0.2440\n",
      "2468/6056, train_loss: 0.2037\n",
      "2469/6056, train_loss: 0.2095\n",
      "2470/6056, train_loss: 0.2528\n",
      "2471/6056, train_loss: 0.2076\n",
      "2472/6056, train_loss: 0.1962\n",
      "2473/6056, train_loss: 0.1985\n",
      "2474/6056, train_loss: 0.2495\n",
      "2475/6056, train_loss: 0.2528\n",
      "2476/6056, train_loss: 0.2192\n",
      "2477/6056, train_loss: 0.2543\n",
      "2478/6056, train_loss: 0.1091\n",
      "2479/6056, train_loss: 0.1509\n",
      "2480/6056, train_loss: 0.1721\n",
      "2481/6056, train_loss: 0.1871\n",
      "2482/6056, train_loss: 0.1672\n",
      "2483/6056, train_loss: 0.2785\n",
      "2484/6056, train_loss: 0.2300\n",
      "2485/6056, train_loss: 0.0980\n",
      "2486/6056, train_loss: 0.2106\n",
      "2487/6056, train_loss: 0.2888\n",
      "2488/6056, train_loss: 0.1958\n",
      "2489/6056, train_loss: 0.2331\n",
      "2490/6056, train_loss: 0.1808\n",
      "2491/6056, train_loss: 0.2033\n",
      "2492/6056, train_loss: 0.1321\n",
      "2493/6056, train_loss: 0.2059\n",
      "2494/6056, train_loss: 0.2975\n",
      "2495/6056, train_loss: 0.1306\n",
      "2496/6056, train_loss: 0.2055\n",
      "2497/6056, train_loss: 0.2682\n",
      "2498/6056, train_loss: 0.2061\n",
      "2499/6056, train_loss: 0.1814\n",
      "2500/6056, train_loss: 0.2107\n",
      "2501/6056, train_loss: 0.1667\n",
      "2502/6056, train_loss: 0.2372\n",
      "2503/6056, train_loss: 0.2142\n",
      "2504/6056, train_loss: 0.2512\n",
      "2505/6056, train_loss: 0.1411\n",
      "2506/6056, train_loss: 0.2286\n",
      "2507/6056, train_loss: 0.2086\n",
      "2508/6056, train_loss: 0.2487\n",
      "2509/6056, train_loss: 0.2496\n",
      "2510/6056, train_loss: 0.1519\n",
      "2511/6056, train_loss: 0.1875\n",
      "2512/6056, train_loss: 0.2600\n",
      "2513/6056, train_loss: 0.1088\n",
      "2514/6056, train_loss: 0.2295\n",
      "2515/6056, train_loss: 0.2934\n",
      "2516/6056, train_loss: 0.1602\n",
      "2517/6056, train_loss: 0.1842\n",
      "2518/6056, train_loss: 0.1611\n",
      "2519/6056, train_loss: 0.1523\n",
      "2520/6056, train_loss: 0.2280\n",
      "2521/6056, train_loss: 0.1488\n",
      "2522/6056, train_loss: 0.1699\n",
      "2523/6056, train_loss: 0.2786\n",
      "2524/6056, train_loss: 0.1827\n",
      "2525/6056, train_loss: 0.1565\n",
      "2526/6056, train_loss: 0.1473\n",
      "2527/6056, train_loss: 0.1926\n",
      "2528/6056, train_loss: 0.2746\n",
      "2529/6056, train_loss: 0.1217\n",
      "2530/6056, train_loss: 0.1770\n",
      "2531/6056, train_loss: 0.2277\n",
      "2532/6056, train_loss: 0.2357\n",
      "2533/6056, train_loss: 0.1867\n",
      "2534/6056, train_loss: 0.2027\n",
      "2535/6056, train_loss: 0.2348\n",
      "2536/6056, train_loss: 0.1818\n",
      "2537/6056, train_loss: 0.2035\n",
      "2538/6056, train_loss: 0.1241\n",
      "2539/6056, train_loss: 0.1849\n",
      "2540/6056, train_loss: 0.1733\n",
      "2541/6056, train_loss: 0.1766\n",
      "2542/6056, train_loss: 0.1426\n",
      "2543/6056, train_loss: 0.2060\n",
      "2544/6056, train_loss: 0.1606\n",
      "2545/6056, train_loss: 0.1510\n",
      "2546/6056, train_loss: 0.2242\n",
      "2547/6056, train_loss: 0.2595\n",
      "2548/6056, train_loss: 0.1824\n",
      "2549/6056, train_loss: 0.1458\n",
      "2550/6056, train_loss: 0.1337\n",
      "2551/6056, train_loss: 0.0877\n",
      "2552/6056, train_loss: 0.1733\n",
      "2553/6056, train_loss: 0.1994\n",
      "2554/6056, train_loss: 0.1642\n",
      "2555/6056, train_loss: 0.1546\n",
      "2556/6056, train_loss: 0.1351\n",
      "2557/6056, train_loss: 0.1618\n",
      "2558/6056, train_loss: 0.1973\n",
      "2559/6056, train_loss: 0.2580\n",
      "2560/6056, train_loss: 0.2746\n",
      "2561/6056, train_loss: 0.1491\n",
      "2562/6056, train_loss: 0.2009\n",
      "2563/6056, train_loss: 0.2594\n",
      "2564/6056, train_loss: 0.1664\n",
      "2565/6056, train_loss: 0.1660\n",
      "2566/6056, train_loss: 0.1902\n",
      "2567/6056, train_loss: 0.1907\n",
      "2568/6056, train_loss: 0.1543\n",
      "2569/6056, train_loss: 0.1609\n",
      "2570/6056, train_loss: 0.1979\n",
      "2571/6056, train_loss: 0.0950\n",
      "2572/6056, train_loss: 0.1323\n",
      "2573/6056, train_loss: 0.1396\n",
      "2574/6056, train_loss: 0.2521\n",
      "2575/6056, train_loss: 0.1747\n",
      "2576/6056, train_loss: 0.1808\n",
      "2577/6056, train_loss: 0.1760\n",
      "2578/6056, train_loss: 0.1685\n",
      "2579/6056, train_loss: 0.2305\n",
      "2580/6056, train_loss: 0.2927\n",
      "2581/6056, train_loss: 0.1251\n",
      "2582/6056, train_loss: 0.1127\n",
      "2583/6056, train_loss: 0.2106\n",
      "2584/6056, train_loss: 0.2086\n",
      "2585/6056, train_loss: 0.2318\n",
      "2586/6056, train_loss: 0.2026\n",
      "2587/6056, train_loss: 0.0983\n",
      "2588/6056, train_loss: 0.2686\n",
      "2589/6056, train_loss: 0.1808\n",
      "2590/6056, train_loss: 0.1729\n",
      "2591/6056, train_loss: 0.1632\n",
      "2592/6056, train_loss: 0.1589\n",
      "2593/6056, train_loss: 0.2067\n",
      "2594/6056, train_loss: 0.1657\n",
      "2595/6056, train_loss: 0.1259\n",
      "2596/6056, train_loss: 0.2284\n",
      "2597/6056, train_loss: 0.1852\n",
      "2598/6056, train_loss: 0.2673\n",
      "2599/6056, train_loss: 0.1751\n",
      "2600/6056, train_loss: 0.1175\n",
      "2601/6056, train_loss: 0.2032\n",
      "2602/6056, train_loss: 0.2039\n",
      "2603/6056, train_loss: 0.1579\n",
      "2604/6056, train_loss: 0.1449\n",
      "2605/6056, train_loss: 0.2307\n",
      "2606/6056, train_loss: 0.1926\n",
      "2607/6056, train_loss: 0.1762\n",
      "2608/6056, train_loss: 0.2294\n",
      "2609/6056, train_loss: 0.1981\n",
      "2610/6056, train_loss: 0.1614\n",
      "2611/6056, train_loss: 0.1673\n",
      "2612/6056, train_loss: 0.1624\n",
      "2613/6056, train_loss: 0.1530\n",
      "2614/6056, train_loss: 0.1666\n",
      "2615/6056, train_loss: 0.1812\n",
      "2616/6056, train_loss: 0.2090\n",
      "2617/6056, train_loss: 0.2689\n",
      "2618/6056, train_loss: 0.2435\n",
      "2619/6056, train_loss: 0.2173\n",
      "2620/6056, train_loss: 0.1229\n",
      "2621/6056, train_loss: 0.1993\n",
      "2622/6056, train_loss: 0.1638\n",
      "2623/6056, train_loss: 0.1736\n",
      "2624/6056, train_loss: 0.2081\n",
      "2625/6056, train_loss: 0.2883\n",
      "2626/6056, train_loss: 0.1446\n",
      "2627/6056, train_loss: 0.1504\n",
      "2628/6056, train_loss: 0.2194\n",
      "2629/6056, train_loss: 0.3044\n",
      "2630/6056, train_loss: 0.1909\n",
      "2631/6056, train_loss: 0.2016\n",
      "2632/6056, train_loss: 0.1452\n",
      "2633/6056, train_loss: 0.2543\n",
      "2634/6056, train_loss: 0.1858\n",
      "2635/6056, train_loss: 0.1222\n",
      "2636/6056, train_loss: 0.1823\n",
      "2637/6056, train_loss: 0.1342\n",
      "2638/6056, train_loss: 0.2525\n",
      "2639/6056, train_loss: 0.2425\n",
      "2640/6056, train_loss: 0.1869\n",
      "2641/6056, train_loss: 0.3094\n",
      "2642/6056, train_loss: 0.1373\n",
      "2643/6056, train_loss: 0.1171\n",
      "2644/6056, train_loss: 0.2401\n",
      "2645/6056, train_loss: 0.1897\n",
      "2646/6056, train_loss: 0.2152\n",
      "2647/6056, train_loss: 0.1558\n",
      "2648/6056, train_loss: 0.1514\n",
      "2649/6056, train_loss: 0.1837\n",
      "2650/6056, train_loss: 0.1647\n",
      "2651/6056, train_loss: 0.2740\n",
      "2652/6056, train_loss: 0.2672\n",
      "2653/6056, train_loss: 0.1576\n",
      "2654/6056, train_loss: 0.2058\n",
      "2655/6056, train_loss: 0.2696\n",
      "2656/6056, train_loss: 0.1812\n",
      "2657/6056, train_loss: 0.1649\n",
      "2658/6056, train_loss: 0.1987\n",
      "2659/6056, train_loss: 0.1255\n",
      "2660/6056, train_loss: 0.1929\n",
      "2661/6056, train_loss: 0.1773\n",
      "2662/6056, train_loss: 0.1890\n",
      "2663/6056, train_loss: 0.1611\n",
      "2664/6056, train_loss: 0.1716\n",
      "2665/6056, train_loss: 0.1975\n",
      "2666/6056, train_loss: 0.1214\n",
      "2667/6056, train_loss: 0.1218\n",
      "2668/6056, train_loss: 0.3184\n",
      "2669/6056, train_loss: 0.1342\n",
      "2670/6056, train_loss: 0.1272\n",
      "2671/6056, train_loss: 0.0920\n",
      "2672/6056, train_loss: 0.2278\n",
      "2673/6056, train_loss: 0.2480\n",
      "2674/6056, train_loss: 0.2767\n",
      "2675/6056, train_loss: 0.1421\n",
      "2676/6056, train_loss: 0.1827\n",
      "2677/6056, train_loss: 0.1821\n",
      "2678/6056, train_loss: 0.1968\n",
      "2679/6056, train_loss: 0.1750\n",
      "2680/6056, train_loss: 0.2092\n",
      "2681/6056, train_loss: 0.2737\n",
      "2682/6056, train_loss: 0.1483\n",
      "2683/6056, train_loss: 0.2233\n",
      "2684/6056, train_loss: 0.2342\n",
      "2685/6056, train_loss: 0.1387\n",
      "2686/6056, train_loss: 0.1617\n",
      "2687/6056, train_loss: 0.1625\n",
      "2688/6056, train_loss: 0.1622\n",
      "2689/6056, train_loss: 0.2280\n",
      "2690/6056, train_loss: 0.2098\n",
      "2691/6056, train_loss: 0.1621\n",
      "2692/6056, train_loss: 0.1525\n",
      "2693/6056, train_loss: 0.1540\n",
      "2694/6056, train_loss: 0.1159\n",
      "2695/6056, train_loss: 0.1869\n",
      "2696/6056, train_loss: 0.2132\n",
      "2697/6056, train_loss: 0.1365\n",
      "2698/6056, train_loss: 0.0849\n",
      "2699/6056, train_loss: 0.1991\n",
      "2700/6056, train_loss: 0.1603\n",
      "2701/6056, train_loss: 0.1659\n",
      "2702/6056, train_loss: 0.2238\n",
      "2703/6056, train_loss: 0.1372\n",
      "2704/6056, train_loss: 0.2271\n",
      "2705/6056, train_loss: 0.2012\n",
      "2706/6056, train_loss: 0.1348\n",
      "2707/6056, train_loss: 0.2496\n",
      "2708/6056, train_loss: 0.1373\n",
      "2709/6056, train_loss: 0.1577\n",
      "2710/6056, train_loss: 0.1877\n",
      "2711/6056, train_loss: 0.2749\n",
      "2712/6056, train_loss: 0.1310\n",
      "2713/6056, train_loss: 0.1890\n",
      "2714/6056, train_loss: 0.1246\n",
      "2715/6056, train_loss: 0.1169\n",
      "2716/6056, train_loss: 0.2248\n",
      "2717/6056, train_loss: 0.1631\n",
      "2718/6056, train_loss: 0.2466\n",
      "2719/6056, train_loss: 0.1243\n",
      "2720/6056, train_loss: 0.1779\n",
      "2721/6056, train_loss: 0.2695\n",
      "2722/6056, train_loss: 0.2048\n",
      "2723/6056, train_loss: 0.1856\n",
      "2724/6056, train_loss: 0.2210\n",
      "2725/6056, train_loss: 0.1739\n",
      "2726/6056, train_loss: 0.1866\n",
      "2727/6056, train_loss: 0.2235\n",
      "2728/6056, train_loss: 0.1357\n",
      "2729/6056, train_loss: 0.1579\n",
      "2730/6056, train_loss: 0.1909\n",
      "2731/6056, train_loss: 0.1486\n",
      "2732/6056, train_loss: 0.1952\n",
      "2733/6056, train_loss: 0.2070\n",
      "2734/6056, train_loss: 0.1269\n",
      "2735/6056, train_loss: 0.1845\n",
      "2736/6056, train_loss: 0.1101\n",
      "2737/6056, train_loss: 0.2303\n",
      "2738/6056, train_loss: 0.3331\n",
      "2739/6056, train_loss: 0.2661\n",
      "2740/6056, train_loss: 0.1526\n",
      "2741/6056, train_loss: 0.1470\n",
      "2742/6056, train_loss: 0.1356\n",
      "2743/6056, train_loss: 0.1963\n",
      "2744/6056, train_loss: 0.1373\n",
      "2745/6056, train_loss: 0.2414\n",
      "2746/6056, train_loss: 0.2198\n",
      "2747/6056, train_loss: 0.1048\n",
      "2748/6056, train_loss: 0.1888\n",
      "2749/6056, train_loss: 0.1792\n",
      "2750/6056, train_loss: 0.1039\n",
      "2751/6056, train_loss: 0.1850\n",
      "2752/6056, train_loss: 0.2977\n",
      "2753/6056, train_loss: 0.3610\n",
      "2754/6056, train_loss: 0.2124\n",
      "2755/6056, train_loss: 0.1846\n",
      "2756/6056, train_loss: 0.1306\n",
      "2757/6056, train_loss: 0.2232\n",
      "2758/6056, train_loss: 0.1318\n",
      "2759/6056, train_loss: 0.2481\n",
      "2760/6056, train_loss: 0.1995\n",
      "2761/6056, train_loss: 0.1734\n",
      "2762/6056, train_loss: 0.1524\n",
      "2763/6056, train_loss: 0.1987\n",
      "2764/6056, train_loss: 0.2264\n",
      "2765/6056, train_loss: 0.1937\n",
      "2766/6056, train_loss: 0.2300\n",
      "2767/6056, train_loss: 0.2403\n",
      "2768/6056, train_loss: 0.2841\n",
      "2769/6056, train_loss: 0.1985\n",
      "2770/6056, train_loss: 0.1440\n",
      "2771/6056, train_loss: 0.1160\n",
      "2772/6056, train_loss: 0.1458\n",
      "2773/6056, train_loss: 0.2129\n",
      "2774/6056, train_loss: 0.2260\n",
      "2775/6056, train_loss: 0.1742\n",
      "2776/6056, train_loss: 0.2325\n",
      "2777/6056, train_loss: 0.1673\n",
      "2778/6056, train_loss: 0.1785\n",
      "2779/6056, train_loss: 0.1617\n",
      "2780/6056, train_loss: 0.1572\n",
      "2781/6056, train_loss: 0.2490\n",
      "2782/6056, train_loss: 0.1480\n",
      "2783/6056, train_loss: 0.2338\n",
      "2784/6056, train_loss: 0.1866\n",
      "2785/6056, train_loss: 0.2441\n",
      "2786/6056, train_loss: 0.1892\n",
      "2787/6056, train_loss: 0.1431\n",
      "2788/6056, train_loss: 0.2513\n",
      "2789/6056, train_loss: 0.1846\n",
      "2790/6056, train_loss: 0.2527\n",
      "2791/6056, train_loss: 0.3416\n",
      "2792/6056, train_loss: 0.2225\n",
      "2793/6056, train_loss: 0.1200\n",
      "2794/6056, train_loss: 0.1987\n",
      "2795/6056, train_loss: 0.1159\n",
      "2796/6056, train_loss: 0.1825\n",
      "2797/6056, train_loss: 0.2262\n",
      "2798/6056, train_loss: 0.1506\n",
      "2799/6056, train_loss: 0.2336\n",
      "2800/6056, train_loss: 0.1655\n",
      "2801/6056, train_loss: 0.2169\n",
      "2802/6056, train_loss: 0.2252\n",
      "2803/6056, train_loss: 0.1082\n",
      "2804/6056, train_loss: 0.2420\n",
      "2805/6056, train_loss: 0.2128\n",
      "2806/6056, train_loss: 0.1575\n",
      "2807/6056, train_loss: 0.1596\n",
      "2808/6056, train_loss: 0.1068\n",
      "2809/6056, train_loss: 0.2282\n",
      "2810/6056, train_loss: 0.2257\n",
      "2811/6056, train_loss: 0.1361\n",
      "2812/6056, train_loss: 0.1718\n",
      "2813/6056, train_loss: 0.2642\n",
      "2814/6056, train_loss: 0.2316\n",
      "2815/6056, train_loss: 0.1564\n",
      "2816/6056, train_loss: 0.1298\n",
      "2817/6056, train_loss: 0.2957\n",
      "2818/6056, train_loss: 0.2933\n",
      "2819/6056, train_loss: 0.1422\n",
      "2820/6056, train_loss: 0.2252\n",
      "2821/6056, train_loss: 0.1717\n",
      "2822/6056, train_loss: 0.2107\n",
      "2823/6056, train_loss: 0.2490\n",
      "2824/6056, train_loss: 0.1381\n",
      "2825/6056, train_loss: 0.2680\n",
      "2826/6056, train_loss: 0.1988\n",
      "2827/6056, train_loss: 0.1816\n",
      "2828/6056, train_loss: 0.2818\n",
      "2829/6056, train_loss: 0.1599\n",
      "2830/6056, train_loss: 0.2944\n",
      "2831/6056, train_loss: 0.2013\n",
      "2832/6056, train_loss: 0.2106\n",
      "2833/6056, train_loss: 0.1729\n",
      "2834/6056, train_loss: 0.2683\n",
      "2835/6056, train_loss: 0.2652\n",
      "2836/6056, train_loss: 0.1584\n",
      "2837/6056, train_loss: 0.1441\n",
      "2838/6056, train_loss: 0.1868\n",
      "2839/6056, train_loss: 0.2657\n",
      "2840/6056, train_loss: 0.1759\n",
      "2841/6056, train_loss: 0.2088\n",
      "2842/6056, train_loss: 0.1218\n",
      "2843/6056, train_loss: 0.1179\n",
      "2844/6056, train_loss: 0.2399\n",
      "2845/6056, train_loss: 0.2942\n",
      "2846/6056, train_loss: 0.1861\n",
      "2847/6056, train_loss: 0.1832\n",
      "2848/6056, train_loss: 0.1383\n",
      "2849/6056, train_loss: 0.1548\n",
      "2850/6056, train_loss: 0.2542\n",
      "2851/6056, train_loss: 0.2183\n",
      "2852/6056, train_loss: 0.2400\n",
      "2853/6056, train_loss: 0.1255\n",
      "2854/6056, train_loss: 0.1406\n",
      "2855/6056, train_loss: 0.2524\n",
      "2856/6056, train_loss: 0.1850\n",
      "2857/6056, train_loss: 0.1920\n",
      "2858/6056, train_loss: 0.2096\n",
      "2859/6056, train_loss: 0.2827\n",
      "2860/6056, train_loss: 0.1871\n",
      "2861/6056, train_loss: 0.0846\n",
      "2862/6056, train_loss: 0.1103\n",
      "2863/6056, train_loss: 0.1434\n",
      "2864/6056, train_loss: 0.1370\n",
      "2865/6056, train_loss: 0.1833\n",
      "2866/6056, train_loss: 0.1520\n",
      "2867/6056, train_loss: 0.2472\n",
      "2868/6056, train_loss: 0.1779\n",
      "2869/6056, train_loss: 0.1643\n",
      "2870/6056, train_loss: 0.2804\n",
      "2871/6056, train_loss: 0.1149\n",
      "2872/6056, train_loss: 0.1561\n",
      "2873/6056, train_loss: 0.2656\n",
      "2874/6056, train_loss: 0.1638\n",
      "2875/6056, train_loss: 0.1734\n",
      "2876/6056, train_loss: 0.1850\n",
      "2877/6056, train_loss: 0.1908\n",
      "2878/6056, train_loss: 0.1882\n",
      "2879/6056, train_loss: 0.2102\n",
      "2880/6056, train_loss: 0.1249\n",
      "2881/6056, train_loss: 0.2183\n",
      "2882/6056, train_loss: 0.1487\n",
      "2883/6056, train_loss: 0.1729\n",
      "2884/6056, train_loss: 0.1608\n",
      "2885/6056, train_loss: 0.2052\n",
      "2886/6056, train_loss: 0.2487\n",
      "2887/6056, train_loss: 0.2127\n",
      "2888/6056, train_loss: 0.1411\n",
      "2889/6056, train_loss: 0.2487\n",
      "2890/6056, train_loss: 0.1831\n",
      "2891/6056, train_loss: 0.1541\n",
      "2892/6056, train_loss: 0.1054\n",
      "2893/6056, train_loss: 0.1783\n",
      "2894/6056, train_loss: 0.2439\n",
      "2895/6056, train_loss: 0.2012\n",
      "2896/6056, train_loss: 0.3041\n",
      "2897/6056, train_loss: 0.1705\n",
      "2898/6056, train_loss: 0.1650\n",
      "2899/6056, train_loss: 0.2246\n",
      "2900/6056, train_loss: 0.1873\n",
      "2901/6056, train_loss: 0.1470\n",
      "2902/6056, train_loss: 0.1885\n",
      "2903/6056, train_loss: 0.1801\n",
      "2904/6056, train_loss: 0.2122\n",
      "2905/6056, train_loss: 0.1748\n",
      "2906/6056, train_loss: 0.3010\n",
      "2907/6056, train_loss: 0.1750\n",
      "2908/6056, train_loss: 0.1698\n",
      "2909/6056, train_loss: 0.2049\n",
      "2910/6056, train_loss: 0.2233\n",
      "2911/6056, train_loss: 0.1732\n",
      "2912/6056, train_loss: 0.2507\n",
      "2913/6056, train_loss: 0.1890\n",
      "2914/6056, train_loss: 0.2080\n",
      "2915/6056, train_loss: 0.2047\n",
      "2916/6056, train_loss: 0.1659\n",
      "2917/6056, train_loss: 0.1586\n",
      "2918/6056, train_loss: 0.2445\n",
      "2919/6056, train_loss: 0.1495\n",
      "2920/6056, train_loss: 0.1162\n",
      "2921/6056, train_loss: 0.2123\n",
      "2922/6056, train_loss: 0.2537\n",
      "2923/6056, train_loss: 0.1136\n",
      "2924/6056, train_loss: 0.1161\n",
      "2925/6056, train_loss: 0.1502\n",
      "2926/6056, train_loss: 0.1913\n",
      "2927/6056, train_loss: 0.2118\n",
      "2928/6056, train_loss: 0.1256\n",
      "2929/6056, train_loss: 0.1849\n",
      "2930/6056, train_loss: 0.1995\n",
      "2931/6056, train_loss: 0.1520\n",
      "2932/6056, train_loss: 0.2783\n",
      "2933/6056, train_loss: 0.1849\n",
      "2934/6056, train_loss: 0.2708\n",
      "2935/6056, train_loss: 0.1014\n",
      "2936/6056, train_loss: 0.1528\n",
      "2937/6056, train_loss: 0.2109\n",
      "2938/6056, train_loss: 0.2131\n",
      "2939/6056, train_loss: 0.1221\n",
      "2940/6056, train_loss: 0.2388\n",
      "2941/6056, train_loss: 0.1774\n",
      "2942/6056, train_loss: 0.1708\n",
      "2943/6056, train_loss: 0.1491\n",
      "2944/6056, train_loss: 0.1492\n",
      "2945/6056, train_loss: 0.1200\n",
      "2946/6056, train_loss: 0.1110\n",
      "2947/6056, train_loss: 0.1779\n",
      "2948/6056, train_loss: 0.2412\n",
      "2949/6056, train_loss: 0.2135\n",
      "2950/6056, train_loss: 0.1789\n",
      "2951/6056, train_loss: 0.1892\n",
      "2952/6056, train_loss: 0.1739\n",
      "2953/6056, train_loss: 0.1746\n",
      "2954/6056, train_loss: 0.2157\n",
      "2955/6056, train_loss: 0.1540\n",
      "2956/6056, train_loss: 0.1496\n",
      "2957/6056, train_loss: 0.2382\n",
      "2958/6056, train_loss: 0.2064\n",
      "2959/6056, train_loss: 0.1244\n",
      "2960/6056, train_loss: 0.1639\n",
      "2961/6056, train_loss: 0.1777\n",
      "2962/6056, train_loss: 0.1168\n",
      "2963/6056, train_loss: 0.0825\n",
      "2964/6056, train_loss: 0.2039\n",
      "2965/6056, train_loss: 0.2165\n",
      "2966/6056, train_loss: 0.1377\n",
      "2967/6056, train_loss: 0.3359\n",
      "2968/6056, train_loss: 0.2578\n",
      "2969/6056, train_loss: 0.1634\n",
      "2970/6056, train_loss: 0.2028\n",
      "2971/6056, train_loss: 0.2052\n",
      "2972/6056, train_loss: 0.1257\n",
      "2973/6056, train_loss: 0.1635\n",
      "2974/6056, train_loss: 0.1950\n",
      "2975/6056, train_loss: 0.2690\n",
      "2976/6056, train_loss: 0.1963\n",
      "2977/6056, train_loss: 0.2814\n",
      "2978/6056, train_loss: 0.2393\n",
      "2979/6056, train_loss: 0.1893\n",
      "2980/6056, train_loss: 0.1949\n",
      "2981/6056, train_loss: 0.2237\n",
      "2982/6056, train_loss: 0.2184\n",
      "2983/6056, train_loss: 0.1843\n",
      "2984/6056, train_loss: 0.3029\n",
      "2985/6056, train_loss: 0.1832\n",
      "2986/6056, train_loss: 0.1719\n",
      "2987/6056, train_loss: 0.2403\n",
      "2988/6056, train_loss: 0.1585\n",
      "2989/6056, train_loss: 0.1146\n",
      "2990/6056, train_loss: 0.2089\n",
      "2991/6056, train_loss: 0.1968\n",
      "2992/6056, train_loss: 0.1828\n",
      "2993/6056, train_loss: 0.1782\n",
      "2994/6056, train_loss: 0.1906\n",
      "2995/6056, train_loss: 0.2132\n",
      "2996/6056, train_loss: 0.2027\n",
      "2997/6056, train_loss: 0.2295\n",
      "2998/6056, train_loss: 0.1322\n",
      "2999/6056, train_loss: 0.2061\n",
      "3000/6056, train_loss: 0.1578\n",
      "3001/6056, train_loss: 0.2096\n",
      "3002/6056, train_loss: 0.1543\n",
      "3003/6056, train_loss: 0.2469\n",
      "3004/6056, train_loss: 0.2212\n",
      "3005/6056, train_loss: 0.1483\n",
      "3006/6056, train_loss: 0.1778\n",
      "3007/6056, train_loss: 0.2153\n",
      "3008/6056, train_loss: 0.1564\n",
      "3009/6056, train_loss: 0.1705\n",
      "3010/6056, train_loss: 0.1403\n",
      "3011/6056, train_loss: 0.2031\n",
      "3012/6056, train_loss: 0.1484\n",
      "3013/6056, train_loss: 0.2256\n",
      "3014/6056, train_loss: 0.2760\n",
      "3015/6056, train_loss: 0.1919\n",
      "3016/6056, train_loss: 0.1200\n",
      "3017/6056, train_loss: 0.2084\n",
      "3018/6056, train_loss: 0.1492\n",
      "3019/6056, train_loss: 0.1987\n",
      "3020/6056, train_loss: 0.1929\n",
      "3021/6056, train_loss: 0.2004\n",
      "3022/6056, train_loss: 0.1373\n",
      "3023/6056, train_loss: 0.1333\n",
      "3024/6056, train_loss: 0.1617\n",
      "3025/6056, train_loss: 0.1899\n",
      "3026/6056, train_loss: 0.3196\n",
      "3027/6056, train_loss: 0.1652\n",
      "3028/6056, train_loss: 0.2274\n",
      "3029/6056, train_loss: 0.1857\n",
      "3030/6056, train_loss: 0.2412\n",
      "3031/6056, train_loss: 0.0908\n",
      "3032/6056, train_loss: 0.2232\n",
      "3033/6056, train_loss: 0.1334\n",
      "3034/6056, train_loss: 0.1958\n",
      "3035/6056, train_loss: 0.1892\n",
      "3036/6056, train_loss: 0.2176\n",
      "3037/6056, train_loss: 0.1160\n",
      "3038/6056, train_loss: 0.2960\n",
      "3039/6056, train_loss: 0.2094\n",
      "3040/6056, train_loss: 0.2176\n",
      "3041/6056, train_loss: 0.1908\n",
      "3042/6056, train_loss: 0.2167\n",
      "3043/6056, train_loss: 0.1662\n",
      "3044/6056, train_loss: 0.2493\n",
      "3045/6056, train_loss: 0.2013\n",
      "3046/6056, train_loss: 0.1177\n",
      "3047/6056, train_loss: 0.1730\n",
      "3048/6056, train_loss: 0.0835\n",
      "3049/6056, train_loss: 0.1677\n",
      "3050/6056, train_loss: 0.2442\n",
      "3051/6056, train_loss: 0.2067\n",
      "3052/6056, train_loss: 0.1730\n",
      "3053/6056, train_loss: 0.1329\n",
      "3054/6056, train_loss: 0.1723\n",
      "3055/6056, train_loss: 0.2458\n",
      "3056/6056, train_loss: 0.2729\n",
      "3057/6056, train_loss: 0.2115\n",
      "3058/6056, train_loss: 0.1537\n",
      "3059/6056, train_loss: 0.1485\n",
      "3060/6056, train_loss: 0.2143\n",
      "3061/6056, train_loss: 0.1941\n",
      "3062/6056, train_loss: 0.1274\n",
      "3063/6056, train_loss: 0.2448\n",
      "3064/6056, train_loss: 0.1716\n",
      "3065/6056, train_loss: 0.1465\n",
      "3066/6056, train_loss: 0.2329\n",
      "3067/6056, train_loss: 0.2297\n",
      "3068/6056, train_loss: 0.1472\n",
      "3069/6056, train_loss: 0.1265\n",
      "3070/6056, train_loss: 0.2437\n",
      "3071/6056, train_loss: 0.1444\n",
      "3072/6056, train_loss: 0.2574\n",
      "3073/6056, train_loss: 0.2095\n",
      "3074/6056, train_loss: 0.1775\n",
      "3075/6056, train_loss: 0.1959\n",
      "3076/6056, train_loss: 0.1822\n",
      "3077/6056, train_loss: 0.1388\n",
      "3078/6056, train_loss: 0.2473\n",
      "3079/6056, train_loss: 0.1551\n",
      "3080/6056, train_loss: 0.0936\n",
      "3081/6056, train_loss: 0.1226\n",
      "3082/6056, train_loss: 0.1197\n",
      "3083/6056, train_loss: 0.1543\n",
      "3084/6056, train_loss: 0.1649\n",
      "3085/6056, train_loss: 0.2297\n",
      "3086/6056, train_loss: 0.1510\n",
      "3087/6056, train_loss: 0.1908\n",
      "3088/6056, train_loss: 0.1210\n",
      "3089/6056, train_loss: 0.1271\n",
      "3090/6056, train_loss: 0.1883\n",
      "3091/6056, train_loss: 0.1837\n",
      "3092/6056, train_loss: 0.2171\n",
      "3093/6056, train_loss: 0.1004\n",
      "3094/6056, train_loss: 0.1044\n",
      "3095/6056, train_loss: 0.1526\n",
      "3096/6056, train_loss: 0.3606\n",
      "3097/6056, train_loss: 0.1629\n",
      "3098/6056, train_loss: 0.1914\n",
      "3099/6056, train_loss: 0.1952\n",
      "3100/6056, train_loss: 0.1787\n",
      "3101/6056, train_loss: 0.1903\n",
      "3102/6056, train_loss: 0.1836\n",
      "3103/6056, train_loss: 0.1550\n",
      "3104/6056, train_loss: 0.2926\n",
      "3105/6056, train_loss: 0.2018\n",
      "3106/6056, train_loss: 0.1132\n",
      "3107/6056, train_loss: 0.1527\n",
      "3108/6056, train_loss: 0.1993\n",
      "3109/6056, train_loss: 0.2606\n",
      "3110/6056, train_loss: 0.1685\n",
      "3111/6056, train_loss: 0.2054\n",
      "3112/6056, train_loss: 0.1614\n",
      "3113/6056, train_loss: 0.1538\n",
      "3114/6056, train_loss: 0.2071\n",
      "3115/6056, train_loss: 0.1863\n",
      "3116/6056, train_loss: 0.1559\n",
      "3117/6056, train_loss: 0.1836\n",
      "3118/6056, train_loss: 0.1208\n",
      "3119/6056, train_loss: 0.1869\n",
      "3120/6056, train_loss: 0.3119\n",
      "3121/6056, train_loss: 0.1588\n",
      "3122/6056, train_loss: 0.1115\n",
      "3123/6056, train_loss: 0.1745\n",
      "3124/6056, train_loss: 0.1802\n",
      "3125/6056, train_loss: 0.2815\n",
      "3126/6056, train_loss: 0.1982\n",
      "3127/6056, train_loss: 0.2312\n",
      "3128/6056, train_loss: 0.2396\n",
      "3129/6056, train_loss: 0.2126\n",
      "3130/6056, train_loss: 0.2265\n",
      "3131/6056, train_loss: 0.2299\n",
      "3132/6056, train_loss: 0.1272\n",
      "3133/6056, train_loss: 0.1587\n",
      "3134/6056, train_loss: 0.2054\n",
      "3135/6056, train_loss: 0.1455\n",
      "3136/6056, train_loss: 0.2403\n",
      "3137/6056, train_loss: 0.1530\n",
      "3138/6056, train_loss: 0.1489\n",
      "3139/6056, train_loss: 0.1349\n",
      "3140/6056, train_loss: 0.1516\n",
      "3141/6056, train_loss: 0.1468\n",
      "3142/6056, train_loss: 0.2629\n",
      "3143/6056, train_loss: 0.1796\n",
      "3144/6056, train_loss: 0.2414\n",
      "3145/6056, train_loss: 0.1174\n",
      "3146/6056, train_loss: 0.1094\n",
      "3147/6056, train_loss: 0.2249\n",
      "3148/6056, train_loss: 0.1976\n",
      "3149/6056, train_loss: 0.2177\n",
      "3150/6056, train_loss: 0.2660\n",
      "3151/6056, train_loss: 0.1843\n",
      "3152/6056, train_loss: 0.2659\n",
      "3153/6056, train_loss: 0.2064\n",
      "3154/6056, train_loss: 0.1410\n",
      "3155/6056, train_loss: 0.1341\n",
      "3156/6056, train_loss: 0.1930\n",
      "3157/6056, train_loss: 0.1284\n",
      "3158/6056, train_loss: 0.2630\n",
      "3159/6056, train_loss: 0.1184\n",
      "3160/6056, train_loss: 0.2121\n",
      "3161/6056, train_loss: 0.1770\n",
      "3162/6056, train_loss: 0.1232\n",
      "3163/6056, train_loss: 0.2126\n",
      "3164/6056, train_loss: 0.2713\n",
      "3165/6056, train_loss: 0.1382\n",
      "3166/6056, train_loss: 0.1471\n",
      "3167/6056, train_loss: 0.3143\n",
      "3168/6056, train_loss: 0.1552\n",
      "3169/6056, train_loss: 0.1598\n",
      "3170/6056, train_loss: 0.2892\n",
      "3171/6056, train_loss: 0.1246\n",
      "3172/6056, train_loss: 0.2174\n",
      "3173/6056, train_loss: 0.3278\n",
      "3174/6056, train_loss: 0.2628\n",
      "3175/6056, train_loss: 0.2121\n",
      "3176/6056, train_loss: 0.1575\n",
      "3177/6056, train_loss: 0.1311\n",
      "3178/6056, train_loss: 0.1514\n",
      "3179/6056, train_loss: 0.1541\n",
      "3180/6056, train_loss: 0.1650\n",
      "3181/6056, train_loss: 0.1744\n",
      "3182/6056, train_loss: 0.1886\n",
      "3183/6056, train_loss: 0.2641\n",
      "3184/6056, train_loss: 0.1402\n",
      "3185/6056, train_loss: 0.1741\n",
      "3186/6056, train_loss: 0.2770\n",
      "3187/6056, train_loss: 0.1486\n",
      "3188/6056, train_loss: 0.3152\n",
      "3189/6056, train_loss: 0.1378\n",
      "3190/6056, train_loss: 0.1172\n",
      "3191/6056, train_loss: 0.2482\n",
      "3192/6056, train_loss: 0.1945\n",
      "3193/6056, train_loss: 0.1737\n",
      "3194/6056, train_loss: 0.2179\n",
      "3195/6056, train_loss: 0.2053\n",
      "3196/6056, train_loss: 0.2184\n",
      "3197/6056, train_loss: 0.2063\n",
      "3198/6056, train_loss: 0.1799\n",
      "3199/6056, train_loss: 0.1884\n",
      "3200/6056, train_loss: 0.1893\n",
      "3201/6056, train_loss: 0.1729\n",
      "3202/6056, train_loss: 0.1481\n",
      "3203/6056, train_loss: 0.1820\n",
      "3204/6056, train_loss: 0.2570\n",
      "3205/6056, train_loss: 0.2462\n",
      "3206/6056, train_loss: 0.2120\n",
      "3207/6056, train_loss: 0.2353\n",
      "3208/6056, train_loss: 0.2361\n",
      "3209/6056, train_loss: 0.1608\n",
      "3210/6056, train_loss: 0.1794\n",
      "3211/6056, train_loss: 0.1708\n",
      "3212/6056, train_loss: 0.1518\n",
      "3213/6056, train_loss: 0.1952\n",
      "3214/6056, train_loss: 0.2575\n",
      "3215/6056, train_loss: 0.0936\n",
      "3216/6056, train_loss: 0.1534\n",
      "3217/6056, train_loss: 0.2147\n",
      "3218/6056, train_loss: 0.1655\n",
      "3219/6056, train_loss: 0.2111\n",
      "3220/6056, train_loss: 0.2087\n",
      "3221/6056, train_loss: 0.1742\n",
      "3222/6056, train_loss: 0.2227\n",
      "3223/6056, train_loss: 0.2436\n",
      "3224/6056, train_loss: 0.1686\n",
      "3225/6056, train_loss: 0.1143\n",
      "3226/6056, train_loss: 0.1944\n",
      "3227/6056, train_loss: 0.1470\n",
      "3228/6056, train_loss: 0.1732\n",
      "3229/6056, train_loss: 0.1095\n",
      "3230/6056, train_loss: 0.1810\n",
      "3231/6056, train_loss: 0.1301\n",
      "3232/6056, train_loss: 0.2351\n",
      "3233/6056, train_loss: 0.1978\n",
      "3234/6056, train_loss: 0.2090\n",
      "3235/6056, train_loss: 0.2602\n",
      "3236/6056, train_loss: 0.2389\n",
      "3237/6056, train_loss: 0.3209\n",
      "3238/6056, train_loss: 0.2055\n",
      "3239/6056, train_loss: 0.2232\n",
      "3240/6056, train_loss: 0.2144\n",
      "3241/6056, train_loss: 0.1521\n",
      "3242/6056, train_loss: 0.2157\n",
      "3243/6056, train_loss: 0.1881\n",
      "3244/6056, train_loss: 0.1581\n",
      "3245/6056, train_loss: 0.1754\n",
      "3246/6056, train_loss: 0.1590\n",
      "3247/6056, train_loss: 0.2694\n",
      "3248/6056, train_loss: 0.1762\n",
      "3249/6056, train_loss: 0.1047\n",
      "3250/6056, train_loss: 0.1510\n",
      "3251/6056, train_loss: 0.1648\n",
      "3252/6056, train_loss: 0.1408\n",
      "3253/6056, train_loss: 0.1532\n",
      "3254/6056, train_loss: 0.1328\n",
      "3255/6056, train_loss: 0.2234\n",
      "3256/6056, train_loss: 0.1296\n",
      "3257/6056, train_loss: 0.2860\n",
      "3258/6056, train_loss: 0.1913\n",
      "3259/6056, train_loss: 0.2237\n",
      "3260/6056, train_loss: 0.0837\n",
      "3261/6056, train_loss: 0.1526\n",
      "3262/6056, train_loss: 0.1934\n",
      "3263/6056, train_loss: 0.2414\n",
      "3264/6056, train_loss: 0.0980\n",
      "3265/6056, train_loss: 0.2142\n",
      "3266/6056, train_loss: 0.1329\n",
      "3267/6056, train_loss: 0.1840\n",
      "3268/6056, train_loss: 0.2373\n",
      "3269/6056, train_loss: 0.1773\n",
      "3270/6056, train_loss: 0.1010\n",
      "3271/6056, train_loss: 0.1249\n",
      "3272/6056, train_loss: 0.1384\n",
      "3273/6056, train_loss: 0.2355\n",
      "3274/6056, train_loss: 0.1994\n",
      "3275/6056, train_loss: 0.1564\n",
      "3276/6056, train_loss: 0.2366\n",
      "3277/6056, train_loss: 0.1956\n",
      "3278/6056, train_loss: 0.1546\n",
      "3279/6056, train_loss: 0.2115\n",
      "3280/6056, train_loss: 0.2819\n",
      "3281/6056, train_loss: 0.2151\n",
      "3282/6056, train_loss: 0.2573\n",
      "3283/6056, train_loss: 0.1749\n",
      "3284/6056, train_loss: 0.1664\n",
      "3285/6056, train_loss: 0.1142\n",
      "3286/6056, train_loss: 0.1917\n",
      "3287/6056, train_loss: 0.2390\n",
      "3288/6056, train_loss: 0.1137\n",
      "3289/6056, train_loss: 0.2830\n",
      "3290/6056, train_loss: 0.2263\n",
      "3291/6056, train_loss: 0.2431\n",
      "3292/6056, train_loss: 0.1597\n",
      "3293/6056, train_loss: 0.2518\n",
      "3294/6056, train_loss: 0.1236\n",
      "3295/6056, train_loss: 0.1320\n",
      "3296/6056, train_loss: 0.2006\n",
      "3297/6056, train_loss: 0.2168\n",
      "3298/6056, train_loss: 0.1867\n",
      "3299/6056, train_loss: 0.2406\n",
      "3300/6056, train_loss: 0.1167\n",
      "3301/6056, train_loss: 0.3214\n",
      "3302/6056, train_loss: 0.1381\n",
      "3303/6056, train_loss: 0.1459\n",
      "3304/6056, train_loss: 0.2337\n",
      "3305/6056, train_loss: 0.2345\n",
      "3306/6056, train_loss: 0.1541\n",
      "3307/6056, train_loss: 0.2050\n",
      "3308/6056, train_loss: 0.1874\n",
      "3309/6056, train_loss: 0.1806\n",
      "3310/6056, train_loss: 0.2788\n",
      "3311/6056, train_loss: 0.2757\n",
      "3312/6056, train_loss: 0.1777\n",
      "3313/6056, train_loss: 0.1341\n",
      "3314/6056, train_loss: 0.3077\n",
      "3315/6056, train_loss: 0.1892\n",
      "3316/6056, train_loss: 0.1941\n",
      "3317/6056, train_loss: 0.1649\n",
      "3318/6056, train_loss: 0.2837\n",
      "3319/6056, train_loss: 0.2232\n",
      "3320/6056, train_loss: 0.2221\n",
      "3321/6056, train_loss: 0.2195\n",
      "3322/6056, train_loss: 0.1333\n",
      "3323/6056, train_loss: 0.1694\n",
      "3324/6056, train_loss: 0.2354\n",
      "3325/6056, train_loss: 0.3048\n",
      "3326/6056, train_loss: 0.2956\n",
      "3327/6056, train_loss: 0.1461\n",
      "3328/6056, train_loss: 0.1849\n",
      "3329/6056, train_loss: 0.2672\n",
      "3330/6056, train_loss: 0.1236\n",
      "3331/6056, train_loss: 0.1134\n",
      "3332/6056, train_loss: 0.1986\n",
      "3333/6056, train_loss: 0.1663\n",
      "3334/6056, train_loss: 0.1821\n",
      "3335/6056, train_loss: 0.2318\n",
      "3336/6056, train_loss: 0.2416\n",
      "3337/6056, train_loss: 0.1816\n",
      "3338/6056, train_loss: 0.1705\n",
      "3339/6056, train_loss: 0.1318\n",
      "3340/6056, train_loss: 0.1898\n",
      "3341/6056, train_loss: 0.1593\n",
      "3342/6056, train_loss: 0.1186\n",
      "3343/6056, train_loss: 0.2870\n",
      "3344/6056, train_loss: 0.1455\n",
      "3345/6056, train_loss: 0.0974\n",
      "3346/6056, train_loss: 0.1675\n",
      "3347/6056, train_loss: 0.1442\n",
      "3348/6056, train_loss: 0.0951\n",
      "3349/6056, train_loss: 0.2321\n",
      "3350/6056, train_loss: 0.2289\n",
      "3351/6056, train_loss: 0.1636\n",
      "3352/6056, train_loss: 0.3088\n",
      "3353/6056, train_loss: 0.0842\n",
      "3354/6056, train_loss: 0.2405\n",
      "3355/6056, train_loss: 0.1599\n",
      "3356/6056, train_loss: 0.2201\n",
      "3357/6056, train_loss: 0.1238\n",
      "3358/6056, train_loss: 0.1966\n",
      "3359/6056, train_loss: 0.1475\n",
      "3360/6056, train_loss: 0.1467\n",
      "3361/6056, train_loss: 0.1567\n",
      "3362/6056, train_loss: 0.2125\n",
      "3363/6056, train_loss: 0.1754\n",
      "3364/6056, train_loss: 0.2213\n",
      "3365/6056, train_loss: 0.1797\n",
      "3366/6056, train_loss: 0.3334\n",
      "3367/6056, train_loss: 0.1324\n",
      "3368/6056, train_loss: 0.1146\n",
      "3369/6056, train_loss: 0.2474\n",
      "3370/6056, train_loss: 0.2253\n",
      "3371/6056, train_loss: 0.1465\n",
      "3372/6056, train_loss: 0.2698\n",
      "3373/6056, train_loss: 0.1511\n",
      "3374/6056, train_loss: 0.1495\n",
      "3375/6056, train_loss: 0.1507\n",
      "3376/6056, train_loss: 0.2137\n",
      "3377/6056, train_loss: 0.2087\n",
      "3378/6056, train_loss: 0.2491\n",
      "3379/6056, train_loss: 0.1007\n",
      "3380/6056, train_loss: 0.2967\n",
      "3381/6056, train_loss: 0.1651\n",
      "3382/6056, train_loss: 0.1013\n",
      "3383/6056, train_loss: 0.2243\n",
      "3384/6056, train_loss: 0.1650\n",
      "3385/6056, train_loss: 0.2053\n",
      "3386/6056, train_loss: 0.1500\n",
      "3387/6056, train_loss: 0.1243\n",
      "3388/6056, train_loss: 0.1713\n",
      "3389/6056, train_loss: 0.1756\n",
      "3390/6056, train_loss: 0.1521\n",
      "3391/6056, train_loss: 0.1932\n",
      "3392/6056, train_loss: 0.1129\n",
      "3393/6056, train_loss: 0.1480\n",
      "3394/6056, train_loss: 0.1455\n",
      "3395/6056, train_loss: 0.1218\n",
      "3396/6056, train_loss: 0.1789\n",
      "3397/6056, train_loss: 0.1737\n",
      "3398/6056, train_loss: 0.1954\n",
      "3399/6056, train_loss: 0.2180\n",
      "3400/6056, train_loss: 0.2499\n",
      "3401/6056, train_loss: 0.0991\n",
      "3402/6056, train_loss: 0.2628\n",
      "3403/6056, train_loss: 0.1107\n",
      "3404/6056, train_loss: 0.1576\n",
      "3405/6056, train_loss: 0.1358\n",
      "3406/6056, train_loss: 0.1236\n",
      "3407/6056, train_loss: 0.1390\n",
      "3408/6056, train_loss: 0.1157\n",
      "3409/6056, train_loss: 0.1437\n",
      "3410/6056, train_loss: 0.1954\n",
      "3411/6056, train_loss: 0.1513\n",
      "3412/6056, train_loss: 0.1799\n",
      "3413/6056, train_loss: 0.1754\n",
      "3414/6056, train_loss: 0.1655\n",
      "3415/6056, train_loss: 0.2182\n",
      "3416/6056, train_loss: 0.1269\n",
      "3417/6056, train_loss: 0.1826\n",
      "3418/6056, train_loss: 0.2620\n",
      "3419/6056, train_loss: 0.1978\n",
      "3420/6056, train_loss: 0.1746\n",
      "3421/6056, train_loss: 0.1508\n",
      "3422/6056, train_loss: 0.1904\n",
      "3423/6056, train_loss: 0.2032\n",
      "3424/6056, train_loss: 0.2013\n",
      "3425/6056, train_loss: 0.1833\n",
      "3426/6056, train_loss: 0.2073\n",
      "3427/6056, train_loss: 0.2592\n",
      "3428/6056, train_loss: 0.1779\n",
      "3429/6056, train_loss: 0.1060\n",
      "3430/6056, train_loss: 0.1180\n",
      "3431/6056, train_loss: 0.2213\n",
      "3432/6056, train_loss: 0.1656\n",
      "3433/6056, train_loss: 0.1403\n",
      "3434/6056, train_loss: 0.2002\n",
      "3435/6056, train_loss: 0.1701\n",
      "3436/6056, train_loss: 0.1690\n",
      "3437/6056, train_loss: 0.2295\n",
      "3438/6056, train_loss: 0.2374\n",
      "3439/6056, train_loss: 0.0968\n",
      "3440/6056, train_loss: 0.3618\n",
      "3441/6056, train_loss: 0.2150\n",
      "3442/6056, train_loss: 0.2066\n",
      "3443/6056, train_loss: 0.2343\n",
      "3444/6056, train_loss: 0.1600\n",
      "3445/6056, train_loss: 0.2750\n",
      "3446/6056, train_loss: 0.2034\n",
      "3447/6056, train_loss: 0.1323\n",
      "3448/6056, train_loss: 0.1956\n",
      "3449/6056, train_loss: 0.1283\n",
      "3450/6056, train_loss: 0.3322\n",
      "3451/6056, train_loss: 0.1979\n",
      "3452/6056, train_loss: 0.1768\n",
      "3453/6056, train_loss: 0.1676\n",
      "3454/6056, train_loss: 0.2059\n",
      "3455/6056, train_loss: 0.2111\n",
      "3456/6056, train_loss: 0.1448\n",
      "3457/6056, train_loss: 0.1294\n",
      "3458/6056, train_loss: 0.1595\n",
      "3459/6056, train_loss: 0.2020\n",
      "3460/6056, train_loss: 0.1449\n",
      "3461/6056, train_loss: 0.2188\n",
      "3462/6056, train_loss: 0.2174\n",
      "3463/6056, train_loss: 0.1388\n",
      "3464/6056, train_loss: 0.2487\n",
      "3465/6056, train_loss: 0.1386\n",
      "3466/6056, train_loss: 0.1124\n",
      "3467/6056, train_loss: 0.2200\n",
      "3468/6056, train_loss: 0.1657\n",
      "3469/6056, train_loss: 0.1365\n",
      "3470/6056, train_loss: 0.2985\n",
      "3471/6056, train_loss: 0.1942\n",
      "3472/6056, train_loss: 0.1993\n",
      "3473/6056, train_loss: 0.1453\n",
      "3474/6056, train_loss: 0.1719\n",
      "3475/6056, train_loss: 0.2156\n",
      "3476/6056, train_loss: 0.1993\n",
      "3477/6056, train_loss: 0.1307\n",
      "3478/6056, train_loss: 0.1753\n",
      "3479/6056, train_loss: 0.1397\n",
      "3480/6056, train_loss: 0.1450\n",
      "3481/6056, train_loss: 0.2402\n",
      "3482/6056, train_loss: 0.1212\n",
      "3483/6056, train_loss: 0.2412\n",
      "3484/6056, train_loss: 0.2160\n",
      "3485/6056, train_loss: 0.1418\n",
      "3486/6056, train_loss: 0.1071\n",
      "3487/6056, train_loss: 0.2702\n",
      "3488/6056, train_loss: 0.0995\n",
      "3489/6056, train_loss: 0.1839\n",
      "3490/6056, train_loss: 0.2179\n",
      "3491/6056, train_loss: 0.1453\n",
      "3492/6056, train_loss: 0.2168\n",
      "3493/6056, train_loss: 0.1591\n",
      "3494/6056, train_loss: 0.1955\n",
      "3495/6056, train_loss: 0.2466\n",
      "3496/6056, train_loss: 0.1797\n",
      "3497/6056, train_loss: 0.3046\n",
      "3498/6056, train_loss: 0.2544\n",
      "3499/6056, train_loss: 0.1709\n",
      "3500/6056, train_loss: 0.2604\n",
      "3501/6056, train_loss: 0.2538\n",
      "3502/6056, train_loss: 0.1584\n",
      "3503/6056, train_loss: 0.0919\n",
      "3504/6056, train_loss: 0.2160\n",
      "3505/6056, train_loss: 0.2574\n",
      "3506/6056, train_loss: 0.1756\n",
      "3507/6056, train_loss: 0.1897\n",
      "3508/6056, train_loss: 0.1871\n",
      "3509/6056, train_loss: 0.2000\n",
      "3510/6056, train_loss: 0.1746\n",
      "3511/6056, train_loss: 0.2166\n",
      "3512/6056, train_loss: 0.1682\n",
      "3513/6056, train_loss: 0.1812\n",
      "3514/6056, train_loss: 0.1733\n",
      "3515/6056, train_loss: 0.3447\n",
      "3516/6056, train_loss: 0.1712\n",
      "3517/6056, train_loss: 0.1782\n",
      "3518/6056, train_loss: 0.1890\n",
      "3519/6056, train_loss: 0.1471\n",
      "3520/6056, train_loss: 0.2458\n",
      "3521/6056, train_loss: 0.1564\n",
      "3522/6056, train_loss: 0.1995\n",
      "3523/6056, train_loss: 0.2139\n",
      "3524/6056, train_loss: 0.2135\n",
      "3525/6056, train_loss: 0.1950\n",
      "3526/6056, train_loss: 0.2293\n",
      "3527/6056, train_loss: 0.2544\n",
      "3528/6056, train_loss: 0.1550\n",
      "3529/6056, train_loss: 0.1907\n",
      "3530/6056, train_loss: 0.2127\n",
      "3531/6056, train_loss: 0.1837\n",
      "3532/6056, train_loss: 0.3293\n",
      "3533/6056, train_loss: 0.2030\n",
      "3534/6056, train_loss: 0.1912\n",
      "3535/6056, train_loss: 0.1822\n",
      "3536/6056, train_loss: 0.1960\n",
      "3537/6056, train_loss: 0.2293\n",
      "3538/6056, train_loss: 0.2087\n",
      "3539/6056, train_loss: 0.1515\n",
      "3540/6056, train_loss: 0.1498\n",
      "3541/6056, train_loss: 0.1677\n",
      "3542/6056, train_loss: 0.1211\n",
      "3543/6056, train_loss: 0.2789\n",
      "3544/6056, train_loss: 0.1487\n",
      "3545/6056, train_loss: 0.1762\n",
      "3546/6056, train_loss: 0.1851\n",
      "3547/6056, train_loss: 0.1466\n",
      "3548/6056, train_loss: 0.1909\n",
      "3549/6056, train_loss: 0.1598\n",
      "3550/6056, train_loss: 0.1467\n",
      "3551/6056, train_loss: 0.1952\n",
      "3552/6056, train_loss: 0.1475\n",
      "3553/6056, train_loss: 0.1795\n",
      "3554/6056, train_loss: 0.2320\n",
      "3555/6056, train_loss: 0.2156\n",
      "3556/6056, train_loss: 0.1893\n",
      "3557/6056, train_loss: 0.2505\n",
      "3583/6056, train_loss: 0.1848\n",
      "3584/6056, train_loss: 0.1321\n",
      "3585/6056, train_loss: 0.2133\n",
      "3586/6056, train_loss: 0.1218\n",
      "3587/6056, train_loss: 0.1988\n",
      "3588/6056, train_loss: 0.2333\n",
      "3589/6056, train_loss: 0.1811\n",
      "3590/6056, train_loss: 0.1739\n",
      "3591/6056, train_loss: 0.2263\n",
      "3592/6056, train_loss: 0.1944\n",
      "3593/6056, train_loss: 0.1520\n",
      "3594/6056, train_loss: 0.2084\n",
      "3595/6056, train_loss: 0.1709\n",
      "3596/6056, train_loss: 0.1779\n",
      "3597/6056, train_loss: 0.1595\n",
      "3598/6056, train_loss: 0.1927\n",
      "3599/6056, train_loss: 0.1283\n",
      "3600/6056, train_loss: 0.2767\n",
      "3601/6056, train_loss: 0.1817\n",
      "3602/6056, train_loss: 0.1165\n",
      "3603/6056, train_loss: 0.2181\n",
      "3604/6056, train_loss: 0.1039\n",
      "3605/6056, train_loss: 0.2783\n",
      "3606/6056, train_loss: 0.1704\n",
      "3607/6056, train_loss: 0.1417\n",
      "3608/6056, train_loss: 0.2007\n",
      "3609/6056, train_loss: 0.1775\n",
      "3610/6056, train_loss: 0.1186\n",
      "3611/6056, train_loss: 0.2234\n",
      "3612/6056, train_loss: 0.2217\n",
      "3613/6056, train_loss: 0.1549\n",
      "3614/6056, train_loss: 0.1209\n",
      "3615/6056, train_loss: 0.2270\n",
      "3616/6056, train_loss: 0.1978\n",
      "3617/6056, train_loss: 0.2016\n",
      "3618/6056, train_loss: 0.1728\n",
      "3619/6056, train_loss: 0.2561\n",
      "3620/6056, train_loss: 0.2461\n",
      "3621/6056, train_loss: 0.1997\n",
      "3622/6056, train_loss: 0.1669\n",
      "3623/6056, train_loss: 0.2050\n",
      "3624/6056, train_loss: 0.1786\n",
      "3625/6056, train_loss: 0.2318\n",
      "3626/6056, train_loss: 0.2261\n",
      "3627/6056, train_loss: 0.1930\n",
      "3628/6056, train_loss: 0.1045\n",
      "3629/6056, train_loss: 0.1522\n",
      "3630/6056, train_loss: 0.1706\n",
      "3631/6056, train_loss: 0.1748\n",
      "3632/6056, train_loss: 0.2904\n",
      "3633/6056, train_loss: 0.1471\n",
      "3634/6056, train_loss: 0.1511\n",
      "3635/6056, train_loss: 0.1292\n",
      "3636/6056, train_loss: 0.2277\n",
      "3637/6056, train_loss: 0.1969\n",
      "3638/6056, train_loss: 0.1182\n",
      "3639/6056, train_loss: 0.1707\n",
      "3640/6056, train_loss: 0.1260\n",
      "3641/6056, train_loss: 0.1743\n",
      "3642/6056, train_loss: 0.1853\n",
      "3643/6056, train_loss: 0.2034\n",
      "3644/6056, train_loss: 0.1399\n",
      "3645/6056, train_loss: 0.1137\n",
      "3646/6056, train_loss: 0.1279\n",
      "3647/6056, train_loss: 0.1158\n",
      "3648/6056, train_loss: 0.1950\n",
      "3649/6056, train_loss: 0.1363\n",
      "3650/6056, train_loss: 0.1888\n",
      "3651/6056, train_loss: 0.1432\n",
      "3652/6056, train_loss: 0.2302\n",
      "3653/6056, train_loss: 0.1666\n",
      "3654/6056, train_loss: 0.2184\n",
      "3655/6056, train_loss: 0.2349\n",
      "3656/6056, train_loss: 0.2115\n",
      "3657/6056, train_loss: 0.2084\n",
      "3658/6056, train_loss: 0.3067\n",
      "3659/6056, train_loss: 0.1547\n",
      "3660/6056, train_loss: 0.1930\n",
      "3661/6056, train_loss: 0.2444\n",
      "3662/6056, train_loss: 0.1792\n",
      "3663/6056, train_loss: 0.2102\n",
      "3664/6056, train_loss: 0.1694\n",
      "3665/6056, train_loss: 0.2031\n",
      "3666/6056, train_loss: 0.1640\n",
      "3667/6056, train_loss: 0.0998\n",
      "3668/6056, train_loss: 0.1419\n",
      "3669/6056, train_loss: 0.1518\n",
      "3670/6056, train_loss: 0.1610\n",
      "3671/6056, train_loss: 0.2296\n",
      "3672/6056, train_loss: 0.1913\n",
      "3673/6056, train_loss: 0.2043\n",
      "3674/6056, train_loss: 0.1686\n",
      "3675/6056, train_loss: 0.1253\n",
      "3676/6056, train_loss: 0.1673\n",
      "3677/6056, train_loss: 0.2332\n",
      "3678/6056, train_loss: 0.1805\n",
      "3679/6056, train_loss: 0.2401\n",
      "3680/6056, train_loss: 0.3035\n",
      "3681/6056, train_loss: 0.2196\n",
      "3682/6056, train_loss: 0.1447\n",
      "3683/6056, train_loss: 0.1400\n",
      "3684/6056, train_loss: 0.2025\n",
      "3685/6056, train_loss: 0.2754\n",
      "3686/6056, train_loss: 0.1549\n",
      "3687/6056, train_loss: 0.1531\n",
      "3688/6056, train_loss: 0.1866\n",
      "3689/6056, train_loss: 0.2290\n",
      "3690/6056, train_loss: 0.2109\n",
      "3691/6056, train_loss: 0.1683\n",
      "3692/6056, train_loss: 0.1865\n",
      "3693/6056, train_loss: 0.1900\n",
      "3694/6056, train_loss: 0.1715\n",
      "3695/6056, train_loss: 0.2473\n",
      "3696/6056, train_loss: 0.1379\n",
      "3697/6056, train_loss: 0.1565\n",
      "3698/6056, train_loss: 0.2174\n",
      "3699/6056, train_loss: 0.1501\n",
      "3700/6056, train_loss: 0.1373\n",
      "3701/6056, train_loss: 0.2131\n",
      "3702/6056, train_loss: 0.1643\n",
      "3703/6056, train_loss: 0.2222\n",
      "3704/6056, train_loss: 0.1814\n",
      "3705/6056, train_loss: 0.1372\n",
      "3706/6056, train_loss: 0.2855\n",
      "3707/6056, train_loss: 0.3189\n",
      "3708/6056, train_loss: 0.1935\n",
      "3709/6056, train_loss: 0.1563\n",
      "3710/6056, train_loss: 0.2172\n",
      "3711/6056, train_loss: 0.1713\n",
      "3712/6056, train_loss: 0.1550\n",
      "3713/6056, train_loss: 0.1932\n",
      "3714/6056, train_loss: 0.2851\n",
      "3715/6056, train_loss: 0.1421\n",
      "3716/6056, train_loss: 0.1201\n",
      "3717/6056, train_loss: 0.2992\n",
      "3718/6056, train_loss: 0.2350\n",
      "3719/6056, train_loss: 0.2112\n",
      "3720/6056, train_loss: 0.2475\n",
      "3721/6056, train_loss: 0.1964\n",
      "3722/6056, train_loss: 0.0854\n",
      "3723/6056, train_loss: 0.1629\n",
      "3724/6056, train_loss: 0.1938\n",
      "3725/6056, train_loss: 0.2596\n",
      "3726/6056, train_loss: 0.1220\n",
      "3727/6056, train_loss: 0.1427\n",
      "3728/6056, train_loss: 0.2097\n",
      "3729/6056, train_loss: 0.1875\n",
      "3730/6056, train_loss: 0.2058\n",
      "3731/6056, train_loss: 0.1925\n",
      "3732/6056, train_loss: 0.1462\n",
      "3733/6056, train_loss: 0.0982\n",
      "3734/6056, train_loss: 0.1598\n",
      "3735/6056, train_loss: 0.1536\n",
      "3736/6056, train_loss: 0.2010\n",
      "3737/6056, train_loss: 0.2616\n",
      "3738/6056, train_loss: 0.1530\n",
      "3739/6056, train_loss: 0.0993\n",
      "3740/6056, train_loss: 0.3319\n",
      "3741/6056, train_loss: 0.2019\n",
      "3742/6056, train_loss: 0.1802\n",
      "3743/6056, train_loss: 0.1746\n",
      "3744/6056, train_loss: 0.2000\n",
      "3745/6056, train_loss: 0.2842\n",
      "3746/6056, train_loss: 0.2228\n",
      "3747/6056, train_loss: 0.1821\n",
      "3748/6056, train_loss: 0.1302\n",
      "3749/6056, train_loss: 0.1855\n",
      "3750/6056, train_loss: 0.2724\n",
      "3751/6056, train_loss: 0.2526\n",
      "3752/6056, train_loss: 0.0971\n",
      "3753/6056, train_loss: 0.1427\n",
      "3754/6056, train_loss: 0.3444\n",
      "3755/6056, train_loss: 0.2226\n",
      "3756/6056, train_loss: 0.1778\n",
      "3757/6056, train_loss: 0.1242\n",
      "3758/6056, train_loss: 0.2976\n",
      "3759/6056, train_loss: 0.1415\n",
      "3760/6056, train_loss: 0.2526\n",
      "3761/6056, train_loss: 0.1401\n",
      "3762/6056, train_loss: 0.2685\n",
      "3763/6056, train_loss: 0.1475\n",
      "3764/6056, train_loss: 0.1666\n",
      "3765/6056, train_loss: 0.2146\n",
      "3766/6056, train_loss: 0.1740\n",
      "3767/6056, train_loss: 0.1947\n",
      "3768/6056, train_loss: 0.1932\n",
      "3769/6056, train_loss: 0.1753\n",
      "3770/6056, train_loss: 0.2934\n",
      "3771/6056, train_loss: 0.1475\n",
      "3772/6056, train_loss: 0.1788\n",
      "3773/6056, train_loss: 0.1630\n",
      "3774/6056, train_loss: 0.1814\n",
      "3775/6056, train_loss: 0.1266\n",
      "3776/6056, train_loss: 0.2496\n",
      "3777/6056, train_loss: 0.1558\n",
      "3778/6056, train_loss: 0.1415\n",
      "3779/6056, train_loss: 0.2093\n",
      "3780/6056, train_loss: 0.1974\n",
      "3781/6056, train_loss: 0.1336\n",
      "3782/6056, train_loss: 0.1359\n",
      "3783/6056, train_loss: 0.1837\n",
      "3784/6056, train_loss: 0.1308\n",
      "3785/6056, train_loss: 0.1323\n",
      "3786/6056, train_loss: 0.1137\n",
      "3787/6056, train_loss: 0.1899\n",
      "3788/6056, train_loss: 0.2246\n",
      "3789/6056, train_loss: 0.1931\n",
      "3790/6056, train_loss: 0.1987\n",
      "3791/6056, train_loss: 0.2868\n",
      "3792/6056, train_loss: 0.2147\n",
      "3793/6056, train_loss: 0.2996\n",
      "3794/6056, train_loss: 0.2127\n",
      "3795/6056, train_loss: 0.1759\n",
      "3796/6056, train_loss: 0.2340\n",
      "3797/6056, train_loss: 0.1833\n",
      "3798/6056, train_loss: 0.1751\n",
      "3799/6056, train_loss: 0.1946\n",
      "3800/6056, train_loss: 0.2288\n",
      "3801/6056, train_loss: 0.2005\n",
      "3802/6056, train_loss: 0.2675\n",
      "3803/6056, train_loss: 0.1542\n",
      "3804/6056, train_loss: 0.1393\n",
      "3805/6056, train_loss: 0.1068\n",
      "3806/6056, train_loss: 0.1853\n",
      "3807/6056, train_loss: 0.2190\n",
      "3808/6056, train_loss: 0.1782\n",
      "3809/6056, train_loss: 0.1437\n",
      "3810/6056, train_loss: 0.1239\n",
      "3811/6056, train_loss: 0.2046\n",
      "3812/6056, train_loss: 0.1217\n",
      "3813/6056, train_loss: 0.2010\n",
      "3814/6056, train_loss: 0.2509\n",
      "3815/6056, train_loss: 0.2076\n",
      "3816/6056, train_loss: 0.2510\n",
      "3817/6056, train_loss: 0.2155\n",
      "3818/6056, train_loss: 0.1678\n",
      "3819/6056, train_loss: 0.1233\n",
      "3820/6056, train_loss: 0.1708\n",
      "3821/6056, train_loss: 0.3943\n",
      "3822/6056, train_loss: 0.1486\n",
      "3823/6056, train_loss: 0.2136\n",
      "3824/6056, train_loss: 0.1500\n",
      "3825/6056, train_loss: 0.1072\n",
      "3826/6056, train_loss: 0.2216\n",
      "3827/6056, train_loss: 0.1883\n",
      "3828/6056, train_loss: 0.2064\n",
      "3829/6056, train_loss: 0.1297\n",
      "3830/6056, train_loss: 0.1410\n",
      "3831/6056, train_loss: 0.2492\n",
      "3832/6056, train_loss: 0.2341\n",
      "3833/6056, train_loss: 0.1466\n",
      "3834/6056, train_loss: 0.1429\n",
      "3835/6056, train_loss: 0.1355\n",
      "3836/6056, train_loss: 0.1664\n",
      "3837/6056, train_loss: 0.2276\n",
      "3838/6056, train_loss: 0.0962\n",
      "3839/6056, train_loss: 0.1594\n",
      "3840/6056, train_loss: 0.1901\n",
      "3841/6056, train_loss: 0.2279\n",
      "3842/6056, train_loss: 0.1711\n",
      "3843/6056, train_loss: 0.1581\n",
      "3844/6056, train_loss: 0.1149\n",
      "3845/6056, train_loss: 0.1195\n",
      "3846/6056, train_loss: 0.1478\n",
      "3847/6056, train_loss: 0.1363\n",
      "3848/6056, train_loss: 0.1562\n",
      "3849/6056, train_loss: 0.1459\n",
      "3850/6056, train_loss: 0.2203\n",
      "3851/6056, train_loss: 0.2616\n",
      "3852/6056, train_loss: 0.2148\n",
      "3853/6056, train_loss: 0.1971\n",
      "3854/6056, train_loss: 0.2122\n",
      "3855/6056, train_loss: 0.1667\n",
      "3856/6056, train_loss: 0.1666\n",
      "3857/6056, train_loss: 0.1718\n",
      "3858/6056, train_loss: 0.1719\n",
      "3859/6056, train_loss: 0.1990\n",
      "3860/6056, train_loss: 0.1117\n",
      "3861/6056, train_loss: 0.1447\n",
      "3862/6056, train_loss: 0.2363\n",
      "3863/6056, train_loss: 0.1187\n",
      "3864/6056, train_loss: 0.2749\n",
      "3865/6056, train_loss: 0.1217\n",
      "3866/6056, train_loss: 0.2663\n",
      "3867/6056, train_loss: 0.2037\n",
      "3868/6056, train_loss: 0.1673\n",
      "3869/6056, train_loss: 0.1763\n",
      "3870/6056, train_loss: 0.1326\n",
      "3871/6056, train_loss: 0.2793\n",
      "3872/6056, train_loss: 0.1407\n",
      "3873/6056, train_loss: 0.1613\n",
      "3874/6056, train_loss: 0.2545\n",
      "3875/6056, train_loss: 0.2409\n",
      "3876/6056, train_loss: 0.2683\n",
      "3877/6056, train_loss: 0.2100\n",
      "3878/6056, train_loss: 0.2112\n",
      "3879/6056, train_loss: 0.1310\n",
      "3880/6056, train_loss: 0.1716\n",
      "3881/6056, train_loss: 0.1895\n",
      "3882/6056, train_loss: 0.1293\n",
      "3883/6056, train_loss: 0.1215\n",
      "3884/6056, train_loss: 0.1595\n",
      "3885/6056, train_loss: 0.2334\n",
      "3886/6056, train_loss: 0.2333\n",
      "3887/6056, train_loss: 0.1514\n",
      "3888/6056, train_loss: 0.1210\n",
      "3889/6056, train_loss: 0.2004\n",
      "3890/6056, train_loss: 0.2083\n",
      "3891/6056, train_loss: 0.1781\n",
      "3892/6056, train_loss: 0.1841\n",
      "3893/6056, train_loss: 0.1215\n",
      "3894/6056, train_loss: 0.2163\n",
      "3895/6056, train_loss: 0.2258\n",
      "3896/6056, train_loss: 0.1899\n",
      "3897/6056, train_loss: 0.1813\n",
      "3898/6056, train_loss: 0.2143\n",
      "3899/6056, train_loss: 0.2820\n",
      "3900/6056, train_loss: 0.0771\n",
      "3901/6056, train_loss: 0.2072\n",
      "3902/6056, train_loss: 0.3463\n",
      "3903/6056, train_loss: 0.4079\n",
      "3904/6056, train_loss: 0.2178\n",
      "3905/6056, train_loss: 0.1884\n",
      "3906/6056, train_loss: 0.0830\n",
      "3907/6056, train_loss: 0.1798\n",
      "3908/6056, train_loss: 0.1686\n",
      "3909/6056, train_loss: 0.1792\n",
      "3910/6056, train_loss: 0.2101\n",
      "3911/6056, train_loss: 0.1652\n",
      "3912/6056, train_loss: 0.1114\n",
      "3913/6056, train_loss: 0.1603\n",
      "3914/6056, train_loss: 0.1632\n",
      "3915/6056, train_loss: 0.2152\n",
      "3916/6056, train_loss: 0.2499\n",
      "3917/6056, train_loss: 0.1296\n",
      "3918/6056, train_loss: 0.1437\n",
      "3919/6056, train_loss: 0.1855\n",
      "3920/6056, train_loss: 0.1617\n",
      "3921/6056, train_loss: 0.1644\n",
      "3922/6056, train_loss: 0.2082\n",
      "3923/6056, train_loss: 0.1328\n",
      "3924/6056, train_loss: 0.2676\n",
      "3925/6056, train_loss: 0.2181\n",
      "3926/6056, train_loss: 0.1599\n",
      "3927/6056, train_loss: 0.1033\n",
      "3928/6056, train_loss: 0.2932\n",
      "3929/6056, train_loss: 0.1100\n",
      "3930/6056, train_loss: 0.2040\n",
      "3931/6056, train_loss: 0.1680\n",
      "3932/6056, train_loss: 0.1926\n",
      "3933/6056, train_loss: 0.1578\n",
      "3934/6056, train_loss: 0.1619\n",
      "3935/6056, train_loss: 0.1536\n",
      "3936/6056, train_loss: 0.1663\n",
      "3937/6056, train_loss: 0.1399\n",
      "3938/6056, train_loss: 0.2125\n",
      "3939/6056, train_loss: 0.0817\n",
      "3940/6056, train_loss: 0.1893\n",
      "3941/6056, train_loss: 0.1227\n",
      "3942/6056, train_loss: 0.2458\n",
      "3943/6056, train_loss: 0.2125\n",
      "3944/6056, train_loss: 0.1674\n",
      "3945/6056, train_loss: 0.1857\n",
      "3946/6056, train_loss: 0.2849\n",
      "3947/6056, train_loss: 0.1408\n",
      "3948/6056, train_loss: 0.1667\n",
      "3949/6056, train_loss: 0.1351\n",
      "3950/6056, train_loss: 0.1539\n",
      "3951/6056, train_loss: 0.1246\n",
      "3952/6056, train_loss: 0.1778\n",
      "3953/6056, train_loss: 0.2081\n",
      "3954/6056, train_loss: 0.3410\n",
      "3955/6056, train_loss: 0.2718\n",
      "3956/6056, train_loss: 0.1264\n",
      "3957/6056, train_loss: 0.2104\n",
      "3958/6056, train_loss: 0.1128\n",
      "3959/6056, train_loss: 0.1272\n",
      "3960/6056, train_loss: 0.2067\n",
      "3961/6056, train_loss: 0.3358\n",
      "3962/6056, train_loss: 0.3449\n",
      "3963/6056, train_loss: 0.1386\n",
      "3964/6056, train_loss: 0.1474\n",
      "3965/6056, train_loss: 0.2114\n",
      "3966/6056, train_loss: 0.1553\n",
      "3967/6056, train_loss: 0.1947\n",
      "3968/6056, train_loss: 0.2538\n",
      "3969/6056, train_loss: 0.1616\n",
      "3970/6056, train_loss: 0.2656\n",
      "3971/6056, train_loss: 0.1919\n",
      "3972/6056, train_loss: 0.2154\n",
      "3973/6056, train_loss: 0.1538\n",
      "3974/6056, train_loss: 0.1133\n",
      "3975/6056, train_loss: 0.1983\n",
      "3976/6056, train_loss: 0.3435\n",
      "3977/6056, train_loss: 0.1861\n",
      "3978/6056, train_loss: 0.2085\n",
      "3979/6056, train_loss: 0.2526\n",
      "3980/6056, train_loss: 0.1911\n",
      "3981/6056, train_loss: 0.1857\n",
      "3982/6056, train_loss: 0.1377\n",
      "3983/6056, train_loss: 0.2242\n",
      "3984/6056, train_loss: 0.1224\n",
      "3985/6056, train_loss: 0.1326\n",
      "3986/6056, train_loss: 0.1399\n",
      "3987/6056, train_loss: 0.2753\n",
      "3988/6056, train_loss: 0.1353\n",
      "3989/6056, train_loss: 0.1932\n",
      "3990/6056, train_loss: 0.1579\n",
      "3991/6056, train_loss: 0.2783\n",
      "3992/6056, train_loss: 0.1294\n",
      "3993/6056, train_loss: 0.1983\n",
      "3994/6056, train_loss: 0.1227\n",
      "3995/6056, train_loss: 0.3052\n",
      "3996/6056, train_loss: 0.2782\n",
      "3997/6056, train_loss: 0.1915\n",
      "3998/6056, train_loss: 0.2185\n",
      "3999/6056, train_loss: 0.1725\n",
      "4000/6056, train_loss: 0.2573\n",
      "4001/6056, train_loss: 0.1695\n",
      "4002/6056, train_loss: 0.2105\n",
      "4003/6056, train_loss: 0.2079\n",
      "4004/6056, train_loss: 0.1781\n",
      "4005/6056, train_loss: 0.1330\n",
      "4006/6056, train_loss: 0.2140\n",
      "4007/6056, train_loss: 0.1399\n",
      "4008/6056, train_loss: 0.1375\n",
      "4009/6056, train_loss: 0.2039\n",
      "4010/6056, train_loss: 0.1851\n",
      "4011/6056, train_loss: 0.1908\n",
      "4012/6056, train_loss: 0.1035\n",
      "4013/6056, train_loss: 0.1476\n",
      "4014/6056, train_loss: 0.2550\n",
      "4015/6056, train_loss: 0.1753\n",
      "4016/6056, train_loss: 0.1882\n",
      "4017/6056, train_loss: 0.1942\n",
      "4018/6056, train_loss: 0.1309\n",
      "4019/6056, train_loss: 0.1784\n",
      "4020/6056, train_loss: 0.2121\n",
      "4021/6056, train_loss: 0.2294\n",
      "4022/6056, train_loss: 0.2527\n",
      "4023/6056, train_loss: 0.1340\n",
      "4024/6056, train_loss: 0.1917\n",
      "4025/6056, train_loss: 0.2159\n",
      "4026/6056, train_loss: 0.1712\n",
      "4027/6056, train_loss: 0.1080\n",
      "4028/6056, train_loss: 0.1243\n",
      "4029/6056, train_loss: 0.1930\n",
      "4030/6056, train_loss: 0.2903\n",
      "4031/6056, train_loss: 0.1917\n",
      "4032/6056, train_loss: 0.2564\n",
      "4033/6056, train_loss: 0.1995\n",
      "4034/6056, train_loss: 0.1980\n",
      "4035/6056, train_loss: 0.1360\n",
      "4036/6056, train_loss: 0.2091\n",
      "4037/6056, train_loss: 0.2255\n",
      "4038/6056, train_loss: 0.1975\n",
      "4039/6056, train_loss: 0.2238\n",
      "4040/6056, train_loss: 0.1366\n",
      "4041/6056, train_loss: 0.3007\n",
      "4042/6056, train_loss: 0.1991\n",
      "4043/6056, train_loss: 0.1193\n",
      "4044/6056, train_loss: 0.1495\n",
      "4045/6056, train_loss: 0.1785\n",
      "4046/6056, train_loss: 0.1107\n",
      "4047/6056, train_loss: 0.2124\n",
      "4048/6056, train_loss: 0.2363\n",
      "4049/6056, train_loss: 0.1953\n",
      "4050/6056, train_loss: 0.1918\n",
      "4051/6056, train_loss: 0.1521\n",
      "4052/6056, train_loss: 0.1388\n",
      "4053/6056, train_loss: 0.2204\n",
      "4054/6056, train_loss: 0.1788\n",
      "4055/6056, train_loss: 0.1215\n",
      "4056/6056, train_loss: 0.1718\n",
      "4057/6056, train_loss: 0.1483\n",
      "4058/6056, train_loss: 0.1152\n",
      "4059/6056, train_loss: 0.2688\n",
      "4060/6056, train_loss: 0.1186\n",
      "4061/6056, train_loss: 0.1714\n",
      "4062/6056, train_loss: 0.1355\n",
      "4063/6056, train_loss: 0.1732\n",
      "4064/6056, train_loss: 0.1466\n",
      "4065/6056, train_loss: 0.1688\n",
      "4066/6056, train_loss: 0.1566\n",
      "4067/6056, train_loss: 0.2113\n",
      "4068/6056, train_loss: 0.1969\n",
      "4069/6056, train_loss: 0.1371\n",
      "4070/6056, train_loss: 0.1683\n",
      "4071/6056, train_loss: 0.2140\n",
      "4072/6056, train_loss: 0.0999\n",
      "4073/6056, train_loss: 0.1461\n",
      "4074/6056, train_loss: 0.1443\n",
      "4075/6056, train_loss: 0.2737\n",
      "4076/6056, train_loss: 0.2351\n",
      "4077/6056, train_loss: 0.2253\n",
      "4078/6056, train_loss: 0.1428\n",
      "4079/6056, train_loss: 0.3541\n",
      "4080/6056, train_loss: 0.2004\n",
      "4081/6056, train_loss: 0.1690\n",
      "4082/6056, train_loss: 0.2155\n",
      "4083/6056, train_loss: 0.2009\n",
      "4084/6056, train_loss: 0.2613\n",
      "4085/6056, train_loss: 0.3243\n",
      "4086/6056, train_loss: 0.1531\n",
      "4087/6056, train_loss: 0.1821\n",
      "4088/6056, train_loss: 0.1538\n",
      "4089/6056, train_loss: 0.1041\n",
      "4090/6056, train_loss: 0.2008\n",
      "4091/6056, train_loss: 0.1616\n",
      "4092/6056, train_loss: 0.2852\n",
      "4093/6056, train_loss: 0.2332\n",
      "4094/6056, train_loss: 0.2897\n",
      "4095/6056, train_loss: 0.2761\n",
      "4096/6056, train_loss: 0.1540\n",
      "4097/6056, train_loss: 0.1200\n",
      "4098/6056, train_loss: 0.2068\n",
      "4099/6056, train_loss: 0.1613\n",
      "4100/6056, train_loss: 0.1198\n",
      "4101/6056, train_loss: 0.1779\n",
      "4102/6056, train_loss: 0.2234\n"
     ]
    }
   ],
   "source": [
    "best_metric = -1\n",
    "best_metric_epoch = -1\n",
    "epoch_loss_values = []\n",
    "metric_values = []\n",
    "writer = SummaryWriter()\n",
    "model_dir = '/mnt/datawow/lyq/medical_image_model'\n",
    "\n",
    "for epoch in range(max_epochs):\n",
    "    print(\"-\" * 10)\n",
    "    print(f\"epoch {epoch + 1}/{max_epochs}\")\n",
    "    model.train()\n",
    "    epoch_loss = 0\n",
    "    step = 0\n",
    "    # Steps per epoch is loop-invariant: compute once instead of per batch.\n",
    "    epoch_len = len(train_ds) // train_loader.batch_size\n",
    "    for batch_data in train_loader:\n",
    "        step += 1\n",
    "        inputs, labels = batch_data[0].to(device), batch_data[1].to(device)\n",
    "        optimizer.zero_grad()\n",
    "        outputs = model(inputs)\n",
    "        loss = loss_function(outputs, labels)\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "        epoch_loss += loss.item()\n",
    "        print(f\"{step}/{epoch_len}, train_loss: {loss.item():.4f}\")\n",
    "        writer.add_scalar(\"train_loss\", loss.item(), epoch_len * epoch + step)\n",
    "    epoch_loss /= step\n",
    "    epoch_loss_values.append(epoch_loss)\n",
    "    print(f\"epoch {epoch + 1} average loss: {epoch_loss:.4f}\")\n",
    "\n",
    "    if (epoch + 1) % val_interval == 0:\n",
    "        model.eval()\n",
    "        with torch.no_grad():\n",
    "            # Accumulate raw logits and multi-hot targets over the whole val set.\n",
    "            y_pred = torch.tensor([], dtype=torch.float32, device=device)\n",
    "            y = torch.tensor([], dtype=torch.long, device=device)\n",
    "            for val_data in val_loader:\n",
    "                val_images, val_labels = (\n",
    "                    val_data[0].to(device),\n",
    "                    val_data[1].to(device),\n",
    "                )\n",
    "                y_pred = torch.cat([y_pred, model(val_images)], dim=0)\n",
    "                y = torch.cat([y, val_labels], dim=0)\n",
    "            # Per-class ROC-AUC (multi-label task). The first multi-hot column is\n",
    "            # skipped — presumably an image-id column, TODO confirm.\n",
    "            label_list = multi_hot_df.columns.tolist()[1:]\n",
    "            class_aucs = []\n",
    "            for i, label_name in enumerate(label_list):\n",
    "                auc_metric(y_pred[:, i], y[:, i])\n",
    "                class_auc = auc_metric.aggregate()\n",
    "                print(label_name, class_auc)\n",
    "                auc_metric.reset()  # reset so scores don't accumulate across classes\n",
    "                class_aucs.append(class_auc)\n",
    "            # Model selection uses the MEAN AUC across classes. Previously `result`\n",
    "            # held only the LAST class's AUC (the loop overwrote it each iteration).\n",
    "            # NaN entries (a class absent from this val split — MONAI returns NaN\n",
    "            # then) are dropped via the NaN != NaN property.\n",
    "            valid_aucs = [a for a in class_aucs if a == a]\n",
    "            result = sum(valid_aucs) / len(valid_aucs) if valid_aucs else float(\"nan\")\n",
    "            metric_values.append(result)\n",
    "            # Multi-label accuracy: threshold logits at 0, compare element-wise;\n",
    "            # numel() replaces the hard-coded len(acc_value)*15.\n",
    "            acc_value = torch.eq(y_pred > 0, y)\n",
    "            acc_metric = acc_value.sum().item() / acc_value.numel()\n",
    "            if result > best_metric:\n",
    "                best_metric = result\n",
    "                best_metric_epoch = epoch + 1\n",
    "                # Save under model_dir, defined above but previously unused (the\n",
    "                # original wrote to `root_dir` from another cell — intent mismatch).\n",
    "                torch.save(model.state_dict(), os.path.join(model_dir, f\"{model_name}_best_metric_model.pth\"))\n",
    "                print(\"saved new best metric model\")\n",
    "            print(\n",
    "                f\"current epoch: {epoch + 1} current mean AUC: {result:.4f}\"\n",
    "                f\" current accuracy: {acc_metric:.4f}\"\n",
    "                f\" best mean AUC: {best_metric:.4f}\"\n",
    "                f\" at epoch: {best_metric_epoch}\"\n",
    "            )\n",
    "            writer.add_scalar(\"val_mean_auc\", result, epoch + 1)\n",
    "            writer.add_scalar(\"val_accuracy\", acc_metric, epoch + 1)\n",
    "\n",
    "print(f\"train completed, best_metric: {best_metric:.4f} \" f\"at epoch: {best_metric_epoch}\")\n",
    "writer.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "da851a19-068e-429e-a31e-42b2e7637095",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "30"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(val_loader)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "33055523-a07d-492c-96e2-b7f4d1b003a0",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "16"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# FIXME(review): the argument was removed after execution (the stored output\n",
    "# above shows 16, so this cell once ran with an argument); `len()` raises\n",
    "# TypeError on a fresh Restart-and-Run-All. Restore the intended argument or\n",
    "# delete this scratch cell.\n",
    "len()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "2c2102d4-bd29-48c9-8c62-7b5b11148401",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Atelectasis 0.7879677570211817\n",
      "Cardiomegaly 0.4886621315192744\n",
      "Consolidation 0.4987157534246575\n",
      "Edema 0.23141891891891891\n",
      "Effusion 0.6402224492112133\n",
      "Emphysema 0.4969491525423729\n",
      "Fibrosis 0.7986577181208053\n",
      "Hernia 0.6020066889632107\n",
      "Infiltration 0.586489898989899\n",
      "Mass 0.4841549295774648\n",
      "No Finding 0.590265987549519\n",
      "Nodule 0.567408201554543\n",
      "Pleural_Thickening 0.5539383561643836\n",
      "Pneumonia 0.39057239057239057\n",
      "Pneumothorax 0.6375862068965518\n"
     ]
    }
   ],
   "source": [
    "# Per-class ROC-AUC over the validation logits accumulated by the training\n",
    "# cell. The first multi-hot column is skipped — presumably an image-id column,\n",
    "# TODO confirm against multi_hot_df's schema.\n",
    "label_list = multi_hot_df.columns.tolist()[1:]\n",
    "# Iterate over the label list itself instead of a hard-coded range(15), so the\n",
    "# cell keeps working if the number of label columns changes.\n",
    "for i, label_name in enumerate(label_list):\n",
    "    auc_metric(y_pred[:, i], y[:, i])\n",
    "    result = auc_metric.aggregate()\n",
    "    print(label_name, result)\n",
    "    auc_metric.reset()  # reset so scores don't accumulate across classes\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "21117ef4-7d07-4615-b6de-d97fea98da86",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(torch.Size([300, 15]), torch.Size([300, 15]))"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "y.shape, y_pred.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "303331f3-b0ac-40f4-8fed-1d7c939e4511",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(torch.Size([10, 15]), torch.Size([10, 15]))"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "outputs.shape, labels.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 91,
   "id": "d9580dbd-1b29-4087-ae10-2930b3cc4513",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([10, 15])"
      ]
     },
     "execution_count": 91,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "y_pred = torch.tensor([], dtype=torch.float32, device=device)\n",
    "y_pred = torch.cat([y_pred, model(val_images)], dim=0)\n",
    "y_pred.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 82,
   "id": "82c764b9-d863-4a5b-b6ab-16a5e853a02b",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(torch.Size([300, 15]), torch.Size([300, 15]))"
      ]
     },
     "execution_count": 82,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "y_pred.shape,  y.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 77,
   "id": "16a7867e-d1ec-45c9-9393-329fdfba9d3f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[metatensor([[0.2580, 0.1588, 0.2086,  ..., 0.1859, 0.1706, 0.1429],\n",
       "         [0.2300, 0.1698, 0.2378,  ..., 0.2217, 0.1844, 0.1590],\n",
       "         [0.2377, 0.1659, 0.2273,  ..., 0.1965, 0.1693, 0.1494],\n",
       "         ...,\n",
       "         [0.2379, 0.1753, 0.2505,  ..., 0.2351, 0.1868, 0.1624],\n",
       "         [0.2524, 0.1668, 0.2244,  ..., 0.2049, 0.1782, 0.1496],\n",
       "         [0.2547, 0.1607, 0.2125,  ..., 0.2001, 0.1747, 0.1451]],\n",
       "        device='cuda:0'),\n",
       " tensor([[0., 0., 0.,  ..., 0., 0., 0.],\n",
       "         [0., 0., 0.,  ..., 0., 0., 0.],\n",
       "         [0., 0., 0.,  ..., 0., 0., 1.],\n",
       "         ...,\n",
       "         [0., 0., 0.,  ..., 0., 0., 0.],\n",
       "         [0., 0., 0.,  ..., 0., 0., 0.],\n",
       "         [0., 0., 0.,  ..., 0., 0., 0.]], device='cuda:0')]"
      ]
     },
     "execution_count": 77,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# NOTE(review): `y_pred_act` and `y_onehot` are only produced by lines that are\n",
    "# now commented out in the training cell — this cell ran against stale kernel\n",
    "# state and will NameError on a fresh Restart-and-Run-All.\n",
    "auc_metric(y_pred_act, y_onehot)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 81,
   "id": "17f4543d-d2de-45bd-83b1-d0e218d2d224",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([15])"
      ]
     },
     "execution_count": 81,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "y_pred_act[0].shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 79,
   "id": "36aeefd3-fed1-4888-aceb-b10703925622",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/yawei/.local/lib/python3.10/site-packages/monai/metrics/rocauc.py:83: UserWarning: y values can not be all 0.0, skip AUC computation and return `Nan`.\n",
      "  warnings.warn(f\"y values can not be all {y_unique.item()}, skip AUC computation and return `Nan`.\")\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "nan"
      ]
     },
     "execution_count": 79,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "auc_metric.aggregate()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "id": "e150544a-504d-4146-93bb-fa48f5a20eeb",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "4192"
      ]
     },
     "execution_count": 49,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "np.sum(y_pred<0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 69,
   "id": "31ea5e54-d091-4a2d-afe4-a801f2f630c6",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([300, 15])"
      ]
     },
     "execution_count": 69,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Shape check of the targets: 300 samples x 15 classes.\n",
    "y.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 70,
   "id": "31af257e-ea59-48ba-908a-91dcdaba0b70",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([300, 15])"
      ]
     },
     "execution_count": 70,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Predictions have the same (300, 15) shape as the targets y.\n",
    "y_pred.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 63,
   "id": "9e9d88e6-4793-4b24-906e-5cb7bbd945ab",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "metatensor([[-0.1552, -0.5790, -0.0807,  ..., -0.1025, -0.5308, -0.7933],\n",
       "        [-0.2114, -0.6329, -0.0492,  ..., -0.1928, -0.5460, -0.7030],\n",
       "        [-0.1089, -0.6762, -0.1170,  ..., -0.2959, -0.5481, -0.8387],\n",
       "        ...,\n",
       "        [-0.2542, -0.6220, -0.0690,  ..., -0.1123, -0.4740, -0.7000],\n",
       "        [-0.1770, -0.6235, -0.0930,  ..., -0.1224, -0.4693, -0.6997],\n",
       "        [-0.2211, -0.6337, -0.1038,  ..., -0.2451, -0.5351, -0.7403]],\n",
       "       device='cuda:0')"
      ]
     },
     "execution_count": 63,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Peek at the raw y_pred values (a MONAI MetaTensor on cuda:0;\n",
    "# presumably pre-sigmoid logits -- all displayed entries are negative).\n",
    "y_pred"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1ac0379f-1c32-4224-81b8-dc0c8315c948",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Element-wise correctness mask: threshold the logits at 0 and compare to targets.\n",
    "# y_pred lives on cuda:0 while y is on the CPU, so move y onto y_pred's device\n",
    "# first -- the original torch.eq(y_pred>0, y) raised a cross-device RuntimeError.\n",
    "acc_value = torch.eq(y_pred > 0, y.to(y_pred.device))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 68,
   "id": "e4ac8f2a-c9ca-45d1-8ebf-f2f4283f929c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([300, 15])"
      ]
     },
     "execution_count": 68,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# acc_value is a per-element (300, 15) comparison result.\n",
    "# NOTE(review): this ran as In[68], before the later In[92] attempt to\n",
    "# recompute acc_value failed -- the value shown comes from an earlier run.\n",
    "acc_value.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 71,
   "id": "8e39ab48-9c6c-4455-ad98-30d2e0c7abe4",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([10, 15])"
      ]
     },
     "execution_count": 71,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# outputs is (10, 15) -- presumably one batch of 10 samples x 15 classes;\n",
    "# confirm against the loop that produced it.\n",
    "outputs.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 72,
   "id": "ab32307f-feff-4b7b-86a7-015093ddc6e7",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([10, 1, 512, 512])"
      ]
     },
     "execution_count": 72,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# x is (10, 1, 512, 512) -- presumably a batch of 10 single-channel\n",
    "# 512x512 images matching the 10 rows of outputs above.\n",
    "x.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 76,
   "id": "ecc45f29-1ade-48a6-a8c6-4efaeb86db8d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Inspect the values auc_metric has accumulated so far\n",
    "# (no output captured for this run).\n",
    "auc_metric.get_buffer()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f99b6599-b64b-4fc8-a57c-c1ae78081a64",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.14"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
