{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 第六章：卷积神经网络\n",
    "湖北理工学院《机器学习》课程资料\n",
    "\n",
    "作者：李辉楚吴\n",
    "\n",
    "笔记内容概述: 迁移学习、ResNet"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Downloading https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz to ./Data\\cifar-10-python.tar.gz\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100.0%\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting ./Data\\cifar-10-python.tar.gz to ./Data\n",
      "Test set size: 10000\n",
      "Files already downloaded and verified\n",
      "Training set size: 50000\n",
      "Number of training samples: 40000\n",
      "Number of cross-validation samples: 10000\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "import torchvision\n",
    "import torchvision.transforms as transforms\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "label_size = 18 # Label size\n",
    "ticklabel_size = 14 # Tick label size\n",
    "    \n",
    "# Define a transform to normalize the data\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor()\n",
    "])\n",
    "\n",
    "# Load the test split of the CIFAR-10 dataset\n",
    "testset = torchvision.datasets.CIFAR10(root='./Data', train=False, download=True, transform=transform)\n",
    "print(f\"Test set size: {len(testset)}\")\n",
    "\n",
    "# Load the training split of the CIFAR-10 dataset\n",
    "trainset = torchvision.datasets.CIFAR10(root='./Data', train=True, download=True, transform=transform)\n",
    "print(f\"Training set size: {len(trainset)}\")\n",
    "\n",
    "# Rate of trX and cvX\n",
    "tr_cv_rate = 0.8\n",
    "\n",
    "# Create a list to store the sample indices belonging to each class\n",
    "class_indices = [[] for _ in range(10)]  # 10 classes in CIFAR-10\n",
    "\n",
    "# Populate class_indices\n",
    "for idx, (_, label) in enumerate(trainset):\n",
    "    class_indices[label].append(idx)\n",
    "\n",
    "# Calculate the number of samples for each class in training and validation sets\n",
    "train_size_per_class = int(tr_cv_rate * min(len(indices) for indices in class_indices))\n",
    "val_size_per_class = min(len(indices) for indices in class_indices) - train_size_per_class\n",
    "\n",
    "# Create balanced train and validation sets\n",
    "train_indices = []\n",
    "val_indices = []\n",
    "for indices in class_indices:\n",
    "    train_indices.extend(indices[:train_size_per_class])\n",
    "    val_indices.extend(indices[train_size_per_class:train_size_per_class + val_size_per_class])\n",
    "\n",
    "# Create Subset datasets\n",
    "from torch.utils.data import Subset\n",
    "trX = Subset(trainset, train_indices)\n",
    "cvX = Subset(trainset, val_indices)\n",
    "\n",
    "print(f\"Number of training samples: {len(trX)}\")\n",
    "print(f\"Number of cross-validation samples: {len(cvX)}\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "构建DataLoaders，准备训练模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "image_channels is 3\n",
      "tensor([0., 0., 1., 0., 0., 0., 0., 0., 0., 0.])\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAeEAAAH2CAYAAABHmTQtAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAAAgxklEQVR4nO3dW6ymd10v8N97XsdZaw7tTDuVtk5tgaobs7ck0n1hS0zUoImCojGKCJGEGG8wUTelysErvfFCUi+wGIFITFCr4Ua0MUZQOxc77K24ofTsZlo651lr1lrvaV80NI7Tdpbfl/Lfgc8n4aKL+a3f8z7v8z7f9Uyn8+3M5/N5AQDfcN3WBwAA36qEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwvIiPfvSj1el06uTJk1+X79fpdOqXfumXvi7f699/z9/8zd+MZv8zr+/nf/7n65Zbbon2/Md9jz/++ELfB77ZCGHgZb3vfe+rP/3TP219GPBNqd/6AID/v504ceKav2Y+n9fOzk4tLy9/A44Ivnl4EobQzs5Ovec976nXve51tbGxUYcOHarv+77vqz//8z9/yZnf//3fr9tvv71Go1G99rWvrT/+4z++6tecOnWq3vWud9VNN91Uw+Gwbr311nr/+99fk8nk6/4azp49W29/+9vr0KFDtbq6Wj/yIz9Sjz766BW/5sV+O/prv71+//3312te85oajUb1h3/4h1VV9Q//8A9111131dLSUt14443167/+6zUej7/uxw7fDDwJQ2h3d7fOnDlTv/Irv1LHjx+vvb29+sxnPlM//uM/Xg888ED93M/93BW//sEHH6yHHnqoPvCBD9Tq6mp9+MMfrp/+6Z+ufr9fb3nLW6rq+QB+/etfX91ut+677746ceJEfe5zn6sPfehD9fjjj9cDDzzwssf0tbDc7797fcc73lE/8AM/UJ/4xCfqqaeeqnvvvbe+//u/vz7/+c/X5ubmy87+2Z/9Wf3d3/1d3XfffXXs2LG6/vrr61/+5V/qjW98Y91yyy310Y9+tFZWVurDH/5wfeITn9jX8cC3nDlwlQceeGBeVfOHH3543zOTyWQ+Ho/n73jHO+bf8z3fc8X/V1Xz5eXl+alTp6749a9+9avnt9122wtfe9e73jVfW1ubP/HEE1fM/87v/M68qub//M//fMX3/I3f+I0rft2JEyfmJ06c2Pfr+7Ef+7Ervv73f//386qaf+hDH3rha29729vmN99881WvZ2NjY37mzJkrvv7Wt771JV9nVc0fe+yxax4bfCvx29GwgD/5kz+pu+66q9bW1qrf79dgMKiPfOQj9YUvfOGqX/vGN76xjh49+sI/93q9eutb31qPPPJIPf3001VV9Zd/+Zd1991314033liTyeSF//3QD/1QVVX97d/+7csezyOPPFKPPPLIvo//Z37mZ6745ze84Q11880310MPPXTN2XvuuacOHjx4xdceeuihl3ydwNWEMIQ+9alP1U/+5E/W8ePH62Mf+1h97nOfq4cffrh+4Rd+oXZ2dq769ceOHXvJr50+fbqqqp555pn6i7/4ixoMBlf8784776yqqueee+7r+hpe6pi+djwv54Ybbrjqa6dPn37Z1wlcyb8ThtDHPvaxuvXWW+uTn/xkdTqdF76+u7v7or/+1KlTL/m1w4cPV1XVkSNH6ru/+7vrt37rt170e9x4442LHva+jum222675uy/f81fc/jw4Zd9ncCVhDCEOp1ODYfDK8Lo1KlTL/mno//6r/+6nnnmmRd+q3Y6ndYnP/nJOnHiRN10001VVfWmN72pPv3pT9eJEyeu+q3eV8LHP/7xevOb3/zCP3/2s5+tJ554ot75zndG3+/uu++uBx988EVfJ3A1IQwv42/+5m9e9E8a//AP/3C96U1vqk996lP17ne/u97ylrfUU089VR/84AfrhhtuqC996UtX
zRw5cqTuueeeet/73vfCn47+13/91yv+M6UPfOAD9Vd/9Vf1hje8oX75l3+57rjjjtrZ2anHH3+8Pv3pT9f999//QmC/mK89we733wufPHmy3vnOd9ZP/MRP1FNPPVXvfe976/jx4/Xud797X/P/0b333lsPPvhg3XPPPXXffffVyspK/d7v/V5tbW1F3w++2QlheBm/+qu/+qJff+yxx+rtb397Pfvss3X//ffXH/zBH9S3f/u316/92q/V008/Xe9///uvmvnRH/3RuvPOO+vee++tJ598sk6cOFEf//jHr/hDSzfccEOdPHmyPvjBD9Zv//Zv19NPP13r6+t166231g/+4A9e8+n4P/vfEn/kIx+pP/qjP6qf+qmfqt3d3br77rvrd3/3d+vQoUP/qe/zNd/5nd9Zn/nMZ+o973lPve1tb6uDBw/Wz/7sz9ab3/zm+sVf/MXoe8I3s858Pp+3PggA+FbkT0cDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjez7L+u473+8N1qwSBF5+p8wnz9/Pt559ty5aG4yn8U7U6PRKJp7sb/zd7/29vaiucPX6KZ9OenftrTItbc0WormLu9cjnf2utn7cv31R6K5ixcvRnNVzzcjJdLrp6petBRjP7rd7Firqubh53p1dTWa6/fzvz8p/VzPJvm9q9fLnuNWVrPPV1XVdJpdQ+cvnIl3jneznR/9xIv/Fbb/nidhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARvZd2ZG2ewwGg2iuqqrbzX5GmE6n8c5KG4bCNpGqvPVpbW0tmhsOh9FcVdXZs2eznUvL8c619Y1objIZxztns6xZZhLOVVX1e9m1lzYaHThwIJqratPgtb29Hc0t0tyUXgfpZ2yRBrj0Xru6lDU+VVVNw6ayye4CDWfhveT6Q8finaMF7l/X4kkYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgESEMAI3su59wPp9HCxapzUtrzzY2suq7qqr1cLbbz+rkqqpuuummaC49t+l7WVV17FhWBzYa5lVgaUXbdJrXCs7n2ezh647GO3e2L0ZzvU5W3bmyklfYLS9n7+ci94Otra1o7sKFC/HO9B60yGcslR7r2a+ejnceDOswZ5O8brYTfqxXVrLq16qq4XL+WbkWT8IA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACN7LtFaW0ta6Do9fJ2oek0a9pI20SqqqZh+0lvsO9TeZV+P5s9f/58NHfq1KlorqoqLYfZ2DgU79zevhzNLdJkc/RY1oa0SDPRoJ/9TDzoZrUyi3xO0nM7mUzinbu7u9HcbJa3aaX3r/R1rq7m1096rE9++Yvxzo3VrOHs6NGsja2qqtfNdk4WaG66GN5r98OTMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgESEMAI0IYQBoRAgDQCNCGAAa2XeHXreb5XVaR1hVcW/eYJBVXVVVdedZ7dl4mtelPfnko9HcyZMno7kzZ85Ec1VVm5sHo7lDR/LqssFwFM0d3NyMdy5f3I7mBsPleOeB9fVorjPZieYu72RzVXk94CJVhmlV32iUXT9VC96/AuMFzs+lS5eiueWVvIZ1OMrOz3CY14zOZ9k52tnJzk9VVX+Bz/W1eBIGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgESEMAI0IYQBoZN/1GWmbyDxsQqqqGg6H0Vza8FJV1Q+bWra2s9adqqrP/6/PR3OPPfF4NHdg/UA0V1W1EbYorS2wc3VtI5o7eDCbq6paWVuN5nbHe/HO7e3sMzbdza69c+fORnNV
Vd1O9vP75mb+nvS62WezM8yfNXZ2LkdzvbB1bjYZR3NVVf1Odq89tJFd61VVo352r93dPh/v7HazXOh18kasyThvHLsWT8IA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaGTfVYZpreDeXl7tNh5ntV69sI6wqqofV5Dlr/PcxQvR3CT8EWplI6+TO3zk+mjutju+K965srYWzQ2Hg3jn8soomptM8+tg9+KlaG5W2etcXc1qKavyyr1eZ9+3nKt0qpMN9vP7wXyeVdhtX8iq+pbyQ60Dy9m5vbSzQPVreH/vdheoBuxnlY2LPHFe3s0rJq/FkzAANCKEAaARIQwAjQhhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0Aj+67d6Pezho7z57M2kaqqra2taG5lZSXeeeRw1iwzm07ineNx2EQSvieHjxyJ5qqqDh3OZjcPHYp3DkfL0dzG5nq88+abb4rmzpz9arzzkS98MZrrD8LzcyBvUerMp9HcdJy358xmWdtPb5A1wFXl197ycnYPmmxnjWpVVTvb2X2k18mfxWbj7L53OWynqqqqXnYddHp5g1dlxU374kkYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgkQVqJfZnOMwbTCaTrKFjeTlrPqmquC1jMs1aZaqqpmE7THqwBzY2wn1VRxZoYEodPpw1MB09dl28c2PjQDT33Oln4p2XL2fNMrtb2VzajFZVtb6yFM3NFviczGdZS9BkbxzvnIb3oIOHDkdzvYP5Z/O5Z74SzW3t5A1wvcpmh70Fmq26vWiu0+nEOwf9V+551ZMwADQihAGgESEMAI0IYQBoRAgDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI0IYABp5xasMDxzIKuGqqlZWVqK5ReoTZ5Os9qzfz+q1np/N3oZxWLM2GefVbun7eTisdquqes1rXh3NHTq0Ge8cT3ajueEgv/bWDmQ1drOwiW5r+3I2WFU7l7ejudXlrAKxqqrXz87tZJxVPVZVra+vR3NLw/DWOs1rBVc3s6rH3Z2teOflrXPRXG+W1wqOJ1kd5mye3/cGowXqca/BkzAANCKEAaARIQwAjQhhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0Aj+6766PWylqBOJ2/LSGen06xlo6qq181+LtnY2Ix3HjiQNbU8depUNDccjaK5qqrjx49Hc4evz+ael10Hi1x7m5tZW9Qdd9wR75yM59Hcl3YfieYub1+K5qqqzp07Hc3NZtm1XlV1y82vygbneTPR1lZ2jnZ3sxau57761WiuqmoWNjCtbOYNZ1thm9beNLvWq/I2pEXa4/phg9d+eBIGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgESEMAI0IYQBoRAgDQCP7rjJMa+Fms1k0V1W1t7eXzS1QWbU03PcpuUJagfi87Nym9ZKHD+fVZcvLy9Hc5uZGvHM9rHocjvL6sekkq8NcpD5xdXUtmtvdy673re3L0dzzszvR3MXzF+Kd6bk9dv3BeOfOTnaOZvPsWNc2DkVzVVUXL2bndrZArWB/tBLNdbr5zpXlsIp1lldajpaW4tlr8SQMAI0IYQBoRAgDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI0IYABoRwgDQyL4rg7oLtQRlhsOsBSdtF6qq6nWz9pPd8W68Mz2zK2GzR6+bNUVVVV28lLXKTPMyrRoOBtFc3mdUNQ+Pd75Aa9g8PEndbna9z+d5k03aVLZ18WK888uPPhrNjfeOxTvT9q/lldVobqWT32eXlrNGoxpvxztH
YS6Md7binfNpdq/t9/L7XneBdrRrfu9X7DsDAC9LCANAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADSy726nTljltEgF4iCssFukom1rO6v1mi1QnHdgfS2a64TVd7NpNFZVVb1BVtG2u5dV31VVXd7J6hN73azqsapqPp1EcxfPnIt3njt9JprbvnQpmru8lVfYDQZhzeggrxm9uJ3V3+3sZe9lVVWnl73OvXG2c2lpOZqrqjp06HA0158diHduLGfHe/7s6Xjn+bPPRXP9/NKrlZWwJnIfPAkDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0su8WpUXakFKj0SiaG4/zxp7ZLGsmWlnN2oWqqtbXwhalcN+gnzXDVFWNRlkz0d4C78k8fE/6vbw2ZTtt05rmFVWD3r4/jlfohg1nOzs70VxV1WyWtQT1+9lrXGTn+fMX4p2bmwejud3d3WjuwIHsWq+qWlrKPptbu/l1UN3s2psPsnt7VdX1x2+N5lZHeUNVVf6+XIsnYQBoRAgDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0su9escFgEC3ohDVri+xcpC6tk1Y25q15dfBgVpdW4blNa9aq2lRa7u5lNYjjSV4rOA0rCdP6zaqqI0cOR3MbGweiuX/7t6eiuaqqnbD+rtudxzun06xO7itf+Uq8M617nM+z17nIZ/Pgwc1ortPLPyfHjl4fzW2sH4p3rh7IPiero/V4Z6eTn6Nr8SQMAI0IYQBoRAgDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI0IYABoRwgDQyL7rhnq9rCZokRalRWZTaQPT3mQv3nn06NFobm1tLZobT7JWoqqq5eXlaG40zNuFer3sZ8XJeBLv3N6+HM0t0uC1ubkZzV13XdZkMxp9OZqrqrq8sxXNLXJ+ti6lbVr5dXD27Nlo7sCBrNnq3Llz0VxV3vi0N8ubm86cuxDN3Xbrd8Q7+/3VaG51NWtfqqra3Mzez/3wJAwAjQhhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANDIvitNBoNBtGA+n0dzVXlz0yI7d3ezRpHJNG9qufHGG6K5m1/1qmjuzOkz0VxV1XQ6jeaGo2G8czRaimdTyytZW1Rnlp2fqqrL29vR3A03ZNfPt33bTdFcVdXlnUvR3Hyef05Go6yJq7vA/WBrK2uLGg6z6/26666L5qqqVlezdqHtney6q6o6f/5cNPfYo4/FO8c7WWPdwbX1eGdnM7sf7IcnYQBoRAgDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0su8qw61LWXXZ0nJeAZVWEnY6nXjnIKwg6wzyn2c2lg5Ec69//eujuX88+b+juaqqL37xi9Hc69ZfF++s2oymdnezyrOqqs31rPZsZSmvbJxMno3m/u0rz0Rzw7AasCqv6jt9+ly8c5bWRDaoU00rEI8dOxbNVVXdfvt3RHM7O1l9a1XVhXPno7nZZBzvTG+1k93sWKuqLl185Z5XPQkDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0su8WpaefeCxacNNNr4rmqqp6YZPNLC9NqeFwEM0t9fNGmgpLn157x2uiuQvntrOFVfU/H/5sNHdgPT8/3fkkmltfy66fqqrOPGvsmU7z1rDRSja7fmAjm9vYjOaqqnb3svOzl72VVVU1Cz/Ys2nYvlQLNLLNspagzmwn21dV4+2sJejgxuF453UHs9nJOG9umofndpK2cFXV
pbBFcD88CQNAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgkX1XGS71srwehXNVVTXJqqf6g2G8ctDPqgx7g32fyqvs7Wa1XsNuL5r7b6/7rmiuquqfHj4ZzV04/Uy88/OXsoq20SivFXztnXdGc1vbq/HO648ei+Y2Dm1GczccPx7NPT/7bdHcyvqBeOfmRjb76KNfjHdePHs6mltZWormlofZZ7qqauvsV6O5XiffeeToTdHcSljbWVU1Hu9Fc9MFKi23d/LqxWvxJAwAjQhhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANDIvqt/hoOsXWi8lzVeVFXthi1Km4fyho75fB7Npc0eVVXdbvaz0CB8T65fy5t+/vtdd0Vze9PsvFZVPfvVrMnm5D/9U7xzNsnez++4/fZ45/pq9r6M+lkLzub6WjRXVfVfvitrmfrSl74U77w9PLevffWr451f/uL/ieYGNYnmbr7xumiuqqpXs2huNy8Xql4vu/bSe1dVVb+fNdal9/aqqvNjLUoA8E1HCANAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADSy706oM2fORAvGk6xeq6rq0HVH49nUAm1XsU63E80Nh8Ov85Fc2yjc2ckvg3rNq8N6wNk43vnM/30ymht18164rQvnormNg1n93d4CNaO9sKrv4IGVeOdXns7ek1tvuS3e+b3/9XujuZ2tC9Hc2lL+mZ6Gdap70+y9rKrqhZWEk0m+s9PJ7peLVBn2B1l94n54EgaARoQwADQihAGgESEMAI0IYQBoRAgDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhk39UQS0tL0YLpLG+VGY1G8Wwua9ro9Xrxxm90K8h0gQaTCo+1FzZFVVXtbG9Fc7e+6ni8c7JzMJrb29mOdz779BPR3Hg3a4uajPPrYHdvN5q77uBGvPPChYvR3HPPfiXeubF+IJrr97Lnm9l8gc9J+H5OZws0GoX3vX7/lWsleindbn6PPnTw0NfxSK7kSRgAGhHCANCIEAaARoQwADQihAGgESEMAI0IYQBoRAgDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaCRfVdZHDlyJFowmUVjVVU1nWYNTFtbWetOVdVoOWuL6vXzho60gSltUcqmntcL22F63bw1ZT7NWl66C7zS1dWswWvcyRqNqqqePPdcNHdpmF2zy0vL0VxV1fIwu2ZHo0G+c7AZzW1dzhqfqqpqns0OhyvR3KyTt86Nwzak8d5evLPTze4HaStfVVWF971JeB+pqpovkGPX4kkYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgESEMAI3su19ubX09WjBeoMtwFlZW9cN6raqq5eWs3i2t+Kuqms2yc9TpdLK5aGqx6bSusapqMs3qAXsLXAc7l85/Q+eqqjrzrGpteZjVRK4tZ3WNVVWDYVZJ2O0s8DkJ++QGwwWeNcLPWC9s7kzveVVVo+VhNLfIvWvQz66D9N5VVbU3zu4HaTVuVVV3gfvXNb/3K/adAYCXJYQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgESEMAI0IYQBoRAgDQCP77vroD5eiBcOlsE6kqtL+pdFS1oRUVbU8zJpIZvO8oWM2zV7pLGwF6czyYx0MstaU8SxrPqmqmkyydqEFCmlqMs+u2/7SSrxzrZOd242NtWzhAo1GaQtOr5/fD4Zhk82ok18I6euchSun4b2gqqoXHuvF3d14Z2eeXUODXnatV1XNe+G1t8j1Hk9emydhAGhECANAI0IYABoRwgDQiBAGgEaEMAA0IoQBoBEh
DACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADSy716xTi+r+OuH1XdVVd1uWJO1wM75NKzNm2VzVVXdsIJsPN6L5ibjRY4162gbL1Cf2OuPorn+/i/vq6Ttd9Np/nNtv3M5mhuOstcZNmE+Lzw/nQXq5Lrd7HV2u3kRXVplmNZvzhb4nHTD/sTRAvWS/X5WL1kLvM6lYXZ/ny/Qbbq3l91r98OTMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgESEMAI0IYQBoRAgDQCP7rs/oh00bizQapdLmk6qqTtjcNJ/mDR07Ozvf0LleL2w+qaqVldVorjMdxzunk+z97M7z62B5bS2a63Ty672/nX3GJmEd0nCwFM1VVXU72bF2Ovm1l7aqzeezeOd4nF23aetO+hqrqpaWsvdzdTX7TFdVTcNrb5FWojRT0veyarF75rV4EgaARoQwADQihAGgESEMAI0IYQBoRAgDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI/vuI9va2ooWLFJlOJ9n9YCzWV5d1u+PorkLFy7EO5977rloLj23R48ejeaqqtKWyPS9rKoaDLP3ZLoziXcuryxHcyvLeS3ccJS9n7t7WZ1cWk9aVVXz7Of3bjevhMsrSheoNg13pvegRWpY86rH/LOZWiQX0tm0drGqajgcxrPX4kkYABoRwgDQiBAGgEaEMAA0IoQBoBEhDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgkQVqVPZnkeaKS5cuhTvzFqWtrazl5avPPhPvnEyytp/V1ayxp9fLm2x2Lu9Ec+N5fh10u1mDyWyBdpi0NWw2zVtw+oPs4zgJT+3e3m42WFW98D0ZDLJGrKqqTid7ZpjN8vdkPL4cze3sZJ+TRRqN0s/1Qm1aoUVaidK2qEXue6/kOfIkDACNCGEAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgESEMAI0IYQBoRAgDQCNCGAAaEcIA0IgQBoBG9t3PdN1110ULxuNxNFdVderUqWywk1eXbRxYi+YGg0G8sxMeb1rNldb0VVV1w53Dtax2sapqNsuqKff29uKdk73sut3Zznf2hlldWq+X1cLt7CxwfiZZ5d7hQ/nP/cNhVoM4m+fVpum1NxqFxxruW0R6/6nKj3eRXEirXxd5nen7uR+ehAGgESEMAI0IYQBoRAgDQCNCGAAaEcIA0IgQBoBGhDAANCKEAaARIQwAjQhhAGhECANAI0IYABrpzOfzrA4FAFiIJ2EAaEQIA0AjQhgAGhHCANCIEAaARoQwADQihAGgESEMAI0IYQBo5P8BXC4HsFCG9rUAAAAASUVORK5CYII=",
      "text/plain": [
       "<Figure size 600x600 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "batch_size = 64\n",
    "\n",
    "def one_hot_collate(batch):\n",
    "    data = torch.stack([item[0] for item in batch])\n",
    "    labels = torch.tensor([item[1] for item in batch])\n",
    "    one_hot_labels = torch.zeros(labels.size(0), 10)  # 10 classes in CIFAR-10, e.g. [0, 1, 0, 0, ...]\n",
    "    one_hot_labels.scatter_(1, labels.unsqueeze(1), 1)\n",
    "    return data, one_hot_labels\n",
    "\n",
    "trLoader = torch.utils.data.DataLoader(trX, batch_size=batch_size, shuffle=True, num_workers=0, collate_fn=one_hot_collate)\n",
    "cvLoader = torch.utils.data.DataLoader(cvX, batch_size=batch_size, shuffle=False, num_workers=0, collate_fn=one_hot_collate)\n",
    "teLoader = torch.utils.data.DataLoader(testset, batch_size=batch_size, shuffle=False, num_workers=0, collate_fn=one_hot_collate)\n",
    "\n",
    "# Get a batch of training data\n",
    "dataiter = iter(trLoader)\n",
    "data, labels = next(dataiter)\n",
    "\n",
    "image_channels = data[0].numpy().shape[0]\n",
    "print(f'image_channels is {image_channels}')\n",
    "print(labels[0,:])\n",
    "\n",
    "# Label text of CIFAR-10\n",
    "label_text = ['airplane', 'automobile', 'bird', 'cat', 'deer', \n",
    "              'dog', 'frog', 'horse', 'ship', 'truck']\n",
    "\n",
    "# Plot one image from the batch\n",
    "plt.figure(figsize=(6, 6))\n",
    "# Modify the imshow line to handle RGB images correctly\n",
    "plt.imshow(data[0].permute(1, 2, 0).numpy())  # Rearrange from (3,32,32) to (32,32,3)\n",
    "plt.title(f'Label: {label_text[labels[0].argmax().item()]}')\n",
    "plt.axis('off')\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 迁移ResNet微调FNN层"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\ProgramData\\anaconda3\\envs\\machinelearning\\lib\\site-packages\\torchvision\\models\\_utils.py:208: UserWarning: The parameter 'pretrained' is deprecated since 0.13 and may be removed in the future, please use 'weights' instead.\n",
      "  warnings.warn(\n",
      "C:\\ProgramData\\anaconda3\\envs\\machinelearning\\lib\\site-packages\\torchvision\\models\\_utils.py:223: UserWarning: Arguments other than a weight enum or `None` for 'weights' are deprecated since 0.13 and may be removed in the future. The current behavior is equivalent to passing `weights=ResNet18_Weights.IMAGENET1K_V1`. You can also use `weights=ResNet18_Weights.DEFAULT` to get the most up-to-date weights.\n",
      "  warnings.warn(msg)\n",
      "Downloading: \"https://download.pytorch.org/models/resnet18-f37072fd.pth\" to C:\\Users\\Administrator/.cache\\torch\\hub\\checkpoints\\resnet18-f37072fd.pth\n",
      "100.0%"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "ResNet(\n",
      "  (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n",
      "  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "  (relu): ReLU(inplace=True)\n",
      "  (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n",
      "  (layer1): Sequential(\n",
      "    (0): BasicBlock(\n",
      "      (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "    )\n",
      "    (1): BasicBlock(\n",
      "      (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "    )\n",
      "  )\n",
      "  (layer2): Sequential(\n",
      "    (0): BasicBlock(\n",
      "      (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (downsample): Sequential(\n",
      "        (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
      "        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      )\n",
      "    )\n",
      "    (1): BasicBlock(\n",
      "      (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "    )\n",
      "  )\n",
      "  (layer3): Sequential(\n",
      "    (0): BasicBlock(\n",
      "      (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (downsample): Sequential(\n",
      "        (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
      "        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      )\n",
      "    )\n",
      "    (1): BasicBlock(\n",
      "      (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "    )\n",
      "  )\n",
      "  (layer4): Sequential(\n",
      "    (0): BasicBlock(\n",
      "      (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (downsample): Sequential(\n",
      "        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
      "        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      )\n",
      "    )\n",
      "    (1): BasicBlock(\n",
      "      (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "    )\n",
      "  )\n",
      "  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))\n",
      "  (fc): Linear(in_features=512, out_features=1000, bias=True)\n",
      ")\n",
      "ResNet(\n",
      "  (conv1): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n",
      "  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "  (relu): ReLU(inplace=True)\n",
      "  (maxpool): Identity()\n",
      "  (layer1): Sequential(\n",
      "    (0): BasicBlock(\n",
      "      (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "    )\n",
      "    (1): BasicBlock(\n",
      "      (conv1): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "    )\n",
      "  )\n",
      "  (layer2): Sequential(\n",
      "    (0): BasicBlock(\n",
      "      (conv1): Conv2d(64, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (downsample): Sequential(\n",
      "        (0): Conv2d(64, 128, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
      "        (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      )\n",
      "    )\n",
      "    (1): BasicBlock(\n",
      "      (conv1): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "    )\n",
      "  )\n",
      "  (layer3): Sequential(\n",
      "    (0): BasicBlock(\n",
      "      (conv1): Conv2d(128, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (downsample): Sequential(\n",
      "        (0): Conv2d(128, 256, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
      "        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      )\n",
      "    )\n",
      "    (1): BasicBlock(\n",
      "      (conv1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "    )\n",
      "  )\n",
      "  (layer4): Sequential(\n",
      "    (0): BasicBlock(\n",
      "      (conv1): Conv2d(256, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (downsample): Sequential(\n",
      "        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n",
      "        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      )\n",
      "    )\n",
      "    (1): BasicBlock(\n",
      "      (conv1): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "      (relu): ReLU(inplace=True)\n",
      "      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n",
      "      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n",
      "    )\n",
      "  )\n",
      "  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))\n",
      "  (fc): Linear(in_features=512, out_features=10, bias=True)\n",
      ")\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    }
   ],
   "source": [
    "import torch.nn as nn\n",
    "\n",
    "# 1. Load the ImageNet-pretrained ResNet-18 model\n",
    "model = torchvision.models.resnet18(pretrained=True)\n",
    "print(model)\n",
    "# 2. Replace the input layer: a 3x3 stride-1 conv suits 32x32 CIFAR-10 images better than the original 7x7 stride-2 stem (input stays 3-channel RGB)\n",
    "model.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1)\n",
    "\n",
    "# 3. Remove the first max-pooling layer so small 32x32 feature maps are not downsampled too early\n",
    "model.maxpool = nn.Identity() # nn.Conv2d(64, 64, 1, 1, 1)\n",
    "\n",
    "# 4. Replace the output layer to match the task's number of classes\n",
    "model.fc = nn.Linear(model.fc.in_features, 10)  # 10 classes in CIFAR-10\n",
    "\n",
    "# Print the modified model architecture\n",
    "print(model)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "微调ResNet18"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/5, Training Loss: 2.3144\n",
      "Epoch 1/5, Training Loss: 2.1852\n",
      "Epoch 1/5, Training Loss: 2.0827\n",
      "Epoch 1/5, Training Loss: 2.2441\n",
      "Epoch 1/5, Training Loss: 1.9825\n",
      "Epoch 1/5, Training Loss: 1.6930\n",
      "Epoch 1/5, Training Loss: 2.2052\n",
      "Epoch 1/5, Training Loss: 1.8826\n",
      "Epoch 1/5, Training Loss: 1.7442\n",
      "Epoch 1/5, Training Loss: 1.7337\n",
      "Epoch 1/5, Training Loss: 1.8306\n",
      "Epoch 1/5, Training Loss: 1.5802\n",
      "Epoch 1/5, Training Loss: 1.8012\n",
      "Epoch 1/5, Training Loss: 1.9382\n",
      "Epoch 1/5, Training Loss: 1.7825\n",
      "Epoch 1/5, Training Loss: 1.7174\n",
      "Epoch 1/5, Training Loss: 1.8596\n",
      "Epoch 1/5, Training Loss: 1.8931\n",
      "Epoch 1/5, Training Loss: 1.6140\n",
      "Epoch 1/5, Training Loss: 1.4446\n",
      "Epoch 1/5, Training Loss: 1.7971\n",
      "Epoch 1/5, Training Loss: 1.6436\n",
      "Epoch 1/5, Training Loss: 1.6495\n",
      "Epoch 1/5, Training Loss: 1.5259\n",
      "Epoch 1/5, Training Loss: 1.4249\n",
      "Epoch 1/5, Training Loss: 1.7766\n",
      "Epoch 1/5, Training Loss: 1.2768\n",
      "Epoch 1/5, Training Loss: 1.7683\n",
      "Epoch 1/5, Training Loss: 1.3617\n",
      "Epoch 1/5, Training Loss: 1.3073\n",
      "Epoch 1/5, Training Loss: 1.5886\n",
      "Epoch 1/5, Training Loss: 1.5964\n",
      "Epoch 1/5, Training Loss: 1.4159\n",
      "Epoch 1/5, Training Loss: 1.0825\n",
      "Epoch 1/5, Training Loss: 1.4307\n",
      "Epoch 1/5, Training Loss: 1.3890\n",
      "Epoch 1/5, Training Loss: 1.3337\n",
      "Epoch 1/5, Training Loss: 1.1788\n",
      "Epoch 1/5, Training Loss: 1.5769\n",
      "Epoch 1/5, Training Loss: 1.4290\n",
      "Epoch 1/5, Training Loss: 1.2119\n",
      "Epoch 1/5, Training Loss: 1.1966\n",
      "Epoch 1/5, Training Loss: 1.4446\n",
      "Epoch 1/5, Training Loss: 1.0413\n",
      "Epoch 1/5, Training Loss: 0.9936\n",
      "Epoch 1/5, Training Loss: 1.4009\n",
      "Epoch 1/5, Training Loss: 1.1713\n",
      "Epoch 1/5, Training Loss: 1.4800\n",
      "Epoch 1/5, Training Loss: 1.2648\n",
      "Epoch 1/5, Training Loss: 0.9822\n",
      "Epoch 1/5, Training Loss: 1.3625\n",
      "Epoch 1/5, Training Loss: 1.2160\n",
      "Epoch 1/5, Training Loss: 1.3136\n",
      "Epoch 1/5, Training Loss: 1.0941\n",
      "Epoch 1/5, Training Loss: 1.1205\n",
      "Epoch 1/5, Training Loss: 1.1513\n",
      "Epoch 1/5, Training Loss: 1.1551\n",
      "Epoch 1/5, Training Loss: 1.1155\n",
      "Epoch 1/5, Training Loss: 1.2939\n",
      "Epoch 1/5, Training Loss: 1.2674\n",
      "Epoch 1/5, Training Loss: 1.3758\n",
      "Epoch 1/5, Training Loss: 1.3126\n",
      "Epoch 1/5, Training Loss: 1.4623\n",
      "Epoch 1/5, Training Loss: 1.2622\n",
      "Epoch 1/5, Training Loss: 1.1406\n",
      "Epoch 1/5, Training Loss: 1.4776\n",
      "Epoch 1/5, Training Loss: 0.9793\n",
      "Epoch 1/5, Training Loss: 1.2489\n",
      "Epoch 1/5, Training Loss: 1.1707\n",
      "Epoch 1/5, Training Loss: 1.1217\n",
      "Epoch 1/5, Training Loss: 0.9479\n",
      "Epoch 1/5, Training Loss: 1.1545\n",
      "Epoch 1/5, Training Loss: 0.9300\n",
      "Epoch 1/5, Training Loss: 1.1990\n",
      "Epoch 1/5, Training Loss: 1.2533\n",
      "Epoch 1/5, Training Loss: 0.8959\n",
      "Epoch 1/5, Training Loss: 1.1881\n",
      "Epoch 1/5, Training Loss: 1.1604\n",
      "Epoch 1/5, Training Loss: 1.2279\n",
      "Epoch 1/5, Training Loss: 1.4752\n",
      "Epoch 1/5, Training Loss: 1.4153\n",
      "Epoch 1/5, Training Loss: 1.0952\n",
      "Epoch 1/5, Training Loss: 1.1720\n",
      "Epoch 1/5, Training Loss: 1.2355\n",
      "Epoch 1/5, Training Loss: 1.0540\n",
      "Epoch 1/5, Training Loss: 0.9677\n",
      "Epoch 1/5, Training Loss: 1.0755\n",
      "Epoch 1/5, Training Loss: 0.9518\n",
      "Epoch 1/5, Training Loss: 1.1363\n",
      "Epoch 1/5, Training Loss: 1.1512\n",
      "Epoch 1/5, Training Loss: 1.0681\n",
      "Epoch 1/5, Training Loss: 1.1461\n",
      "Epoch 1/5, Training Loss: 1.3877\n",
      "Epoch 1/5, Training Loss: 0.9878\n",
      "Epoch 1/5, Training Loss: 0.7485\n",
      "Epoch 1/5, Training Loss: 1.1471\n",
      "Epoch 1/5, Training Loss: 0.8332\n",
      "Epoch 1/5, Training Loss: 0.9250\n",
      "Epoch 1/5, Training Loss: 1.0908\n",
      "Epoch 1/5, Training Loss: 0.9490\n",
      "Epoch 1/5, Training Loss: 0.9077\n",
      "Epoch 1/5, Training Loss: 0.9419\n",
      "Epoch 1/5, Training Loss: 1.3725\n",
      "Epoch 1/5, Training Loss: 1.0686\n",
      "Epoch 1/5, Training Loss: 1.1445\n",
      "Epoch 1/5, Training Loss: 0.9345\n",
      "Epoch 1/5, Training Loss: 0.9188\n",
      "Epoch 1/5, Training Loss: 0.8935\n",
      "Epoch 1/5, Training Loss: 0.8813\n",
      "Epoch 1/5, Training Loss: 0.8527\n",
      "Epoch 1/5, Training Loss: 1.0672\n",
      "Epoch 1/5, Training Loss: 0.9776\n",
      "Epoch 1/5, Training Loss: 0.9026\n",
      "Epoch 1/5, Training Loss: 0.9789\n",
      "Epoch 1/5, Training Loss: 1.1287\n",
      "Epoch 1/5, Training Loss: 1.1489\n",
      "Epoch 1/5, Training Loss: 1.1984\n",
      "Epoch 1/5, Training Loss: 0.9927\n",
      "Epoch 1/5, Training Loss: 0.7473\n",
      "Epoch 1/5, Training Loss: 0.9882\n",
      "Epoch 1/5, Training Loss: 0.7385\n",
      "Epoch 1/5, Training Loss: 0.7475\n",
      "Epoch 1/5, Training Loss: 1.1244\n",
      "Epoch 1/5, Training Loss: 1.0561\n",
      "Epoch 1/5, Training Loss: 0.8207\n",
      "Epoch 1/5, Training Loss: 0.8597\n",
      "Epoch 1/5, Training Loss: 1.0328\n",
      "Epoch 1/5, Training Loss: 0.9051\n",
      "Epoch 1/5, Training Loss: 0.9229\n",
      "Epoch 1/5, Training Loss: 0.8278\n",
      "Epoch 1/5, Training Loss: 1.1271\n",
      "Epoch 1/5, Training Loss: 0.9505\n",
      "Epoch 1/5, Training Loss: 0.9280\n",
      "Epoch 1/5, Training Loss: 1.0619\n",
      "Epoch 1/5, Training Loss: 1.0639\n",
      "Epoch 1/5, Training Loss: 1.0166\n",
      "Epoch 1/5, Training Loss: 0.8527\n",
      "Epoch 1/5, Training Loss: 0.7865\n",
      "Epoch 1/5, Training Loss: 0.8494\n",
      "Epoch 1/5, Training Loss: 0.8426\n",
      "Epoch 1/5, Training Loss: 1.0849\n",
      "Epoch 1/5, Training Loss: 0.8660\n",
      "Epoch 1/5, Training Loss: 0.8775\n",
      "Epoch 1/5, Training Loss: 0.9118\n",
      "Epoch 1/5, Training Loss: 0.6455\n",
      "Epoch 1/5, Training Loss: 0.6766\n",
      "Epoch 1/5, Training Loss: 1.0770\n",
      "Epoch 1/5, Training Loss: 0.7580\n",
      "Epoch 1/5, Training Loss: 0.8452\n",
      "Epoch 1/5, Training Loss: 0.7257\n",
      "Epoch 1/5, Training Loss: 0.8723\n",
      "Epoch 1/5, Training Loss: 1.0150\n",
      "Epoch 1/5, Training Loss: 0.6980\n",
      "Epoch 1/5, Training Loss: 0.7056\n",
      "Epoch 1/5, Training Loss: 0.8062\n",
      "Epoch 1/5, Training Loss: 1.2158\n",
      "Epoch 1/5, Training Loss: 0.8343\n",
      "Epoch 1/5, Training Loss: 1.0023\n",
      "Epoch 1/5, Training Loss: 0.8907\n",
      "Epoch 1/5, Training Loss: 0.9710\n",
      "Epoch 1/5, Training Loss: 0.8962\n",
      "Epoch 1/5, Training Loss: 0.9550\n",
      "Epoch 1/5, Training Loss: 0.9157\n",
      "Epoch 1/5, Training Loss: 0.9172\n",
      "Epoch 1/5, Training Loss: 0.9173\n",
      "Epoch 1/5, Training Loss: 0.9194\n",
      "Epoch 1/5, Training Loss: 0.9423\n",
      "Epoch 1/5, Training Loss: 1.1268\n",
      "Epoch 1/5, Training Loss: 0.9207\n",
      "Epoch 1/5, Training Loss: 0.9118\n",
      "Epoch 1/5, Training Loss: 0.8420\n",
      "Epoch 1/5, Training Loss: 0.9792\n",
      "Epoch 1/5, Training Loss: 0.8052\n",
      "Epoch 1/5, Training Loss: 1.0248\n",
      "Epoch 1/5, Training Loss: 1.1007\n",
      "Epoch 1/5, Training Loss: 0.8805\n",
      "Epoch 1/5, Training Loss: 0.9395\n",
      "Epoch 1/5, Training Loss: 0.6960\n",
      "Epoch 1/5, Training Loss: 0.6653\n",
      "Epoch 1/5, Training Loss: 0.7439\n",
      "Epoch 1/5, Training Loss: 0.8840\n",
      "Epoch 1/5, Training Loss: 0.7975\n",
      "Epoch 1/5, Training Loss: 0.9885\n",
      "Epoch 1/5, Training Loss: 0.7915\n",
      "Epoch 1/5, Training Loss: 0.5454\n",
      "Epoch 1/5, Training Loss: 0.7471\n",
      "Epoch 1/5, Training Loss: 0.9689\n",
      "Epoch 1/5, Training Loss: 0.9311\n",
      "Epoch 1/5, Training Loss: 0.5472\n",
      "Epoch 1/5, Training Loss: 0.9729\n",
      "Epoch 1/5, Training Loss: 1.0518\n",
      "Epoch 1/5, Training Loss: 0.7045\n",
      "Epoch 1/5, Training Loss: 0.8635\n",
      "Epoch 1/5, Training Loss: 1.0516\n",
      "Epoch 1/5, Training Loss: 0.8585\n",
      "Epoch 1/5, Training Loss: 0.7987\n",
      "Epoch 1/5, Training Loss: 0.7284\n",
      "Epoch 1/5, Training Loss: 0.9289\n",
      "Epoch 1/5, Training Loss: 0.7295\n",
      "Epoch 1/5, Training Loss: 0.5524\n",
      "Epoch 1/5, Training Loss: 0.8132\n",
      "Epoch 1/5, Training Loss: 0.6047\n",
      "Epoch 1/5, Training Loss: 0.9307\n",
      "Epoch 1/5, Training Loss: 0.7100\n",
      "Epoch 1/5, Training Loss: 0.8241\n",
      "Epoch 1/5, Training Loss: 0.8715\n",
      "Epoch 1/5, Training Loss: 1.1446\n",
      "Epoch 1/5, Training Loss: 0.7646\n",
      "Epoch 1/5, Training Loss: 0.8602\n",
      "Epoch 1/5, Training Loss: 0.7544\n",
      "Epoch 1/5, Training Loss: 0.9883\n",
      "Epoch 1/5, Training Loss: 1.2138\n",
      "Epoch 1/5, Training Loss: 1.0791\n",
      "Epoch 1/5, Training Loss: 0.6454\n",
      "Epoch 1/5, Training Loss: 0.7905\n",
      "Epoch 1/5, Training Loss: 0.8299\n",
      "Epoch 1/5, Training Loss: 0.8445\n",
      "Epoch 1/5, Training Loss: 1.0049\n",
      "Epoch 1/5, Training Loss: 0.6708\n",
      "Epoch 1/5, Training Loss: 0.8715\n",
      "Epoch 1/5, Training Loss: 0.9817\n",
      "Epoch 1/5, Training Loss: 1.0205\n",
      "Epoch 1/5, Training Loss: 0.5538\n",
      "Epoch 1/5, Training Loss: 0.7193\n",
      "Epoch 1/5, Training Loss: 0.7557\n",
      "Epoch 1/5, Training Loss: 0.5848\n",
      "Epoch 1/5, Training Loss: 0.8757\n",
      "Epoch 1/5, Training Loss: 0.7818\n",
      "Epoch 1/5, Training Loss: 0.6666\n",
      "Epoch 1/5, Training Loss: 0.8098\n",
      "Epoch 1/5, Training Loss: 0.9651\n",
      "Epoch 1/5, Training Loss: 0.9430\n",
      "Epoch 1/5, Training Loss: 0.5960\n",
      "Epoch 1/5, Training Loss: 0.7519\n",
      "Epoch 1/5, Training Loss: 0.7146\n",
      "Epoch 1/5, Training Loss: 0.5405\n",
      "Epoch 1/5, Training Loss: 0.6553\n",
      "Epoch 1/5, Training Loss: 0.7104\n",
      "Epoch 1/5, Training Loss: 0.6357\n",
      "Epoch 1/5, Training Loss: 0.7035\n",
      "Epoch 1/5, Training Loss: 0.7210\n",
      "Epoch 1/5, Training Loss: 0.6024\n",
      "Epoch 1/5, Training Loss: 0.8726\n",
      "Epoch 1/5, Training Loss: 0.7979\n",
      "Epoch 1/5, Training Loss: 0.4289\n",
      "Epoch 1/5, Training Loss: 1.0810\n",
      "Epoch 1/5, Training Loss: 0.8569\n",
      "Epoch 1/5, Training Loss: 0.7784\n",
      "Epoch 1/5, Training Loss: 0.8831\n",
      "Epoch 1/5, Training Loss: 0.9903\n",
      "Epoch 1/5, Training Loss: 0.8389\n",
      "Epoch 1/5, Training Loss: 0.7257\n",
      "Epoch 1/5, Training Loss: 0.8339\n",
      "Epoch 1/5, Training Loss: 0.8366\n",
      "Epoch 1/5, Training Loss: 0.9381\n",
      "Epoch 1/5, Training Loss: 0.7057\n",
      "Epoch 1/5, Training Loss: 0.8072\n",
      "Epoch 1/5, Training Loss: 0.7014\n",
      "Epoch 1/5, Training Loss: 0.6069\n",
      "Epoch 1/5, Training Loss: 0.8383\n",
      "Epoch 1/5, Training Loss: 0.6206\n",
      "Epoch 1/5, Training Loss: 0.7704\n",
      "Epoch 1/5, Training Loss: 0.6075\n",
      "Epoch 1/5, Training Loss: 0.6700\n",
      "Epoch 1/5, Training Loss: 0.7049\n",
      "Epoch 1/5, Training Loss: 0.7895\n",
      "Epoch 1/5, Training Loss: 0.8831\n",
      "Epoch 1/5, Training Loss: 0.6772\n",
      "Epoch 1/5, Training Loss: 0.8369\n",
      "Epoch 1/5, Training Loss: 0.7791\n",
      "Epoch 1/5, Training Loss: 0.5547\n",
      "Epoch 1/5, Training Loss: 0.8054\n",
      "Epoch 1/5, Training Loss: 0.7165\n",
      "Epoch 1/5, Training Loss: 0.7354\n",
      "Epoch 1/5, Training Loss: 0.8429\n",
      "Epoch 1/5, Training Loss: 0.4647\n",
      "Epoch 1/5, Training Loss: 0.6797\n",
      "Epoch 1/5, Training Loss: 0.8479\n",
      "Epoch 1/5, Training Loss: 0.6935\n",
      "Epoch 1/5, Training Loss: 0.8038\n",
      "Epoch 1/5, Training Loss: 0.6583\n",
      "Epoch 1/5, Training Loss: 0.8620\n",
      "Epoch 1/5, Training Loss: 0.7159\n",
      "Epoch 1/5, Training Loss: 0.7147\n",
      "Epoch 1/5, Training Loss: 0.8938\n",
      "Epoch 1/5, Training Loss: 0.8848\n",
      "Epoch 1/5, Training Loss: 0.5610\n",
      "Epoch 1/5, Training Loss: 0.7594\n",
      "Epoch 1/5, Training Loss: 0.5446\n",
      "Epoch 1/5, Training Loss: 0.7291\n",
      "Epoch 1/5, Training Loss: 0.6605\n",
      "Epoch 1/5, Training Loss: 0.6815\n",
      "Epoch 1/5, Training Loss: 0.7697\n",
      "Epoch 1/5, Training Loss: 0.8603\n",
      "Epoch 1/5, Training Loss: 0.6331\n",
      "Epoch 1/5, Training Loss: 1.0850\n",
      "Epoch 1/5, Training Loss: 0.7698\n",
      "Epoch 1/5, Training Loss: 0.9101\n",
      "Epoch 1/5, Training Loss: 0.4947\n",
      "Epoch 1/5, Training Loss: 0.7267\n",
      "Epoch 1/5, Training Loss: 0.9231\n",
      "Epoch 1/5, Training Loss: 0.8656\n",
      "Epoch 1/5, Training Loss: 0.7640\n",
      "Epoch 1/5, Training Loss: 1.1042\n",
      "Epoch 1/5, Training Loss: 0.5920\n",
      "Epoch 1/5, Training Loss: 1.0791\n",
      "Epoch 1/5, Training Loss: 0.8265\n",
      "Epoch 1/5, Training Loss: 0.7230\n",
      "Epoch 1/5, Training Loss: 0.5444\n",
      "Epoch 1/5, Training Loss: 0.4137\n",
      "Epoch 1/5, Training Loss: 0.8407\n",
      "Epoch 1/5, Training Loss: 0.7485\n",
      "Epoch 1/5, Training Loss: 0.9716\n",
      "Epoch 1/5, Training Loss: 0.8030\n",
      "Epoch 1/5, Training Loss: 0.7630\n",
      "Epoch 1/5, Training Loss: 0.8840\n",
      "Epoch 1/5, Training Loss: 0.5221\n",
      "Epoch 1/5, Training Loss: 1.1599\n",
      "Epoch 1/5, Training Loss: 0.6618\n",
      "Epoch 1/5, Training Loss: 0.7846\n",
      "Epoch 1/5, Training Loss: 0.7469\n",
      "Epoch 1/5, Training Loss: 0.5460\n",
      "Epoch 1/5, Training Loss: 0.6648\n",
      "Epoch 1/5, Training Loss: 0.6431\n",
      "Epoch 1/5, Training Loss: 0.7648\n",
      "Epoch 1/5, Training Loss: 0.6512\n",
      "Epoch 1/5, Training Loss: 0.6370\n",
      "Epoch 1/5, Training Loss: 0.8889\n",
      "Epoch 1/5, Training Loss: 1.0282\n",
      "Epoch 1/5, Training Loss: 0.5700\n",
      "Epoch 1/5, Training Loss: 0.7441\n",
      "Epoch 1/5, Training Loss: 0.6595\n",
      "Epoch 1/5, Training Loss: 0.5409\n",
      "Epoch 1/5, Training Loss: 1.0580\n",
      "Epoch 1/5, Training Loss: 0.6868\n",
      "Epoch 1/5, Training Loss: 0.7946\n",
      "Epoch 1/5, Training Loss: 0.8366\n",
      "Epoch 1/5, Training Loss: 0.7965\n",
      "Epoch 1/5, Training Loss: 0.6106\n",
      "Epoch 1/5, Training Loss: 0.4157\n",
      "Epoch 1/5, Training Loss: 0.4522\n",
      "Epoch 1/5, Training Loss: 0.8433\n",
      "Epoch 1/5, Training Loss: 0.7866\n",
      "Epoch 1/5, Training Loss: 0.6215\n",
      "Epoch 1/5, Training Loss: 0.9562\n",
      "Epoch 1/5, Training Loss: 0.6325\n",
      "Epoch 1/5, Training Loss: 0.7723\n",
      "Epoch 1/5, Training Loss: 0.6216\n",
      "Epoch 1/5, Training Loss: 0.9719\n",
      "Epoch 1/5, Training Loss: 0.5060\n",
      "Epoch 1/5, Training Loss: 0.6563\n",
      "Epoch 1/5, Training Loss: 0.6921\n",
      "Epoch 1/5, Training Loss: 0.7265\n",
      "Epoch 1/5, Training Loss: 0.5289\n",
      "Epoch 1/5, Training Loss: 0.6629\n",
      "Epoch 1/5, Training Loss: 0.7836\n",
      "Epoch 1/5, Training Loss: 0.9557\n",
      "Epoch 1/5, Training Loss: 0.6688\n",
      "Epoch 1/5, Training Loss: 0.8970\n",
      "Epoch 1/5, Training Loss: 0.5226\n",
      "Epoch 1/5, Training Loss: 0.5404\n",
      "Epoch 1/5, Training Loss: 0.5858\n",
      "Epoch 1/5, Training Loss: 0.6759\n",
      "Epoch 1/5, Training Loss: 0.7244\n",
      "Epoch 1/5, Training Loss: 0.6447\n",
      "Epoch 1/5, Training Loss: 0.7204\n",
      "Epoch 1/5, Training Loss: 0.8636\n",
      "Epoch 1/5, Training Loss: 0.9099\n",
      "Epoch 1/5, Training Loss: 0.7735\n",
      "Epoch 1/5, Training Loss: 0.4753\n",
      "Epoch 1/5, Training Loss: 0.5428\n",
      "Epoch 1/5, Training Loss: 0.8317\n",
      "Epoch 1/5, Training Loss: 0.5654\n",
      "Epoch 1/5, Training Loss: 0.8113\n",
      "Epoch 1/5, Training Loss: 0.6560\n",
      "Epoch 1/5, Training Loss: 0.7950\n",
      "Epoch 1/5, Training Loss: 0.3334\n",
      "Epoch 1/5, Training Loss: 0.5231\n",
      "Epoch 1/5, Training Loss: 0.4994\n",
      "Epoch 1/5, Training Loss: 0.6394\n",
      "Epoch 1/5, Training Loss: 0.8917\n",
      "Epoch 1/5, Training Loss: 0.6153\n",
      "Epoch 1/5, Training Loss: 0.8282\n",
      "Epoch 1/5, Training Loss: 0.4771\n",
      "Epoch 1/5, Training Loss: 0.3949\n",
      "Epoch 1/5, Training Loss: 0.6915\n",
      "Epoch 1/5, Training Loss: 0.5544\n",
      "Epoch 1/5, Training Loss: 0.6347\n",
      "Epoch 1/5, Training Loss: 0.8746\n",
      "Epoch 1/5, Training Loss: 0.7496\n",
      "Epoch 1/5, Training Loss: 0.7857\n",
      "Epoch 1/5, Training Loss: 0.7443\n",
      "Epoch 1/5, Training Loss: 0.5885\n",
      "Epoch 1/5, Training Loss: 0.7882\n",
      "Epoch 1/5, Training Loss: 0.5316\n",
      "Epoch 1/5, Training Loss: 0.6066\n",
      "Epoch 1/5, Training Loss: 0.6075\n",
      "Epoch 1/5, Training Loss: 0.6288\n",
      "Epoch 1/5, Training Loss: 0.6025\n",
      "Epoch 1/5, Training Loss: 0.6377\n",
      "Epoch 1/5, Training Loss: 0.7996\n",
      "Epoch 1/5, Training Loss: 0.5880\n",
      "Epoch 1/5, Training Loss: 0.6999\n",
      "Epoch 1/5, Training Loss: 0.9753\n",
      "Epoch 1/5, Training Loss: 0.7657\n",
      "Epoch 1/5, Training Loss: 0.7117\n",
      "Epoch 1/5, Training Loss: 0.5848\n",
      "Epoch 1/5, Training Loss: 0.7633\n",
      "Epoch 1/5, Training Loss: 0.5038\n",
      "Epoch 1/5, Training Loss: 0.7956\n",
      "Epoch 1/5, Training Loss: 0.4712\n",
      "Epoch 1/5, Training Loss: 0.6721\n",
      "Epoch 1/5, Training Loss: 0.6890\n",
      "Epoch 1/5, Training Loss: 0.8275\n",
      "Epoch 1/5, Training Loss: 0.6757\n",
      "Epoch 1/5, Training Loss: 0.6839\n",
      "Epoch 1/5, Training Loss: 0.6662\n",
      "Epoch 1/5, Training Loss: 0.6595\n",
      "Epoch 1/5, Training Loss: 0.6253\n",
      "Epoch 1/5, Training Loss: 0.5483\n",
      "Epoch 1/5, Training Loss: 0.7880\n",
      "Epoch 1/5, Training Loss: 0.6844\n",
      "Epoch 1/5, Training Loss: 0.5587\n",
      "Epoch 1/5, Training Loss: 0.5410\n",
      "Epoch 1/5, Training Loss: 0.9332\n",
      "Epoch 1/5, Training Loss: 0.4786\n",
      "Epoch 1/5, Training Loss: 0.5023\n",
      "Epoch 1/5, Training Loss: 0.6396\n",
      "Epoch 1/5, Training Loss: 0.4979\n",
      "Epoch 1/5, Training Loss: 0.4862\n",
      "Epoch 1/5, Training Loss: 0.6717\n",
      "Epoch 1/5, Training Loss: 0.7481\n",
      "Epoch 1/5, Training Loss: 0.7015\n",
      "Epoch 1/5, Training Loss: 0.5752\n",
      "Epoch 1/5, Training Loss: 0.5881\n",
      "Epoch 1/5, Training Loss: 0.8963\n",
      "Epoch 1/5, Training Loss: 0.7597\n",
      "Epoch 1/5, Training Loss: 0.7969\n",
      "Epoch 1/5, Training Loss: 0.5039\n",
      "Epoch 1/5, Training Loss: 0.5206\n",
      "Epoch 1/5, Training Loss: 0.8414\n",
      "Epoch 1/5, Training Loss: 0.5726\n",
      "Epoch 1/5, Training Loss: 0.7684\n",
      "Epoch 1/5, Training Loss: 0.6691\n",
      "Epoch 1/5, Training Loss: 0.6947\n",
      "Epoch 1/5, Training Loss: 0.6412\n",
      "Epoch 1/5, Training Loss: 0.7486\n",
      "Epoch 1/5, Training Loss: 0.6341\n",
      "Epoch 1/5, Training Loss: 0.6292\n",
      "Epoch 1/5, Training Loss: 0.7521\n",
      "Epoch 1/5, Training Loss: 0.8407\n",
      "Epoch 1/5, Training Loss: 0.8045\n",
      "Epoch 1/5, Training Loss: 0.6133\n",
      "Epoch 1/5, Training Loss: 0.5364\n",
      "Epoch 1/5, Training Loss: 0.6998\n",
      "Epoch 1/5, Training Loss: 0.7577\n",
      "Epoch 1/5, Training Loss: 0.8198\n",
      "Epoch 1/5, Training Loss: 0.6616\n",
      "Epoch 1/5, Training Loss: 0.5393\n",
      "Epoch 1/5, Training Loss: 0.4940\n",
      "Epoch 1/5, Training Loss: 0.4957\n",
      "Epoch 1/5, Training Loss: 0.8095\n",
      "Epoch 1/5, Training Loss: 0.6589\n",
      "Epoch 1/5, Training Loss: 0.7763\n",
      "Epoch 1/5, Training Loss: 0.6820\n",
      "Epoch 1/5, Training Loss: 0.5704\n",
      "Epoch 1/5, Training Loss: 0.5769\n",
      "Epoch 1/5, Training Loss: 0.3965\n",
      "Epoch 1/5, Training Loss: 0.6686\n",
      "Epoch 1/5, Training Loss: 0.3714\n",
      "Epoch 1/5, Training Loss: 0.4781\n",
      "Epoch 1/5, Training Loss: 0.6221\n",
      "Epoch 1/5, Training Loss: 0.5525\n",
      "Epoch 1/5, Training Loss: 0.8976\n",
      "Epoch 1/5, Training Loss: 0.6261\n",
      "Epoch 1/5, Training Loss: 0.6593\n",
      "Epoch 1/5, Training Loss: 0.7660\n",
      "Epoch 1/5, Training Loss: 0.6078\n",
      "Epoch 1/5, Training Loss: 0.5616\n",
      "Epoch 1/5, Training Loss: 0.6615\n",
      "Epoch 1/5, Training Loss: 0.7045\n",
      "Epoch 1/5, Training Loss: 0.7721\n",
      "Epoch 1/5, Training Loss: 0.8547\n",
      "Epoch 1/5, Training Loss: 0.6937\n",
      "Epoch 1/5, Training Loss: 0.7699\n",
      "Epoch 1/5, Training Loss: 0.4614\n",
      "Epoch 1/5, Training Loss: 0.5000\n",
      "Epoch 1/5, Training Loss: 0.7100\n",
      "Epoch 1/5, Training Loss: 0.6752\n",
      "Epoch 1/5, Training Loss: 0.6044\n",
      "Epoch 1/5, Training Loss: 0.7127\n",
      "Epoch 1/5, Training Loss: 0.5970\n",
      "Epoch 1/5, Training Loss: 0.5711\n",
      "Epoch 1/5, Training Loss: 0.5116\n",
      "Epoch 1/5, Training Loss: 0.5488\n",
      "Epoch 1/5, Training Loss: 0.3664\n",
      "Epoch 1/5, Training Loss: 0.8579\n",
      "Epoch 1/5, Training Loss: 0.6513\n",
      "Epoch 1/5, Training Loss: 0.4789\n",
      "Epoch 1/5, Training Loss: 0.5850\n",
      "Epoch 1/5, Training Loss: 0.4551\n",
      "Epoch 1/5, Training Loss: 0.5145\n",
      "Epoch 1/5, Training Loss: 0.6113\n",
      "Epoch 1/5, Training Loss: 0.4452\n",
      "Epoch 1/5, Training Loss: 0.8000\n",
      "Epoch 1/5, Training Loss: 0.7404\n",
      "Epoch 1/5, Training Loss: 0.4773\n",
      "Epoch 1/5, Training Loss: 0.5147\n",
      "Epoch 1/5, Training Loss: 0.4991\n",
      "Epoch 1/5, Training Loss: 0.4857\n",
      "Epoch 1/5, Training Loss: 0.7849\n",
      "Epoch 1/5, Training Loss: 0.7965\n",
      "Epoch 1/5, Training Loss: 0.4398\n",
      "Epoch 1/5, Training Loss: 0.6586\n",
      "Epoch 1/5, Training Loss: 0.6602\n",
      "Epoch 1/5, Training Loss: 0.6089\n",
      "Epoch 1/5, Training Loss: 0.7898\n",
      "Epoch 1/5, Training Loss: 0.7648\n",
      "Epoch 1/5, Training Loss: 0.4742\n",
      "Epoch 1/5, Training Loss: 0.4746\n",
      "Epoch 1/5, Training Loss: 0.5442\n",
      "Epoch 1/5, Training Loss: 0.5488\n",
      "Epoch 1/5, Training Loss: 0.6022\n",
      "Epoch 1/5, Training Loss: 0.5108\n",
      "Epoch 1/5, Training Loss: 0.4779\n",
      "Epoch 1/5, Training Loss: 0.5678\n",
      "Epoch 1/5, Training Loss: 0.4728\n",
      "Epoch 1/5, Training Loss: 0.5027\n",
      "Epoch 1/5, Training Loss: 0.5768\n",
      "Epoch 1/5, Training Loss: 0.7008\n",
      "Epoch 1/5, Training Loss: 0.6853\n",
      "Epoch 1/5, Training Loss: 0.5575\n",
      "Epoch 1/5, Training Loss: 0.5719\n",
      "Epoch 1/5, Training Loss: 0.6031\n",
      "Epoch 1/5, Training Loss: 0.6230\n",
      "Epoch 1/5, Training Loss: 0.6441\n",
      "Epoch 1/5, Training Loss: 0.4684\n",
      "Epoch 1/5, Training Loss: 0.5942\n",
      "Epoch 1/5, Training Loss: 0.6171\n",
      "Epoch 1/5, Training Loss: 0.5685\n",
      "Epoch 1/5, Training Loss: 0.4822\n",
      "Epoch 1/5, Training Loss: 0.6174\n",
      "Epoch 1/5, Training Loss: 0.4722\n",
      "Epoch 1/5, Training Loss: 0.6009\n",
      "Epoch 1/5, Training Loss: 0.3742\n",
      "Epoch 1/5, Training Loss: 0.4795\n",
      "Epoch 1/5, Training Loss: 0.5660\n",
      "Epoch 1/5, Training Loss: 0.7747\n",
      "Epoch 1/5, Training Loss: 0.7827\n",
      "Epoch 1/5, Training Loss: 0.5522\n",
      "Epoch 1/5, Training Loss: 0.5832\n",
      "Epoch 1/5, Training Loss: 0.7778\n",
      "Epoch 1/5, Training Loss: 0.5526\n",
      "Epoch 1/5, Training Loss: 0.4245\n",
      "Epoch 1/5, Training Loss: 0.7048\n",
      "Epoch 1/5, Training Loss: 1.0127\n",
      "Epoch 1/5, Training Loss: 0.6079\n",
      "Epoch 1/5, Training Loss: 0.8235\n",
      "Epoch 1/5, Training Loss: 0.8027\n",
      "Epoch 1/5, Training Loss: 0.8424\n",
      "Epoch 1/5, Training Loss: 0.6017\n",
      "Epoch 1/5, Training Loss: 0.4154\n",
      "Epoch 1/5, Training Loss: 0.5423\n",
      "Epoch 1/5, Training Loss: 0.6926\n",
      "Epoch 1/5, Training Loss: 1.0349\n",
      "Epoch 1/5, Training Loss: 0.6604\n",
      "Epoch 1/5, Training Loss: 0.6558\n",
      "Epoch 1/5, Training Loss: 0.9046\n",
      "Epoch 1/5, Training Loss: 0.5884\n",
      "Epoch 1/5, Training Loss: 0.6866\n",
      "Epoch 1/5, Training Loss: 1.0334\n",
      "Epoch 1/5, Training Loss: 0.5959\n",
      "Epoch 1/5, Training Loss: 0.6421\n",
      "Epoch 1/5, Training Loss: 0.4549\n",
      "Epoch 1/5, Training Loss: 0.6132\n",
      "Epoch 1/5, Training Loss: 0.6518\n",
      "Epoch 1/5, Training Loss: 0.5942\n",
      "Epoch 1/5, Training Loss: 0.7394\n",
      "Epoch 1/5, Training Loss: 0.4944\n",
      "Epoch 1/5, Training Loss: 0.4642\n",
      "Epoch 1/5, Training Loss: 0.6739\n",
      "Epoch 1/5, Training Loss: 0.5142\n",
      "Epoch 1/5, Training Loss: 0.5690\n",
      "Epoch 1/5, Training Loss: 0.5809\n",
      "Epoch 1/5, Training Loss: 0.6775\n",
      "Epoch 1/5, Training Loss: 0.3672\n",
      "Epoch 1/5, Training Loss: 0.9188\n",
      "Epoch 1/5, Training Loss: 0.5322\n",
      "Epoch 1/5, Training Loss: 0.5394\n",
      "Epoch 1/5, Training Loss: 0.5946\n",
      "Epoch 1/5, Training Loss: 0.5952\n",
      "Epoch 1/5, Training Loss: 0.6727\n",
      "Epoch 1/5, Training Loss: 0.7094\n",
      "Epoch 1/5, Training Loss: 0.7855\n",
      "Epoch 1/5, Training Loss: 0.4499\n",
      "Epoch 1/5, Training Loss: 0.6095\n",
      "Epoch 1/5, Training Loss: 0.5470\n",
      "Epoch 1/5, Training Loss: 0.4726\n",
      "Epoch 1/5, Training Loss: 0.6320\n",
      "Epoch 1/5, Training Loss: 0.7478\n",
      "Epoch 1/5, Training Loss: 0.4353\n",
      "Epoch 1/5, Training Loss: 0.5859\n",
      "Epoch 1/5, Training Loss: 0.7227\n",
      "Epoch 1/5, Training Loss: 0.8165\n",
      "Epoch 1/5, Training Loss: 0.9017\n",
      "Epoch 1/5, Training Loss: 0.4847\n",
      "Epoch 1/5, Training Loss: 0.3771\n",
      "Epoch 1/5, Training Loss: 0.5678\n",
      "Epoch 1/5, Training Loss: 0.6257\n",
      "Epoch 1/5, Training Loss: 0.4644\n",
      "Epoch 1/5, Training Loss: 0.6602\n",
      "Epoch 1/5, Training Loss: 0.6505\n",
      "Epoch 1/5, Training Loss: 0.5081\n",
      "Epoch 1/5, Training Loss: 0.6069\n",
      "Epoch 1/5, Training Loss: 0.6107\n",
      "Epoch 1/5, Training Loss: 0.7361\n",
      "Epoch 1/5, Training Loss: 0.8162\n",
      "Epoch 1/5, Training Loss: 0.4578\n",
      "Epoch 1/5, Training Loss: 0.5434\n",
      "Epoch 1/5, Training Loss: 0.5207\n",
      "Epoch 1/5, Training Loss: 0.7391\n",
      "Epoch 1/5, Training Loss: 0.5360\n",
      "Epoch 1/5, Training Loss: 0.7429\n",
      "Epoch 1/5, Training Loss: 0.5864\n",
      "Epoch 1/5, Training Loss: 0.7476\n",
      "Epoch 1/5, Training Loss: 0.7476, Cross-Validation Loss: 0.5486\n",
      "Epoch 2/5, Training Loss: 0.4073\n",
      "Epoch 2/5, Training Loss: 0.3986\n",
      "Epoch 2/5, Training Loss: 0.5206\n",
      "Epoch 2/5, Training Loss: 0.3106\n",
      "Epoch 2/5, Training Loss: 0.4137\n",
      "Epoch 2/5, Training Loss: 0.5209\n",
      "Epoch 2/5, Training Loss: 0.3942\n",
      "Epoch 2/5, Training Loss: 0.5022\n",
      "Epoch 2/5, Training Loss: 0.3676\n",
      "Epoch 2/5, Training Loss: 0.6718\n",
      "Epoch 2/5, Training Loss: 0.6020\n",
      "Epoch 2/5, Training Loss: 0.4103\n",
      "Epoch 2/5, Training Loss: 0.4280\n",
      "Epoch 2/5, Training Loss: 0.5953\n",
      "Epoch 2/5, Training Loss: 0.4119\n",
      "Epoch 2/5, Training Loss: 0.3555\n",
      "Epoch 2/5, Training Loss: 0.7171\n",
      "Epoch 2/5, Training Loss: 0.7839\n",
      "Epoch 2/5, Training Loss: 0.9281\n",
      "Epoch 2/5, Training Loss: 0.2329\n",
      "Epoch 2/5, Training Loss: 0.4245\n",
      "Epoch 2/5, Training Loss: 0.5361\n",
      "Epoch 2/5, Training Loss: 0.2684\n",
      "Epoch 2/5, Training Loss: 0.7753\n",
      "Epoch 2/5, Training Loss: 0.4173\n",
      "Epoch 2/5, Training Loss: 0.7434\n",
      "Epoch 2/5, Training Loss: 0.2799\n",
      "Epoch 2/5, Training Loss: 0.4316\n",
      "Epoch 2/5, Training Loss: 0.4202\n",
      "Epoch 2/5, Training Loss: 0.5431\n",
      "Epoch 2/5, Training Loss: 0.4748\n",
      "Epoch 2/5, Training Loss: 0.4630\n",
      "Epoch 2/5, Training Loss: 0.5306\n",
      "Epoch 2/5, Training Loss: 0.4223\n",
      "Epoch 2/5, Training Loss: 0.3714\n",
      "Epoch 2/5, Training Loss: 0.4968\n",
      "Epoch 2/5, Training Loss: 0.7744\n",
      "Epoch 2/5, Training Loss: 0.5085\n",
      "Epoch 2/5, Training Loss: 0.4114\n",
      "Epoch 2/5, Training Loss: 0.3779\n",
      "Epoch 2/5, Training Loss: 0.3617\n",
      "Epoch 2/5, Training Loss: 0.5890\n",
      "Epoch 2/5, Training Loss: 0.4362\n",
      "Epoch 2/5, Training Loss: 0.4385\n",
      "Epoch 2/5, Training Loss: 0.5177\n",
      "Epoch 2/5, Training Loss: 0.4652\n",
      "Epoch 2/5, Training Loss: 0.7066\n",
      "Epoch 2/5, Training Loss: 0.4717\n",
      "Epoch 2/5, Training Loss: 0.5418\n",
      "Epoch 2/5, Training Loss: 0.3878\n",
      "Epoch 2/5, Training Loss: 0.3557\n",
      "Epoch 2/5, Training Loss: 0.5461\n",
      "Epoch 2/5, Training Loss: 0.8758\n",
      "Epoch 2/5, Training Loss: 0.4467\n",
      "Epoch 2/5, Training Loss: 0.5368\n",
      "Epoch 2/5, Training Loss: 0.9345\n",
      "Epoch 2/5, Training Loss: 0.4529\n",
      "Epoch 2/5, Training Loss: 0.4267\n",
      "Epoch 2/5, Training Loss: 0.3097\n",
      "Epoch 2/5, Training Loss: 0.5550\n",
      "Epoch 2/5, Training Loss: 0.7338\n",
      "Epoch 2/5, Training Loss: 0.3271\n",
      "Epoch 2/5, Training Loss: 0.8363\n",
      "Epoch 2/5, Training Loss: 0.5509\n",
      "Epoch 2/5, Training Loss: 0.5716\n",
      "Epoch 2/5, Training Loss: 0.3512\n",
      "Epoch 2/5, Training Loss: 0.4763\n",
      "Epoch 2/5, Training Loss: 0.5071\n",
      "Epoch 2/5, Training Loss: 0.3809\n",
      "Epoch 2/5, Training Loss: 0.3702\n",
      "Epoch 2/5, Training Loss: 0.2914\n",
      "Epoch 2/5, Training Loss: 0.8353\n",
      "Epoch 2/5, Training Loss: 0.5304\n",
      "Epoch 2/5, Training Loss: 0.5386\n",
      "Epoch 2/5, Training Loss: 0.5603\n",
      "Epoch 2/5, Training Loss: 0.8102\n",
      "Epoch 2/5, Training Loss: 0.7263\n",
      "Epoch 2/5, Training Loss: 0.4854\n",
      "Epoch 2/5, Training Loss: 0.4406\n",
      "Epoch 2/5, Training Loss: 0.5241\n",
      "Epoch 2/5, Training Loss: 0.4546\n",
      "Epoch 2/5, Training Loss: 0.5212\n",
      "Epoch 2/5, Training Loss: 0.5603\n",
      "Epoch 2/5, Training Loss: 0.6970\n",
      "Epoch 2/5, Training Loss: 0.5918\n",
      "Epoch 2/5, Training Loss: 0.4811\n",
      "Epoch 2/5, Training Loss: 0.3079\n",
      "Epoch 2/5, Training Loss: 0.4720\n",
      "Epoch 2/5, Training Loss: 0.5192\n",
      "Epoch 2/5, Training Loss: 0.4141\n",
      "Epoch 2/5, Training Loss: 0.5218\n",
      "Epoch 2/5, Training Loss: 0.2769\n",
      "Epoch 2/5, Training Loss: 0.4607\n",
      "Epoch 2/5, Training Loss: 0.4993\n",
      "Epoch 2/5, Training Loss: 0.5367\n",
      "Epoch 2/5, Training Loss: 0.5591\n",
      "Epoch 2/5, Training Loss: 0.6321\n",
      "Epoch 2/5, Training Loss: 0.4146\n",
      "Epoch 2/5, Training Loss: 0.3495\n",
      "Epoch 2/5, Training Loss: 0.3832\n",
      "Epoch 2/5, Training Loss: 0.3633\n",
      "Epoch 2/5, Training Loss: 0.5070\n",
      "Epoch 2/5, Training Loss: 0.5441\n",
      "Epoch 2/5, Training Loss: 0.4848\n",
      "Epoch 2/5, Training Loss: 0.7613\n",
      "Epoch 2/5, Training Loss: 0.3008\n",
      "Epoch 2/5, Training Loss: 0.5598\n",
      "Epoch 2/5, Training Loss: 0.4068\n",
      "Epoch 2/5, Training Loss: 0.2771\n",
      "Epoch 2/5, Training Loss: 0.2894\n",
      "Epoch 2/5, Training Loss: 0.4337\n",
      "Epoch 2/5, Training Loss: 0.5134\n",
      "Epoch 2/5, Training Loss: 0.5730\n",
      "Epoch 2/5, Training Loss: 0.4439\n",
      "Epoch 2/5, Training Loss: 0.5115\n",
      "Epoch 2/5, Training Loss: 0.3843\n",
      "Epoch 2/5, Training Loss: 0.7162\n",
      "Epoch 2/5, Training Loss: 0.4843\n",
      "Epoch 2/5, Training Loss: 0.4097\n",
      "Epoch 2/5, Training Loss: 0.3146\n",
      "Epoch 2/5, Training Loss: 0.3915\n",
      "Epoch 2/5, Training Loss: 0.4177\n",
      "Epoch 2/5, Training Loss: 0.3810\n",
      "Epoch 2/5, Training Loss: 0.3966\n",
      "Epoch 2/5, Training Loss: 0.2683\n",
      "Epoch 2/5, Training Loss: 0.3683\n",
      "Epoch 2/5, Training Loss: 0.3820\n",
      "Epoch 2/5, Training Loss: 0.3702\n",
      "Epoch 2/5, Training Loss: 0.4645\n",
      "Epoch 2/5, Training Loss: 0.5096\n",
      "Epoch 2/5, Training Loss: 0.3737\n",
      "Epoch 2/5, Training Loss: 0.2741\n",
      "Epoch 2/5, Training Loss: 0.3071\n",
      "Epoch 2/5, Training Loss: 0.5672\n",
      "Epoch 2/5, Training Loss: 0.2099\n",
      "Epoch 2/5, Training Loss: 0.5826\n",
      "Epoch 2/5, Training Loss: 0.5306\n",
      "Epoch 2/5, Training Loss: 0.5672\n",
      "Epoch 2/5, Training Loss: 0.2995\n",
      "Epoch 2/5, Training Loss: 0.3594\n",
      "Epoch 2/5, Training Loss: 0.3455\n",
      "Epoch 2/5, Training Loss: 0.5127\n",
      "Epoch 2/5, Training Loss: 0.4764\n",
      "Epoch 2/5, Training Loss: 0.3544\n",
      "Epoch 2/5, Training Loss: 0.2460\n",
      "Epoch 2/5, Training Loss: 0.4105\n",
      "Epoch 2/5, Training Loss: 0.5124\n",
      "Epoch 2/5, Training Loss: 0.5896\n",
      "Epoch 2/5, Training Loss: 0.5693\n",
      "Epoch 2/5, Training Loss: 0.5037\n",
      "Epoch 2/5, Training Loss: 0.4120\n",
      "Epoch 2/5, Training Loss: 0.4124\n",
      "Epoch 2/5, Training Loss: 0.4155\n",
      "Epoch 2/5, Training Loss: 0.4725\n",
      "Epoch 2/5, Training Loss: 0.5978\n",
      "Epoch 2/5, Training Loss: 0.6361\n",
      "Epoch 2/5, Training Loss: 0.3411\n",
      "Epoch 2/5, Training Loss: 0.5358\n",
      "Epoch 2/5, Training Loss: 0.7115\n",
      "Epoch 2/5, Training Loss: 0.4502\n",
      "Epoch 2/5, Training Loss: 0.5795\n",
      "Epoch 2/5, Training Loss: 0.3014\n",
      "Epoch 2/5, Training Loss: 0.4051\n",
      "Epoch 2/5, Training Loss: 0.5695\n",
      "Epoch 2/5, Training Loss: 0.3511\n",
      "Epoch 2/5, Training Loss: 0.3399\n",
      "Epoch 2/5, Training Loss: 0.3701\n",
      "Epoch 2/5, Training Loss: 0.4150\n",
      "Epoch 2/5, Training Loss: 0.7487\n",
      "Epoch 2/5, Training Loss: 0.4814\n",
      "Epoch 2/5, Training Loss: 0.4511\n",
      "Epoch 2/5, Training Loss: 0.4086\n",
      "Epoch 2/5, Training Loss: 0.5758\n",
      "Epoch 2/5, Training Loss: 0.4046\n",
      "Epoch 2/5, Training Loss: 0.3274\n",
      "Epoch 2/5, Training Loss: 0.4414\n",
      "Epoch 2/5, Training Loss: 0.4533\n",
      "Epoch 2/5, Training Loss: 0.3912\n",
      "Epoch 2/5, Training Loss: 0.5510\n",
      "Epoch 2/5, Training Loss: 0.5401\n",
      "Epoch 2/5, Training Loss: 0.4282\n",
      "Epoch 2/5, Training Loss: 0.3843\n",
      "Epoch 2/5, Training Loss: 0.6188\n",
      "Epoch 2/5, Training Loss: 0.3925\n",
      "Epoch 2/5, Training Loss: 0.5553\n",
      "Epoch 2/5, Training Loss: 0.5257\n",
      "Epoch 2/5, Training Loss: 0.4715\n",
      "Epoch 2/5, Training Loss: 0.3561\n",
      "Epoch 2/5, Training Loss: 0.5352\n",
      "Epoch 2/5, Training Loss: 0.6085\n",
      "Epoch 2/5, Training Loss: 0.4383\n",
      "Epoch 2/5, Training Loss: 0.3278\n",
      "Epoch 2/5, Training Loss: 0.3434\n",
      "Epoch 2/5, Training Loss: 0.5181\n",
      "Epoch 2/5, Training Loss: 0.6018\n",
      "Epoch 2/5, Training Loss: 0.5590\n",
      "Epoch 2/5, Training Loss: 0.4460\n",
      "Epoch 2/5, Training Loss: 0.6054\n",
      "Epoch 2/5, Training Loss: 0.5204\n",
      "Epoch 2/5, Training Loss: 0.7328\n",
      "Epoch 2/5, Training Loss: 0.3010\n",
      "Epoch 2/5, Training Loss: 0.3796\n",
      "Epoch 2/5, Training Loss: 0.3839\n",
      "Epoch 2/5, Training Loss: 0.4877\n",
      "Epoch 2/5, Training Loss: 0.5192\n",
      "Epoch 2/5, Training Loss: 0.6394\n",
      "Epoch 2/5, Training Loss: 0.2476\n",
      "Epoch 2/5, Training Loss: 0.7358\n",
      "Epoch 2/5, Training Loss: 0.4347\n",
      "Epoch 2/5, Training Loss: 0.5021\n",
      "Epoch 2/5, Training Loss: 0.4066\n",
      "Epoch 2/5, Training Loss: 0.5147\n",
      "Epoch 2/5, Training Loss: 0.4103\n",
      "Epoch 2/5, Training Loss: 0.2614\n",
      "Epoch 2/5, Training Loss: 0.5080\n",
      "Epoch 2/5, Training Loss: 0.4570\n",
      "Epoch 2/5, Training Loss: 0.3391\n",
      "Epoch 2/5, Training Loss: 0.7610\n",
      "Epoch 2/5, Training Loss: 0.4919\n",
      "Epoch 2/5, Training Loss: 0.6930\n",
      "Epoch 2/5, Training Loss: 0.3911\n",
      "Epoch 2/5, Training Loss: 0.4567\n",
      "Epoch 2/5, Training Loss: 0.5979\n",
      "Epoch 2/5, Training Loss: 0.4927\n",
      "Epoch 2/5, Training Loss: 0.3919\n",
      "Epoch 2/5, Training Loss: 0.4000\n",
      "Epoch 2/5, Training Loss: 0.4890\n",
      "Epoch 2/5, Training Loss: 0.3546\n",
      "Epoch 2/5, Training Loss: 0.3356\n",
      "Epoch 2/5, Training Loss: 0.4620\n",
      "Epoch 2/5, Training Loss: 0.5308\n",
      "Epoch 2/5, Training Loss: 0.6669\n",
      "Epoch 2/5, Training Loss: 0.4780\n",
      "Epoch 2/5, Training Loss: 0.5719\n",
      "Epoch 2/5, Training Loss: 0.2829\n",
      "Epoch 2/5, Training Loss: 0.4812\n",
      "Epoch 2/5, Training Loss: 0.6073\n",
      "Epoch 2/5, Training Loss: 0.7944\n",
      "Epoch 2/5, Training Loss: 0.3878\n",
      "Epoch 2/5, Training Loss: 0.4203\n",
      "Epoch 2/5, Training Loss: 0.3899\n",
      "Epoch 2/5, Training Loss: 0.6553\n",
      "Epoch 2/5, Training Loss: 0.5185\n",
      "Epoch 2/5, Training Loss: 0.3549\n",
      "Epoch 2/5, Training Loss: 0.8354\n",
      "Epoch 2/5, Training Loss: 0.4766\n",
      "Epoch 2/5, Training Loss: 0.4209\n",
      "Epoch 2/5, Training Loss: 0.4342\n",
      "Epoch 2/5, Training Loss: 0.5350\n",
      "Epoch 2/5, Training Loss: 0.3038\n",
      "Epoch 2/5, Training Loss: 0.3560\n",
      "Epoch 2/5, Training Loss: 0.4466\n",
      "Epoch 2/5, Training Loss: 0.5380\n",
      "Epoch 2/5, Training Loss: 0.4175\n",
      "Epoch 2/5, Training Loss: 0.3488\n",
      "Epoch 2/5, Training Loss: 0.4120\n",
      "Epoch 2/5, Training Loss: 0.4334\n",
      "Epoch 2/5, Training Loss: 0.5172\n",
      "Epoch 2/5, Training Loss: 0.7255\n",
      "Epoch 2/5, Training Loss: 0.3646\n",
      "Epoch 2/5, Training Loss: 0.3656\n",
      "Epoch 2/5, Training Loss: 0.5167\n",
      "Epoch 2/5, Training Loss: 0.4592\n",
      "Epoch 2/5, Training Loss: 0.7323\n",
      "Epoch 2/5, Training Loss: 0.3332\n",
      "Epoch 2/5, Training Loss: 0.4916\n",
      "Epoch 2/5, Training Loss: 0.5063\n",
      "Epoch 2/5, Training Loss: 0.4107\n",
      "Epoch 2/5, Training Loss: 0.4494\n",
      "Epoch 2/5, Training Loss: 0.4711\n",
      "Epoch 2/5, Training Loss: 0.2664\n",
      "Epoch 2/5, Training Loss: 0.7081\n",
      "Epoch 2/5, Training Loss: 0.3718\n",
      "Epoch 2/5, Training Loss: 0.4726\n",
      "Epoch 2/5, Training Loss: 0.7160\n",
      "Epoch 2/5, Training Loss: 0.4665\n",
      "Epoch 2/5, Training Loss: 0.5171\n",
      "Epoch 2/5, Training Loss: 0.5535\n",
      "Epoch 2/5, Training Loss: 0.4561\n",
      "Epoch 2/5, Training Loss: 0.5202\n",
      "Epoch 2/5, Training Loss: 0.7801\n",
      "Epoch 2/5, Training Loss: 0.7758\n",
      "Epoch 2/5, Training Loss: 0.3446\n",
      "Epoch 2/5, Training Loss: 0.4387\n",
      "Epoch 2/5, Training Loss: 0.5108\n",
      "Epoch 2/5, Training Loss: 0.3788\n",
      "Epoch 2/5, Training Loss: 0.3861\n",
      "Epoch 2/5, Training Loss: 0.3548\n",
      "Epoch 2/5, Training Loss: 0.3975\n",
      "Epoch 2/5, Training Loss: 0.3065\n",
      "Epoch 2/5, Training Loss: 0.6126\n",
      "Epoch 2/5, Training Loss: 0.3552\n",
      "Epoch 2/5, Training Loss: 0.4839\n",
      "Epoch 2/5, Training Loss: 0.4319\n",
      "Epoch 2/5, Training Loss: 0.5843\n",
      "Epoch 2/5, Training Loss: 0.4700\n",
      "Epoch 2/5, Training Loss: 0.3681\n",
      "Epoch 2/5, Training Loss: 0.4193\n",
      "Epoch 2/5, Training Loss: 0.6729\n",
      "Epoch 2/5, Training Loss: 0.6203\n",
      "Epoch 2/5, Training Loss: 0.3803\n",
      "Epoch 2/5, Training Loss: 0.3386\n",
      "Epoch 2/5, Training Loss: 0.4861\n",
      "Epoch 2/5, Training Loss: 0.3984\n",
      "Epoch 2/5, Training Loss: 0.5360\n",
      "Epoch 2/5, Training Loss: 0.6064\n",
      "Epoch 2/5, Training Loss: 0.5702\n",
      "Epoch 2/5, Training Loss: 0.3537\n",
      "Epoch 2/5, Training Loss: 0.5197\n",
      "Epoch 2/5, Training Loss: 0.3714\n",
      "Epoch 2/5, Training Loss: 0.6181\n",
      "Epoch 2/5, Training Loss: 0.4459\n",
      "Epoch 2/5, Training Loss: 0.5381\n",
      "Epoch 2/5, Training Loss: 0.5786\n",
      "Epoch 2/5, Training Loss: 0.4781\n",
      "Epoch 2/5, Training Loss: 0.5906\n",
      "Epoch 2/5, Training Loss: 0.4182\n",
      "Epoch 2/5, Training Loss: 0.3847\n",
      "Epoch 2/5, Training Loss: 0.3617\n",
      "Epoch 2/5, Training Loss: 0.5199\n",
      "Epoch 2/5, Training Loss: 0.3163\n",
      "Epoch 2/5, Training Loss: 0.3401\n",
      "Epoch 2/5, Training Loss: 0.6232\n",
      "Epoch 2/5, Training Loss: 0.4158\n",
      "Epoch 2/5, Training Loss: 0.6613\n",
      "Epoch 2/5, Training Loss: 0.5222\n",
      "Epoch 2/5, Training Loss: 0.4437\n",
      "Epoch 2/5, Training Loss: 0.4284\n",
      "Epoch 2/5, Training Loss: 0.4250\n",
      "Epoch 2/5, Training Loss: 0.3992\n",
      "Epoch 2/5, Training Loss: 0.6561\n",
      "Epoch 2/5, Training Loss: 0.4973\n",
      "Epoch 2/5, Training Loss: 0.7482\n",
      "Epoch 2/5, Training Loss: 0.3779\n",
      "Epoch 2/5, Training Loss: 0.5325\n",
      "Epoch 2/5, Training Loss: 0.5289\n",
      "Epoch 2/5, Training Loss: 0.2816\n",
      "Epoch 2/5, Training Loss: 0.6336\n",
      "Epoch 2/5, Training Loss: 0.4955\n",
      "Epoch 2/5, Training Loss: 0.4356\n",
      "Epoch 2/5, Training Loss: 0.5743\n",
      "Epoch 2/5, Training Loss: 0.7056\n",
      "Epoch 2/5, Training Loss: 0.5205\n",
      "Epoch 2/5, Training Loss: 0.2488\n",
      "Epoch 2/5, Training Loss: 0.5990\n",
      "Epoch 2/5, Training Loss: 0.4369\n",
      "Epoch 2/5, Training Loss: 0.6124\n",
      "Epoch 2/5, Training Loss: 0.5111\n",
      "Epoch 2/5, Training Loss: 0.3604\n",
      "Epoch 2/5, Training Loss: 0.5352\n",
      "Epoch 2/5, Training Loss: 0.6656\n",
      "Epoch 2/5, Training Loss: 0.3968\n",
      "Epoch 2/5, Training Loss: 0.5629\n",
      "Epoch 2/5, Training Loss: 0.3409\n",
      "Epoch 2/5, Training Loss: 0.5905\n",
      "Epoch 2/5, Training Loss: 0.2978\n",
      "Epoch 2/5, Training Loss: 0.4142\n",
      "Epoch 2/5, Training Loss: 0.4768\n",
      "Epoch 2/5, Training Loss: 0.4137\n",
      "Epoch 2/5, Training Loss: 0.3796\n",
      "Epoch 2/5, Training Loss: 0.5369\n",
      "Epoch 2/5, Training Loss: 0.4004\n",
      "Epoch 2/5, Training Loss: 0.6432\n",
      "Epoch 2/5, Training Loss: 0.2943\n",
      "Epoch 2/5, Training Loss: 0.4505\n",
      "Epoch 2/5, Training Loss: 0.4511\n",
      "Epoch 2/5, Training Loss: 0.4810\n",
      "Epoch 2/5, Training Loss: 0.5052\n",
      "Epoch 2/5, Training Loss: 0.3777\n",
      "Epoch 2/5, Training Loss: 0.3981\n",
      "Epoch 2/5, Training Loss: 0.3144\n",
      "Epoch 2/5, Training Loss: 0.3647\n",
      "Epoch 2/5, Training Loss: 0.5842\n",
      "Epoch 2/5, Training Loss: 0.5221\n",
      "Epoch 2/5, Training Loss: 0.4710\n",
      "Epoch 2/5, Training Loss: 0.2395\n",
      "Epoch 2/5, Training Loss: 0.4957\n",
      "Epoch 2/5, Training Loss: 0.5652\n",
      "Epoch 2/5, Training Loss: 0.4154\n",
      "Epoch 2/5, Training Loss: 0.3936\n",
      "Epoch 2/5, Training Loss: 0.4341\n",
      "Epoch 2/5, Training Loss: 0.5678\n",
      "Epoch 2/5, Training Loss: 0.5247\n",
      "Epoch 2/5, Training Loss: 0.5241\n",
      "Epoch 2/5, Training Loss: 0.4239\n",
      "Epoch 2/5, Training Loss: 0.2172\n",
      "Epoch 2/5, Training Loss: 0.3502\n",
      "Epoch 2/5, Training Loss: 0.3162\n",
      "Epoch 2/5, Training Loss: 0.4024\n",
      "Epoch 2/5, Training Loss: 0.4853\n",
      "Epoch 2/5, Training Loss: 0.5857\n",
      "Epoch 2/5, Training Loss: 0.5000\n",
      "Epoch 2/5, Training Loss: 0.4202\n",
      "Epoch 2/5, Training Loss: 0.4014\n",
      "Epoch 2/5, Training Loss: 0.3106\n",
      "Epoch 2/5, Training Loss: 0.4988\n",
      "Epoch 2/5, Training Loss: 0.6047\n",
      "Epoch 2/5, Training Loss: 0.5784\n",
      "Epoch 2/5, Training Loss: 0.3967\n",
      "Epoch 2/5, Training Loss: 0.3360\n",
      "Epoch 2/5, Training Loss: 0.3625\n",
      "Epoch 2/5, Training Loss: 0.6483\n",
      "Epoch 2/5, Training Loss: 0.4458\n",
      "Epoch 2/5, Training Loss: 0.4873\n",
      "Epoch 2/5, Training Loss: 0.6017\n",
      "Epoch 2/5, Training Loss: 0.4314\n",
      "Epoch 2/5, Training Loss: 0.3850\n",
      "Epoch 2/5, Training Loss: 0.4091\n",
      "Epoch 2/5, Training Loss: 0.4456\n",
      "Epoch 2/5, Training Loss: 0.3741\n",
      "Epoch 2/5, Training Loss: 0.2494\n",
      "Epoch 2/5, Training Loss: 0.4309\n",
      "Epoch 2/5, Training Loss: 0.4799\n",
      "Epoch 2/5, Training Loss: 0.5752\n",
      "Epoch 2/5, Training Loss: 0.4870\n",
      "Epoch 2/5, Training Loss: 0.4006\n",
      "Epoch 2/5, Training Loss: 0.6371\n",
      "Epoch 2/5, Training Loss: 0.5256\n",
      "Epoch 2/5, Training Loss: 0.7217\n",
      "Epoch 2/5, Training Loss: 0.5806\n",
      "Epoch 2/5, Training Loss: 0.3412\n",
      "Epoch 2/5, Training Loss: 0.5179\n",
      "Epoch 2/5, Training Loss: 0.2466\n",
      "Epoch 2/5, Training Loss: 0.3895\n",
      "Epoch 2/5, Training Loss: 0.4319\n",
      "Epoch 2/5, Training Loss: 0.4713\n",
      "Epoch 2/5, Training Loss: 0.3407\n",
      "Epoch 2/5, Training Loss: 0.3059\n",
      "Epoch 2/5, Training Loss: 0.4427\n",
      "Epoch 2/5, Training Loss: 0.3140\n",
      "Epoch 2/5, Training Loss: 0.3707\n",
      "Epoch 2/5, Training Loss: 0.3966\n",
      "Epoch 2/5, Training Loss: 0.3926\n",
      "Epoch 2/5, Training Loss: 0.4341\n",
      "Epoch 2/5, Training Loss: 0.6068\n",
      "Epoch 2/5, Training Loss: 0.4409\n",
      "Epoch 2/5, Training Loss: 0.3177\n",
      "Epoch 2/5, Training Loss: 0.6353\n",
      "Epoch 2/5, Training Loss: 0.5899\n",
      "Epoch 2/5, Training Loss: 0.4602\n",
      "Epoch 2/5, Training Loss: 0.3353\n",
      "Epoch 2/5, Training Loss: 0.2448\n",
      "Epoch 2/5, Training Loss: 0.4776\n",
      "Epoch 2/5, Training Loss: 0.2783\n",
      "Epoch 2/5, Training Loss: 0.5872\n",
      "Epoch 2/5, Training Loss: 0.4653\n",
      "Epoch 2/5, Training Loss: 0.4537\n",
      "Epoch 2/5, Training Loss: 0.6448\n",
      "Epoch 2/5, Training Loss: 0.3722\n",
      "Epoch 2/5, Training Loss: 0.4964\n",
      "Epoch 2/5, Training Loss: 0.6718\n",
      "Epoch 2/5, Training Loss: 0.4298\n",
      "Epoch 2/5, Training Loss: 0.3670\n",
      "Epoch 2/5, Training Loss: 0.4623\n",
      "Epoch 2/5, Training Loss: 0.3774\n",
      "Epoch 2/5, Training Loss: 0.3316\n",
      "Epoch 2/5, Training Loss: 0.4360\n",
      "Epoch 2/5, Training Loss: 0.2620\n",
      "Epoch 2/5, Training Loss: 0.7025\n",
      "Epoch 2/5, Training Loss: 0.3885\n",
      "Epoch 2/5, Training Loss: 0.5266\n",
      "Epoch 2/5, Training Loss: 0.4117\n",
      "Epoch 2/5, Training Loss: 0.5121\n",
      "Epoch 2/5, Training Loss: 0.6780\n",
      "Epoch 2/5, Training Loss: 0.3393\n",
      "Epoch 2/5, Training Loss: 0.3582\n",
      "Epoch 2/5, Training Loss: 0.4897\n",
      "Epoch 2/5, Training Loss: 0.4454\n",
      "Epoch 2/5, Training Loss: 0.2289\n",
      "Epoch 2/5, Training Loss: 0.3546\n",
      "Epoch 2/5, Training Loss: 0.6362\n",
      "Epoch 2/5, Training Loss: 0.4166\n",
      "Epoch 2/5, Training Loss: 0.3803\n",
      "Epoch 2/5, Training Loss: 0.4146\n",
      "Epoch 2/5, Training Loss: 0.4247\n",
      "Epoch 2/5, Training Loss: 0.5062\n",
      "Epoch 2/5, Training Loss: 0.4164\n",
      "Epoch 2/5, Training Loss: 0.3561\n",
      "Epoch 2/5, Training Loss: 0.5152\n",
      "Epoch 2/5, Training Loss: 0.3637\n",
      "Epoch 2/5, Training Loss: 0.5425\n",
      "Epoch 2/5, Training Loss: 0.2819\n",
      "Epoch 2/5, Training Loss: 0.5692\n",
      "Epoch 2/5, Training Loss: 0.3097\n",
      "Epoch 2/5, Training Loss: 0.2410\n",
      "Epoch 2/5, Training Loss: 0.4927\n",
      "Epoch 2/5, Training Loss: 0.4527\n",
      "Epoch 2/5, Training Loss: 0.5880\n",
      "Epoch 2/5, Training Loss: 0.5059\n",
      "Epoch 2/5, Training Loss: 0.4172\n",
      "Epoch 2/5, Training Loss: 0.2428\n",
      "Epoch 2/5, Training Loss: 0.3568\n",
      "Epoch 2/5, Training Loss: 0.1565\n",
      "Epoch 2/5, Training Loss: 0.3531\n",
      "Epoch 2/5, Training Loss: 0.5739\n",
      "Epoch 2/5, Training Loss: 0.4960\n",
      "Epoch 2/5, Training Loss: 0.4363\n",
      "Epoch 2/5, Training Loss: 0.4188\n",
      "Epoch 2/5, Training Loss: 0.3551\n",
      "Epoch 2/5, Training Loss: 0.3360\n",
      "Epoch 2/5, Training Loss: 0.3936\n",
      "Epoch 2/5, Training Loss: 0.4461\n",
      "Epoch 2/5, Training Loss: 0.5849\n",
      "Epoch 2/5, Training Loss: 0.4528\n",
      "Epoch 2/5, Training Loss: 0.4857\n",
      "Epoch 2/5, Training Loss: 0.3694\n",
      "Epoch 2/5, Training Loss: 0.5154\n",
      "Epoch 2/5, Training Loss: 0.5939\n",
      "Epoch 2/5, Training Loss: 0.4951\n",
      "Epoch 2/5, Training Loss: 0.3630\n",
      "Epoch 2/5, Training Loss: 0.5629\n",
      "Epoch 2/5, Training Loss: 0.2139\n",
      "Epoch 2/5, Training Loss: 0.3109\n",
      "Epoch 2/5, Training Loss: 0.3450\n",
      "Epoch 2/5, Training Loss: 0.4001\n",
      "Epoch 2/5, Training Loss: 0.3972\n",
      "Epoch 2/5, Training Loss: 0.5256\n",
      "Epoch 2/5, Training Loss: 0.4218\n",
      "Epoch 2/5, Training Loss: 0.2670\n",
      "Epoch 2/5, Training Loss: 0.4748\n",
      "Epoch 2/5, Training Loss: 0.5274\n",
      "Epoch 2/5, Training Loss: 0.4380\n",
      "Epoch 2/5, Training Loss: 0.4706\n",
      "Epoch 2/5, Training Loss: 1.0835\n",
      "Epoch 2/5, Training Loss: 0.3636\n",
      "Epoch 2/5, Training Loss: 0.3042\n",
      "Epoch 2/5, Training Loss: 0.6110\n",
      "Epoch 2/5, Training Loss: 0.3651\n",
      "Epoch 2/5, Training Loss: 0.3338\n",
      "Epoch 2/5, Training Loss: 0.3401\n",
      "Epoch 2/5, Training Loss: 0.3116\n",
      "Epoch 2/5, Training Loss: 0.3770\n",
      "Epoch 2/5, Training Loss: 0.5185\n",
      "Epoch 2/5, Training Loss: 0.5733\n",
      "Epoch 2/5, Training Loss: 0.4654\n",
      "Epoch 2/5, Training Loss: 0.4480\n",
      "Epoch 2/5, Training Loss: 0.4224\n",
      "Epoch 2/5, Training Loss: 0.3409\n",
      "Epoch 2/5, Training Loss: 0.2922\n",
      "Epoch 2/5, Training Loss: 0.3656\n",
      "Epoch 2/5, Training Loss: 0.6460\n",
      "Epoch 2/5, Training Loss: 0.5302\n",
      "Epoch 2/5, Training Loss: 0.5642\n",
      "Epoch 2/5, Training Loss: 0.8022\n",
      "Epoch 2/5, Training Loss: 0.2181\n",
      "Epoch 2/5, Training Loss: 0.2614\n",
      "Epoch 2/5, Training Loss: 0.6385\n",
      "Epoch 2/5, Training Loss: 0.4251\n",
      "Epoch 2/5, Training Loss: 0.3919\n",
      "Epoch 2/5, Training Loss: 0.5846\n",
      "Epoch 2/5, Training Loss: 0.3377\n",
      "Epoch 2/5, Training Loss: 0.5939\n",
      "Epoch 2/5, Training Loss: 0.5177\n",
      "Epoch 2/5, Training Loss: 0.5180\n",
      "Epoch 2/5, Training Loss: 0.5161\n",
      "Epoch 2/5, Training Loss: 0.4424\n",
      "Epoch 2/5, Training Loss: 0.4336\n",
      "Epoch 2/5, Training Loss: 0.4345\n",
      "Epoch 2/5, Training Loss: 0.4261\n",
      "Epoch 2/5, Training Loss: 0.3925\n",
      "Epoch 2/5, Training Loss: 0.5044\n",
      "Epoch 2/5, Training Loss: 0.4743\n",
      "Epoch 2/5, Training Loss: 0.3697\n",
      "Epoch 2/5, Training Loss: 0.5309\n",
      "Epoch 2/5, Training Loss: 0.4545\n",
      "Epoch 2/5, Training Loss: 0.2733\n",
      "Epoch 2/5, Training Loss: 0.5404\n",
      "Epoch 2/5, Training Loss: 0.5183\n",
      "Epoch 2/5, Training Loss: 0.6667\n",
      "Epoch 2/5, Training Loss: 0.5981\n",
      "Epoch 2/5, Training Loss: 0.5248\n",
      "Epoch 2/5, Training Loss: 0.5350\n",
      "Epoch 2/5, Training Loss: 0.5349\n",
      "Epoch 2/5, Training Loss: 0.3147\n",
      "Epoch 2/5, Training Loss: 0.4354\n",
      "Epoch 2/5, Training Loss: 0.4675\n",
      "Epoch 2/5, Training Loss: 0.3490\n",
      "Epoch 2/5, Training Loss: 0.3208\n",
      "Epoch 2/5, Training Loss: 0.5451\n",
      "Epoch 2/5, Training Loss: 0.3640\n",
      "Epoch 2/5, Training Loss: 0.2283\n",
      "Epoch 2/5, Training Loss: 0.4611\n",
      "Epoch 2/5, Training Loss: 0.2262\n",
      "Epoch 2/5, Training Loss: 0.4543\n",
      "Epoch 2/5, Training Loss: 0.5088\n",
      "Epoch 2/5, Training Loss: 0.5517\n",
      "Epoch 2/5, Training Loss: 0.3547\n",
      "Epoch 2/5, Training Loss: 0.2883\n",
      "Epoch 2/5, Training Loss: 0.6550\n",
      "Epoch 2/5, Training Loss: 0.3279\n",
      "Epoch 2/5, Training Loss: 0.4643\n",
      "Epoch 2/5, Training Loss: 0.5219\n",
      "Epoch 2/5, Training Loss: 0.3040\n",
      "Epoch 2/5, Training Loss: 0.3660\n",
      "Epoch 2/5, Training Loss: 0.4492\n",
      "Epoch 2/5, Training Loss: 0.4716\n",
      "Epoch 2/5, Training Loss: 0.3450\n",
      "Epoch 2/5, Training Loss: 0.5835\n",
      "Epoch 2/5, Training Loss: 0.2758\n",
      "Epoch 2/5, Training Loss: 0.3997\n",
      "Epoch 2/5, Training Loss: 0.1351\n",
      "Epoch 2/5, Training Loss: 0.4161\n",
      "Epoch 2/5, Training Loss: 0.1967\n",
      "Epoch 2/5, Training Loss: 0.3035\n",
      "Epoch 2/5, Training Loss: 0.4084\n",
      "Epoch 2/5, Training Loss: 0.5660\n",
      "Epoch 2/5, Training Loss: 0.4171\n",
      "Epoch 2/5, Training Loss: 0.3278\n",
      "Epoch 2/5, Training Loss: 0.5935\n",
      "Epoch 2/5, Training Loss: 0.2922\n",
      "Epoch 2/5, Training Loss: 0.4190\n",
      "Epoch 2/5, Training Loss: 0.3147\n",
      "Epoch 2/5, Training Loss: 0.5161\n",
      "Epoch 2/5, Training Loss: 0.5321\n",
      "Epoch 2/5, Training Loss: 0.5419\n",
      "Epoch 2/5, Training Loss: 0.5211\n",
      "Epoch 2/5, Training Loss: 0.4268\n",
      "Epoch 2/5, Training Loss: 0.7079\n",
      "Epoch 2/5, Training Loss: 0.4635\n",
      "Epoch 2/5, Training Loss: 0.4832\n",
      "Epoch 2/5, Training Loss: 0.4382\n",
      "Epoch 2/5, Training Loss: 0.4052\n",
      "Epoch 2/5, Training Loss: 0.4765\n",
      "Epoch 2/5, Training Loss: 0.1877\n",
      "Epoch 2/5, Training Loss: 0.4432\n",
      "Epoch 2/5, Training Loss: 0.4432, Cross-Validation Loss: 0.5229\n",
      "Epoch 3/5, Training Loss: 0.3488\n",
      "Epoch 3/5, Training Loss: 0.3473\n",
      "Epoch 3/5, Training Loss: 0.3163\n",
      "Epoch 3/5, Training Loss: 0.3299\n",
      "Epoch 3/5, Training Loss: 0.3109\n",
      "Epoch 3/5, Training Loss: 0.2180\n",
      "Epoch 3/5, Training Loss: 0.2719\n",
      "Epoch 3/5, Training Loss: 0.2573\n",
      "Epoch 3/5, Training Loss: 0.3047\n",
      "Epoch 3/5, Training Loss: 0.1981\n",
      "Epoch 3/5, Training Loss: 0.4746\n",
      "Epoch 3/5, Training Loss: 0.4820\n",
      "Epoch 3/5, Training Loss: 0.2636\n",
      "Epoch 3/5, Training Loss: 0.2635\n",
      "Epoch 3/5, Training Loss: 0.2785\n",
      "Epoch 3/5, Training Loss: 0.4443\n",
      "Epoch 3/5, Training Loss: 0.4777\n",
      "Epoch 3/5, Training Loss: 0.4107\n",
      "Epoch 3/5, Training Loss: 0.2439\n",
      "Epoch 3/5, Training Loss: 0.1950\n",
      "Epoch 3/5, Training Loss: 0.4013\n",
      "Epoch 3/5, Training Loss: 0.1389\n",
      "Epoch 3/5, Training Loss: 0.3576\n",
      "Epoch 3/5, Training Loss: 0.4335\n",
      "Epoch 3/5, Training Loss: 0.3387\n",
      "Epoch 3/5, Training Loss: 0.1034\n",
      "Epoch 3/5, Training Loss: 0.1648\n",
      "Epoch 3/5, Training Loss: 0.1071\n",
      "Epoch 3/5, Training Loss: 0.2238\n",
      "Epoch 3/5, Training Loss: 0.3086\n",
      "Epoch 3/5, Training Loss: 0.2206\n",
      "Epoch 3/5, Training Loss: 0.2118\n",
      "Epoch 3/5, Training Loss: 0.2703\n",
      "Epoch 3/5, Training Loss: 0.2473\n",
      "Epoch 3/5, Training Loss: 0.2554\n",
      "Epoch 3/5, Training Loss: 0.3364\n",
      "Epoch 3/5, Training Loss: 0.3066\n",
      "Epoch 3/5, Training Loss: 0.2915\n",
      "Epoch 3/5, Training Loss: 0.4203\n",
      "Epoch 3/5, Training Loss: 0.1513\n",
      "Epoch 3/5, Training Loss: 0.4221\n",
      "Epoch 3/5, Training Loss: 0.2967\n",
      "Epoch 3/5, Training Loss: 0.2651\n",
      "Epoch 3/5, Training Loss: 0.1889\n",
      "Epoch 3/5, Training Loss: 0.1766\n",
      "Epoch 3/5, Training Loss: 0.3926\n",
      "Epoch 3/5, Training Loss: 0.2197\n",
      "Epoch 3/5, Training Loss: 0.2638\n",
      "Epoch 3/5, Training Loss: 0.1292\n",
      "Epoch 3/5, Training Loss: 0.3485\n",
      "Epoch 3/5, Training Loss: 0.2957\n",
      "Epoch 3/5, Training Loss: 0.2901\n",
      "Epoch 3/5, Training Loss: 0.3590\n",
      "Epoch 3/5, Training Loss: 0.1861\n",
      "Epoch 3/5, Training Loss: 0.1868\n",
      "Epoch 3/5, Training Loss: 0.5829\n",
      "Epoch 3/5, Training Loss: 0.2525\n",
      "Epoch 3/5, Training Loss: 0.3205\n",
      "Epoch 3/5, Training Loss: 0.3375\n",
      "Epoch 3/5, Training Loss: 0.2649\n",
      "Epoch 3/5, Training Loss: 0.3144\n",
      "Epoch 3/5, Training Loss: 0.4291\n",
      "Epoch 3/5, Training Loss: 0.2641\n",
      "Epoch 3/5, Training Loss: 0.1932\n",
      "Epoch 3/5, Training Loss: 0.3309\n",
      "Epoch 3/5, Training Loss: 0.3994\n",
      "Epoch 3/5, Training Loss: 0.3498\n",
      "Epoch 3/5, Training Loss: 0.2803\n",
      "Epoch 3/5, Training Loss: 0.1503\n",
      "Epoch 3/5, Training Loss: 0.2151\n",
      "Epoch 3/5, Training Loss: 0.3635\n",
      "Epoch 3/5, Training Loss: 0.1796\n",
      "Epoch 3/5, Training Loss: 0.3286\n",
      "Epoch 3/5, Training Loss: 0.6996\n",
      "Epoch 3/5, Training Loss: 0.2556\n",
      "Epoch 3/5, Training Loss: 0.4606\n",
      "Epoch 3/5, Training Loss: 0.3746\n",
      "Epoch 3/5, Training Loss: 0.1425\n",
      "Epoch 3/5, Training Loss: 0.4440\n",
      "Epoch 3/5, Training Loss: 0.3595\n",
      "Epoch 3/5, Training Loss: 0.1888\n",
      "Epoch 3/5, Training Loss: 0.3427\n",
      "Epoch 3/5, Training Loss: 0.3682\n",
      "Epoch 3/5, Training Loss: 0.3186\n",
      "Epoch 3/5, Training Loss: 0.3465\n",
      "Epoch 3/5, Training Loss: 0.2515\n",
      "Epoch 3/5, Training Loss: 0.1344\n",
      "Epoch 3/5, Training Loss: 0.2475\n",
      "Epoch 3/5, Training Loss: 0.3035\n",
      "Epoch 3/5, Training Loss: 0.3195\n",
      "Epoch 3/5, Training Loss: 0.4993\n",
      "Epoch 3/5, Training Loss: 0.3562\n",
      "Epoch 3/5, Training Loss: 0.2301\n",
      "Epoch 3/5, Training Loss: 0.2525\n",
      "Epoch 3/5, Training Loss: 0.4332\n",
      "Epoch 3/5, Training Loss: 0.2106\n",
      "Epoch 3/5, Training Loss: 0.3126\n",
      "Epoch 3/5, Training Loss: 0.5267\n",
      "Epoch 3/5, Training Loss: 0.3137\n",
      "Epoch 3/5, Training Loss: 0.3109\n",
      "Epoch 3/5, Training Loss: 0.2166\n",
      "Epoch 3/5, Training Loss: 0.3051\n",
      "Epoch 3/5, Training Loss: 0.2312\n",
      "Epoch 3/5, Training Loss: 0.3073\n",
      "Epoch 3/5, Training Loss: 0.4087\n",
      "Epoch 3/5, Training Loss: 0.4074\n",
      "Epoch 3/5, Training Loss: 0.3888\n",
      "Epoch 3/5, Training Loss: 0.2076\n",
      "Epoch 3/5, Training Loss: 0.3857\n",
      "Epoch 3/5, Training Loss: 0.3116\n",
      "Epoch 3/5, Training Loss: 0.1741\n",
      "Epoch 3/5, Training Loss: 0.3556\n",
      "Epoch 3/5, Training Loss: 0.5891\n",
      "Epoch 3/5, Training Loss: 0.3969\n",
      "Epoch 3/5, Training Loss: 0.3656\n",
      "Epoch 3/5, Training Loss: 0.2815\n",
      "Epoch 3/5, Training Loss: 0.3550\n",
      "Epoch 3/5, Training Loss: 0.2910\n",
      "Epoch 3/5, Training Loss: 0.3456\n",
      "Epoch 3/5, Training Loss: 0.2891\n",
      "Epoch 3/5, Training Loss: 0.3652\n",
      "Epoch 3/5, Training Loss: 0.2032\n",
      "Epoch 3/5, Training Loss: 0.3976\n",
      "Epoch 3/5, Training Loss: 0.1977\n",
      "Epoch 3/5, Training Loss: 0.1671\n",
      "Epoch 3/5, Training Loss: 0.2520\n",
      "Epoch 3/5, Training Loss: 0.2570\n",
      "Epoch 3/5, Training Loss: 0.2167\n",
      "Epoch 3/5, Training Loss: 0.3646\n",
      "Epoch 3/5, Training Loss: 0.1966\n",
      "Epoch 3/5, Training Loss: 0.3922\n",
      "Epoch 3/5, Training Loss: 0.3509\n",
      "Epoch 3/5, Training Loss: 0.5298\n",
      "Epoch 3/5, Training Loss: 0.3211\n",
      "Epoch 3/5, Training Loss: 0.4086\n",
      "Epoch 3/5, Training Loss: 0.2504\n",
      "Epoch 3/5, Training Loss: 0.2277\n",
      "Epoch 3/5, Training Loss: 0.2693\n",
      "Epoch 3/5, Training Loss: 0.2647\n",
      "Epoch 3/5, Training Loss: 0.2112\n",
      "Epoch 3/5, Training Loss: 0.3889\n",
      "Epoch 3/5, Training Loss: 0.2956\n",
      "Epoch 3/5, Training Loss: 0.3829\n",
      "Epoch 3/5, Training Loss: 0.4276\n",
      "Epoch 3/5, Training Loss: 0.3296\n",
      "Epoch 3/5, Training Loss: 0.2429\n",
      "Epoch 3/5, Training Loss: 0.3612\n",
      "Epoch 3/5, Training Loss: 0.3887\n",
      "Epoch 3/5, Training Loss: 0.3293\n",
      "Epoch 3/5, Training Loss: 0.2016\n",
      "Epoch 3/5, Training Loss: 0.5460\n",
      "Epoch 3/5, Training Loss: 0.2295\n",
      "Epoch 3/5, Training Loss: 0.3432\n",
      "Epoch 3/5, Training Loss: 0.2994\n",
      "Epoch 3/5, Training Loss: 0.2129\n",
      "Epoch 3/5, Training Loss: 0.1649\n",
      "Epoch 3/5, Training Loss: 0.2695\n",
      "Epoch 3/5, Training Loss: 0.2374\n",
      "Epoch 3/5, Training Loss: 0.5688\n",
      "Epoch 3/5, Training Loss: 0.2851\n",
      "Epoch 3/5, Training Loss: 0.2337\n",
      "Epoch 3/5, Training Loss: 0.4935\n",
      "Epoch 3/5, Training Loss: 0.2909\n",
      "Epoch 3/5, Training Loss: 0.2535\n",
      "Epoch 3/5, Training Loss: 0.5229\n",
      "Epoch 3/5, Training Loss: 0.2503\n",
      "Epoch 3/5, Training Loss: 0.3108\n",
      "Epoch 3/5, Training Loss: 0.1172\n",
      "Epoch 3/5, Training Loss: 0.4280\n",
      "Epoch 3/5, Training Loss: 0.3605\n",
      "Epoch 3/5, Training Loss: 0.3858\n",
      "Epoch 3/5, Training Loss: 0.1680\n",
      "Epoch 3/5, Training Loss: 0.5717\n",
      "Epoch 3/5, Training Loss: 0.3387\n",
      "Epoch 3/5, Training Loss: 0.5580\n",
      "Epoch 3/5, Training Loss: 0.2380\n",
      "Epoch 3/5, Training Loss: 0.4365\n",
      "Epoch 3/5, Training Loss: 0.3850\n",
      "Epoch 3/5, Training Loss: 0.2252\n",
      "Epoch 3/5, Training Loss: 0.5765\n",
      "Epoch 3/5, Training Loss: 0.3210\n",
      "Epoch 3/5, Training Loss: 0.4116\n",
      "Epoch 3/5, Training Loss: 0.5332\n",
      "Epoch 3/5, Training Loss: 0.4062\n",
      "Epoch 3/5, Training Loss: 0.1963\n",
      "Epoch 3/5, Training Loss: 0.2696\n",
      "Epoch 3/5, Training Loss: 0.3690\n",
      "Epoch 3/5, Training Loss: 0.2902\n",
      "Epoch 3/5, Training Loss: 0.3154\n",
      "Epoch 3/5, Training Loss: 0.5617\n",
      "Epoch 3/5, Training Loss: 0.4090\n",
      "Epoch 3/5, Training Loss: 0.3479\n",
      "Epoch 3/5, Training Loss: 0.3340\n",
      "Epoch 3/5, Training Loss: 0.2926\n",
      "Epoch 3/5, Training Loss: 0.1564\n",
      "Epoch 3/5, Training Loss: 0.1770\n",
      "Epoch 3/5, Training Loss: 0.1844\n",
      "Epoch 3/5, Training Loss: 0.2587\n",
      "Epoch 3/5, Training Loss: 0.4122\n",
      "Epoch 3/5, Training Loss: 0.3406\n",
      "Epoch 3/5, Training Loss: 0.2132\n",
      "Epoch 3/5, Training Loss: 0.2861\n",
      "Epoch 3/5, Training Loss: 0.3935\n",
      "Epoch 3/5, Training Loss: 0.2677\n",
      "Epoch 3/5, Training Loss: 0.3415\n",
      "Epoch 3/5, Training Loss: 0.4674\n",
      "Epoch 3/5, Training Loss: 0.4849\n",
      "Epoch 3/5, Training Loss: 0.3049\n",
      "Epoch 3/5, Training Loss: 0.4326\n",
      "Epoch 3/5, Training Loss: 0.2147\n",
      "Epoch 3/5, Training Loss: 0.2140\n",
      "Epoch 3/5, Training Loss: 0.2320\n",
      "Epoch 3/5, Training Loss: 0.2736\n",
      "Epoch 3/5, Training Loss: 0.3481\n",
      "Epoch 3/5, Training Loss: 0.2575\n",
      "Epoch 3/5, Training Loss: 0.3729\n",
      "Epoch 3/5, Training Loss: 0.4467\n",
      "Epoch 3/5, Training Loss: 0.4074\n",
      "Epoch 3/5, Training Loss: 0.3805\n",
      "Epoch 3/5, Training Loss: 0.2938\n",
      "Epoch 3/5, Training Loss: 0.2418\n",
      "Epoch 3/5, Training Loss: 0.2393\n",
      "Epoch 3/5, Training Loss: 0.2476\n",
      "Epoch 3/5, Training Loss: 0.3110\n",
      "Epoch 3/5, Training Loss: 0.2421\n",
      "Epoch 3/5, Training Loss: 0.1997\n",
      "Epoch 3/5, Training Loss: 0.6771\n",
      "Epoch 3/5, Training Loss: 0.3177\n",
      "Epoch 3/5, Training Loss: 0.4258\n",
      "Epoch 3/5, Training Loss: 0.3632\n",
      "Epoch 3/5, Training Loss: 0.4397\n",
      "Epoch 3/5, Training Loss: 0.3049\n",
      "Epoch 3/5, Training Loss: 0.3866\n",
      "Epoch 3/5, Training Loss: 0.4136\n",
      "Epoch 3/5, Training Loss: 0.4444\n",
      "Epoch 3/5, Training Loss: 0.3787\n",
      "Epoch 3/5, Training Loss: 0.2154\n",
      "Epoch 3/5, Training Loss: 0.2700\n",
      "Epoch 3/5, Training Loss: 0.3786\n",
      "Epoch 3/5, Training Loss: 0.1301\n",
      "Epoch 3/5, Training Loss: 0.2779\n",
      "Epoch 3/5, Training Loss: 0.2463\n",
      "Epoch 3/5, Training Loss: 0.2909\n",
      "Epoch 3/5, Training Loss: 0.1673\n",
      "Epoch 3/5, Training Loss: 0.3702\n",
      "Epoch 3/5, Training Loss: 0.2747\n",
      "Epoch 3/5, Training Loss: 0.4483\n",
      "Epoch 3/5, Training Loss: 0.2416\n",
      "Epoch 3/5, Training Loss: 0.2494\n",
      "Epoch 3/5, Training Loss: 0.5888\n",
      "Epoch 3/5, Training Loss: 0.2396\n",
      "Epoch 3/5, Training Loss: 0.3272\n",
      "Epoch 3/5, Training Loss: 0.3031\n",
      "Epoch 3/5, Training Loss: 0.3472\n",
      "Epoch 3/5, Training Loss: 0.3499\n",
      "Epoch 3/5, Training Loss: 0.4630\n",
      "Epoch 3/5, Training Loss: 0.4730\n",
      "Epoch 3/5, Training Loss: 0.3689\n",
      "Epoch 3/5, Training Loss: 0.3796\n",
      "Epoch 3/5, Training Loss: 0.3307\n",
      "Epoch 3/5, Training Loss: 0.1594\n",
      "Epoch 3/5, Training Loss: 0.4028\n",
      "Epoch 3/5, Training Loss: 0.3371\n",
      "Epoch 3/5, Training Loss: 0.1554\n",
      "Epoch 3/5, Training Loss: 0.2736\n",
      "Epoch 3/5, Training Loss: 0.2914\n",
      "Epoch 3/5, Training Loss: 0.2571\n",
      "Epoch 3/5, Training Loss: 0.4938\n",
      "Epoch 3/5, Training Loss: 0.3706\n",
      "Epoch 3/5, Training Loss: 0.3367\n",
      "Epoch 3/5, Training Loss: 0.3125\n",
      "Epoch 3/5, Training Loss: 0.3329\n",
      "Epoch 3/5, Training Loss: 0.2809\n",
      "Epoch 3/5, Training Loss: 0.2091\n",
      "Epoch 3/5, Training Loss: 0.2176\n",
      "Epoch 3/5, Training Loss: 0.3231\n",
      "Epoch 3/5, Training Loss: 0.4128\n",
      "Epoch 3/5, Training Loss: 0.2802\n",
      "Epoch 3/5, Training Loss: 0.4143\n",
      "Epoch 3/5, Training Loss: 0.2490\n",
      "Epoch 3/5, Training Loss: 0.4376\n",
      "Epoch 3/5, Training Loss: 0.2636\n",
      "Epoch 3/5, Training Loss: 0.2984\n",
      "Epoch 3/5, Training Loss: 0.4121\n",
      "Epoch 3/5, Training Loss: 0.1583\n",
      "Epoch 3/5, Training Loss: 0.5787\n",
      "Epoch 3/5, Training Loss: 0.2506\n",
      "Epoch 3/5, Training Loss: 0.1905\n",
      "Epoch 3/5, Training Loss: 0.4547\n",
      "Epoch 3/5, Training Loss: 0.4543\n",
      "Epoch 3/5, Training Loss: 0.3801\n",
      "Epoch 3/5, Training Loss: 0.2700\n",
      "Epoch 3/5, Training Loss: 0.4379\n",
      "Epoch 3/5, Training Loss: 0.2469\n",
      "Epoch 3/5, Training Loss: 0.5063\n",
      "Epoch 3/5, Training Loss: 0.2415\n",
      "Epoch 3/5, Training Loss: 0.4718\n",
      "Epoch 3/5, Training Loss: 0.4110\n",
      "Epoch 3/5, Training Loss: 0.3805\n",
      "Epoch 3/5, Training Loss: 0.3840\n",
      "Epoch 3/5, Training Loss: 0.2866\n",
      "Epoch 3/5, Training Loss: 0.2033\n",
      "Epoch 3/5, Training Loss: 0.3101\n",
      "Epoch 3/5, Training Loss: 0.2823\n",
      "Epoch 3/5, Training Loss: 0.3854\n",
      "Epoch 3/5, Training Loss: 0.4553\n",
      "Epoch 3/5, Training Loss: 0.2600\n",
      "Epoch 3/5, Training Loss: 0.4764\n",
      "Epoch 3/5, Training Loss: 0.4976\n",
      "Epoch 3/5, Training Loss: 0.3333\n",
      "Epoch 3/5, Training Loss: 0.4223\n",
      "Epoch 3/5, Training Loss: 0.3116\n",
      "Epoch 3/5, Training Loss: 0.3892\n",
      "Epoch 3/5, Training Loss: 0.4509\n",
      "Epoch 3/5, Training Loss: 0.2386\n",
      "Epoch 3/5, Training Loss: 0.1941\n",
      "Epoch 3/5, Training Loss: 0.2025\n",
      "Epoch 3/5, Training Loss: 0.2287\n",
      "Epoch 3/5, Training Loss: 0.3848\n",
      "Epoch 3/5, Training Loss: 0.2575\n",
      "Epoch 3/5, Training Loss: 0.2352\n",
      "Epoch 3/5, Training Loss: 0.4184\n",
      "Epoch 3/5, Training Loss: 0.3364\n",
      "Epoch 3/5, Training Loss: 0.2932\n",
      "Epoch 3/5, Training Loss: 0.4858\n",
      "Epoch 3/5, Training Loss: 0.3763\n",
      "Epoch 3/5, Training Loss: 0.2881\n",
      "Epoch 3/5, Training Loss: 0.4097\n",
      "Epoch 3/5, Training Loss: 0.3082\n",
      "Epoch 3/5, Training Loss: 0.3720\n",
      "Epoch 3/5, Training Loss: 0.3408\n",
      "Epoch 3/5, Training Loss: 0.3296\n",
      "Epoch 3/5, Training Loss: 0.3906\n",
      "Epoch 3/5, Training Loss: 0.2559\n",
      "Epoch 3/5, Training Loss: 0.2542\n",
      "Epoch 3/5, Training Loss: 0.1859\n",
      "Epoch 3/5, Training Loss: 0.5651\n",
      "Epoch 3/5, Training Loss: 0.3673\n",
      "Epoch 3/5, Training Loss: 0.5477\n",
      "Epoch 3/5, Training Loss: 0.2074\n",
      "Epoch 3/5, Training Loss: 0.1932\n",
      "Epoch 3/5, Training Loss: 0.3144\n",
      "Epoch 3/5, Training Loss: 0.4571\n",
      "Epoch 3/5, Training Loss: 0.3014\n",
      "Epoch 3/5, Training Loss: 0.6144\n",
      "Epoch 3/5, Training Loss: 0.4920\n",
      "Epoch 3/5, Training Loss: 0.2952\n",
      "Epoch 3/5, Training Loss: 0.1911\n",
      "Epoch 3/5, Training Loss: 0.4950\n",
      "Epoch 3/5, Training Loss: 0.3330\n",
      "Epoch 3/5, Training Loss: 0.2814\n",
      "Epoch 3/5, Training Loss: 0.2886\n",
      "Epoch 3/5, Training Loss: 0.3408\n",
      "Epoch 3/5, Training Loss: 0.3601\n",
      "Epoch 3/5, Training Loss: 0.2510\n",
      "Epoch 3/5, Training Loss: 0.5664\n",
      "Epoch 3/5, Training Loss: 0.3079\n",
      "Epoch 3/5, Training Loss: 0.3648\n",
      "Epoch 3/5, Training Loss: 0.2508\n",
      "Epoch 3/5, Training Loss: 0.2496\n",
      "Epoch 3/5, Training Loss: 0.3431\n",
      "Epoch 3/5, Training Loss: 0.2457\n",
      "Epoch 3/5, Training Loss: 0.2944\n",
      "Epoch 3/5, Training Loss: 0.2890\n",
      "Epoch 3/5, Training Loss: 0.2380\n",
      "Epoch 3/5, Training Loss: 0.3190\n",
      "Epoch 3/5, Training Loss: 0.3823\n",
      "Epoch 3/5, Training Loss: 0.2487\n",
      "Epoch 3/5, Training Loss: 0.3325\n",
      "Epoch 3/5, Training Loss: 0.2182\n",
      "Epoch 3/5, Training Loss: 0.2875\n",
      "Epoch 3/5, Training Loss: 0.3789\n",
      "Epoch 3/5, Training Loss: 0.4628\n",
      "Epoch 3/5, Training Loss: 0.4593\n",
      "Epoch 3/5, Training Loss: 0.4549\n",
      "Epoch 3/5, Training Loss: 0.3656\n",
      "Epoch 3/5, Training Loss: 0.2397\n",
      "Epoch 3/5, Training Loss: 0.2646\n",
      "Epoch 3/5, Training Loss: 0.2970\n",
      "Epoch 3/5, Training Loss: 0.2622\n",
      "Epoch 3/5, Training Loss: 0.4384\n",
      "Epoch 3/5, Training Loss: 0.2030\n",
      "Epoch 3/5, Training Loss: 0.2418\n",
      "Epoch 3/5, Training Loss: 0.3474\n",
      "Epoch 3/5, Training Loss: 0.3351\n",
      "Epoch 3/5, Training Loss: 0.2934\n",
      "Epoch 3/5, Training Loss: 0.2389\n",
      "Epoch 3/5, Training Loss: 0.2161\n",
      "Epoch 3/5, Training Loss: 0.2685\n",
      "Epoch 3/5, Training Loss: 0.2672\n",
      "Epoch 3/5, Training Loss: 0.2754\n",
      "Epoch 3/5, Training Loss: 0.2032\n",
      "Epoch 3/5, Training Loss: 0.3520\n",
      "Epoch 3/5, Training Loss: 0.3496\n",
      "Epoch 3/5, Training Loss: 0.4243\n",
      "Epoch 3/5, Training Loss: 0.2168\n",
      "Epoch 3/5, Training Loss: 0.2789\n",
      "Epoch 3/5, Training Loss: 0.3655\n",
      "Epoch 3/5, Training Loss: 0.6732\n",
      "Epoch 3/5, Training Loss: 0.3407\n",
      "Epoch 3/5, Training Loss: 0.3871\n",
      "Epoch 3/5, Training Loss: 0.5391\n",
      "Epoch 3/5, Training Loss: 0.3722\n",
      "Epoch 3/5, Training Loss: 0.3403\n",
      "Epoch 3/5, Training Loss: 0.4946\n",
      "Epoch 3/5, Training Loss: 0.1737\n",
      "Epoch 3/5, Training Loss: 0.2947\n",
      "Epoch 3/5, Training Loss: 0.3158\n",
      "Epoch 3/5, Training Loss: 0.3075\n",
      "Epoch 3/5, Training Loss: 0.1543\n",
      "Epoch 3/5, Training Loss: 0.3722\n",
      "Epoch 3/5, Training Loss: 0.2779\n",
      "Epoch 3/5, Training Loss: 0.3483\n",
      "Epoch 3/5, Training Loss: 0.2086\n",
      "Epoch 3/5, Training Loss: 0.4136\n",
      "Epoch 3/5, Training Loss: 0.3409\n",
      "Epoch 3/5, Training Loss: 0.2853\n",
      "Epoch 3/5, Training Loss: 0.2622\n",
      "Epoch 3/5, Training Loss: 0.4109\n",
      "Epoch 3/5, Training Loss: 0.4543\n",
      "Epoch 3/5, Training Loss: 0.3429\n",
      "Epoch 3/5, Training Loss: 0.3694\n",
      "Epoch 3/5, Training Loss: 0.3544\n",
      "Epoch 3/5, Training Loss: 0.2412\n",
      "Epoch 3/5, Training Loss: 0.3352\n",
      "Epoch 3/5, Training Loss: 0.3301\n",
      "Epoch 3/5, Training Loss: 0.3612\n",
      "Epoch 3/5, Training Loss: 0.2125\n",
      "Epoch 3/5, Training Loss: 0.3324\n",
      "Epoch 3/5, Training Loss: 0.2457\n",
      "Epoch 3/5, Training Loss: 0.1401\n",
      "Epoch 3/5, Training Loss: 0.3550\n",
      "Epoch 3/5, Training Loss: 0.2053\n",
      "Epoch 3/5, Training Loss: 0.2315\n",
      "Epoch 3/5, Training Loss: 0.3499\n",
      "Epoch 3/5, Training Loss: 0.2680\n",
      "Epoch 3/5, Training Loss: 0.2729\n",
      "Epoch 3/5, Training Loss: 0.3413\n",
      "Epoch 3/5, Training Loss: 0.2385\n",
      "Epoch 3/5, Training Loss: 0.4624\n",
      "Epoch 3/5, Training Loss: 0.3297\n",
      "Epoch 3/5, Training Loss: 0.5097\n",
      "Epoch 3/5, Training Loss: 0.2577\n",
      "Epoch 3/5, Training Loss: 0.1992\n",
      "Epoch 3/5, Training Loss: 0.2673\n",
      "Epoch 3/5, Training Loss: 0.4396\n",
      "Epoch 3/5, Training Loss: 0.2118\n",
      "Epoch 3/5, Training Loss: 0.3317\n",
      "Epoch 3/5, Training Loss: 0.2446\n",
      "Epoch 3/5, Training Loss: 0.0864\n",
      "Epoch 3/5, Training Loss: 0.3763\n",
      "Epoch 3/5, Training Loss: 0.3297\n",
      "Epoch 3/5, Training Loss: 0.3683\n",
      "Epoch 3/5, Training Loss: 0.3721\n",
      "Epoch 3/5, Training Loss: 0.5150\n",
      "Epoch 3/5, Training Loss: 0.1684\n",
      "Epoch 3/5, Training Loss: 0.2324\n",
      "Epoch 3/5, Training Loss: 0.2579\n",
      "Epoch 3/5, Training Loss: 0.1802\n",
      "Epoch 3/5, Training Loss: 0.4504\n",
      "Epoch 3/5, Training Loss: 0.3361\n",
      "Epoch 3/5, Training Loss: 0.2997\n",
      "Epoch 3/5, Training Loss: 0.4849\n",
      "Epoch 3/5, Training Loss: 0.5439\n",
      "Epoch 3/5, Training Loss: 0.2651\n",
      "Epoch 3/5, Training Loss: 0.1932\n",
      "Epoch 3/5, Training Loss: 0.3587\n",
      "Epoch 3/5, Training Loss: 0.2228\n",
      "Epoch 3/5, Training Loss: 0.3324\n",
      "Epoch 3/5, Training Loss: 0.4366\n",
      "Epoch 3/5, Training Loss: 0.3391\n",
      "Epoch 3/5, Training Loss: 0.3453\n",
      "Epoch 3/5, Training Loss: 0.2624\n",
      "Epoch 3/5, Training Loss: 0.3905\n",
      "Epoch 3/5, Training Loss: 0.2630\n",
      "Epoch 3/5, Training Loss: 0.2557\n",
      "Epoch 3/5, Training Loss: 0.3130\n",
      "Epoch 3/5, Training Loss: 0.2569\n",
      "Epoch 3/5, Training Loss: 0.2915\n",
      "Epoch 3/5, Training Loss: 0.3691\n",
      "Epoch 3/5, Training Loss: 0.3346\n",
      "Epoch 3/5, Training Loss: 0.4653\n",
      "Epoch 3/5, Training Loss: 0.2675\n",
      "Epoch 3/5, Training Loss: 0.2327\n",
      "Epoch 3/5, Training Loss: 0.3509\n",
      "Epoch 3/5, Training Loss: 0.2058\n",
      "Epoch 3/5, Training Loss: 0.2108\n",
      "Epoch 3/5, Training Loss: 0.1567\n",
      "Epoch 3/5, Training Loss: 0.3133\n",
      "Epoch 3/5, Training Loss: 0.3379\n",
      "Epoch 3/5, Training Loss: 0.3234\n",
      "Epoch 3/5, Training Loss: 0.1758\n",
      "Epoch 3/5, Training Loss: 0.2357\n",
      "Epoch 3/5, Training Loss: 0.3447\n",
      "Epoch 3/5, Training Loss: 0.3492\n",
      "Epoch 3/5, Training Loss: 0.2275\n",
      "Epoch 3/5, Training Loss: 0.3285\n",
      "Epoch 3/5, Training Loss: 0.2582\n",
      "Epoch 3/5, Training Loss: 0.4839\n",
      "Epoch 3/5, Training Loss: 0.6253\n",
      "Epoch 3/5, Training Loss: 0.3361\n",
      "Epoch 3/5, Training Loss: 0.3624\n",
      "Epoch 3/5, Training Loss: 0.3084\n",
      "Epoch 3/5, Training Loss: 0.3038\n",
      "Epoch 3/5, Training Loss: 0.1204\n",
      "Epoch 3/5, Training Loss: 0.3656\n",
      "Epoch 3/5, Training Loss: 0.6294\n",
      "Epoch 3/5, Training Loss: 0.2916\n",
      "Epoch 3/5, Training Loss: 0.2167\n",
      "Epoch 3/5, Training Loss: 0.5223\n",
      "Epoch 3/5, Training Loss: 0.2573\n",
      "Epoch 3/5, Training Loss: 0.4597\n",
      "Epoch 3/5, Training Loss: 0.2333\n",
      "Epoch 3/5, Training Loss: 0.2780\n",
      "Epoch 3/5, Training Loss: 0.1811\n",
      "Epoch 3/5, Training Loss: 0.2308\n",
      "Epoch 3/5, Training Loss: 0.3971\n",
      "Epoch 3/5, Training Loss: 0.4483\n",
      "Epoch 3/5, Training Loss: 0.5378\n",
      "Epoch 3/5, Training Loss: 0.4637\n",
      "Epoch 3/5, Training Loss: 0.4734\n",
      "Epoch 3/5, Training Loss: 0.3176\n",
      "Epoch 3/5, Training Loss: 0.5726\n",
      "Epoch 3/5, Training Loss: 0.5173\n",
      "Epoch 3/5, Training Loss: 0.2759\n",
      "Epoch 3/5, Training Loss: 0.4409\n",
      "Epoch 3/5, Training Loss: 0.3548\n",
      "Epoch 3/5, Training Loss: 0.2709\n",
      "Epoch 3/5, Training Loss: 0.5885\n",
      "Epoch 3/5, Training Loss: 0.3156\n",
      "Epoch 3/5, Training Loss: 0.2467\n",
      "Epoch 3/5, Training Loss: 0.2849\n",
      "Epoch 3/5, Training Loss: 0.2486\n",
      "Epoch 3/5, Training Loss: 0.4137\n",
      "Epoch 3/5, Training Loss: 0.2211\n",
      "Epoch 3/5, Training Loss: 0.4882\n",
      "Epoch 3/5, Training Loss: 0.4026\n",
      "Epoch 3/5, Training Loss: 0.5052\n",
      "Epoch 3/5, Training Loss: 0.2252\n",
      "Epoch 3/5, Training Loss: 0.3036\n",
      "Epoch 3/5, Training Loss: 0.2296\n",
      "Epoch 3/5, Training Loss: 0.3236\n",
      "Epoch 3/5, Training Loss: 0.3721\n",
      "Epoch 3/5, Training Loss: 0.2512\n",
      "Epoch 3/5, Training Loss: 0.2693\n",
      "Epoch 3/5, Training Loss: 0.4188\n",
      "Epoch 3/5, Training Loss: 0.2938\n",
      "Epoch 3/5, Training Loss: 0.4762\n",
      "Epoch 3/5, Training Loss: 0.1556\n",
      "Epoch 3/5, Training Loss: 0.2613\n",
      "Epoch 3/5, Training Loss: 0.3042\n",
      "Epoch 3/5, Training Loss: 0.3190\n",
      "Epoch 3/5, Training Loss: 0.4619\n",
      "Epoch 3/5, Training Loss: 0.2345\n",
      "Epoch 3/5, Training Loss: 0.3113\n",
      "Epoch 3/5, Training Loss: 0.2981\n",
      "Epoch 3/5, Training Loss: 0.4141\n",
      "Epoch 3/5, Training Loss: 0.3894\n",
      "Epoch 3/5, Training Loss: 0.2224\n",
      "Epoch 3/5, Training Loss: 0.2886\n",
      "Epoch 3/5, Training Loss: 0.3601\n",
      "Epoch 3/5, Training Loss: 0.4136\n",
      "Epoch 3/5, Training Loss: 0.2508\n",
      "Epoch 3/5, Training Loss: 0.2327\n",
      "Epoch 3/5, Training Loss: 0.3575\n",
      "Epoch 3/5, Training Loss: 0.2600\n",
      "Epoch 3/5, Training Loss: 0.3854\n",
      "Epoch 3/5, Training Loss: 0.3975\n",
      "Epoch 3/5, Training Loss: 0.4084\n",
      "Epoch 3/5, Training Loss: 0.2015\n",
      "Epoch 3/5, Training Loss: 0.2154\n",
      "Epoch 3/5, Training Loss: 0.1784\n",
      "Epoch 3/5, Training Loss: 0.3109\n",
      "Epoch 3/5, Training Loss: 0.3034\n",
      "Epoch 3/5, Training Loss: 0.2481\n",
      "Epoch 3/5, Training Loss: 0.4774\n",
      "Epoch 3/5, Training Loss: 0.4234\n",
      "Epoch 3/5, Training Loss: 0.2734\n",
      "Epoch 3/5, Training Loss: 0.4051\n",
      "Epoch 3/5, Training Loss: 0.2574\n",
      "Epoch 3/5, Training Loss: 0.4140\n",
      "Epoch 3/5, Training Loss: 0.3989\n",
      "Epoch 3/5, Training Loss: 0.4205\n",
      "Epoch 3/5, Training Loss: 0.2262\n",
      "Epoch 3/5, Training Loss: 0.2439\n",
      "Epoch 3/5, Training Loss: 0.2858\n",
      "Epoch 3/5, Training Loss: 0.2473\n",
      "Epoch 3/5, Training Loss: 0.2451\n",
      "Epoch 3/5, Training Loss: 0.2625\n",
      "Epoch 3/5, Training Loss: 0.3960\n",
      "Epoch 3/5, Training Loss: 0.6430\n",
      "Epoch 3/5, Training Loss: 0.3343\n",
      "Epoch 3/5, Training Loss: 0.2411\n",
      "Epoch 3/5, Training Loss: 0.4709\n",
      "Epoch 3/5, Training Loss: 0.2131\n",
      "Epoch 3/5, Training Loss: 0.2538\n",
      "Epoch 3/5, Training Loss: 0.2745\n",
      "Epoch 3/5, Training Loss: 0.1361\n",
      "Epoch 3/5, Training Loss: 0.4391\n",
      "Epoch 3/5, Training Loss: 0.2286\n",
      "Epoch 3/5, Training Loss: 0.3993\n",
      "Epoch 3/5, Training Loss: 0.5287\n",
      "Epoch 3/5, Training Loss: 0.2571\n",
      "Epoch 3/5, Training Loss: 0.4223\n",
      "Epoch 3/5, Training Loss: 0.3585\n",
      "Epoch 3/5, Training Loss: 0.3790\n",
      "Epoch 3/5, Training Loss: 0.3271\n",
      "Epoch 3/5, Training Loss: 0.4222\n",
      "Epoch 3/5, Training Loss: 0.2860\n",
      "Epoch 3/5, Training Loss: 0.4125\n",
      "Epoch 3/5, Training Loss: 0.2809\n",
      "Epoch 3/5, Training Loss: 0.2848\n",
      "Epoch 3/5, Training Loss: 0.2844\n",
      "Epoch 3/5, Training Loss: 0.2975\n",
      "Epoch 3/5, Training Loss: 0.3114\n",
      "Epoch 3/5, Training Loss: 0.2269\n",
      "Epoch 3/5, Training Loss: 0.4730\n",
      "Epoch 3/5, Training Loss: 0.5266\n",
      "Epoch 3/5, Training Loss: 0.3494\n",
      "Epoch 3/5, Training Loss: 0.2049\n",
      "Epoch 3/5, Training Loss: 0.3377\n",
      "Epoch 3/5, Training Loss: 0.2079\n",
      "Epoch 3/5, Training Loss: 0.1463\n",
      "Epoch 3/5, Training Loss: 0.2529\n",
      "Epoch 3/5, Training Loss: 0.3856\n",
      "Epoch 3/5, Training Loss: 0.3856, Cross-Validation Loss: 0.4640\n",
      "Epoch 4/5, Training Loss: 0.2176\n",
      "Epoch 4/5, Training Loss: 0.1560\n",
      "Epoch 4/5, Training Loss: 0.2718\n",
      "Epoch 4/5, Training Loss: 0.2216\n",
      "Epoch 4/5, Training Loss: 0.2255\n",
      "Epoch 4/5, Training Loss: 0.1456\n",
      "Epoch 4/5, Training Loss: 0.1359\n",
      "Epoch 4/5, Training Loss: 0.1095\n",
      "Epoch 4/5, Training Loss: 0.3641\n",
      "Epoch 4/5, Training Loss: 0.2440\n",
      "Epoch 4/5, Training Loss: 0.1997\n",
      "Epoch 4/5, Training Loss: 0.1314\n",
      "Epoch 4/5, Training Loss: 0.2231\n",
      "Epoch 4/5, Training Loss: 0.2822\n",
      "Epoch 4/5, Training Loss: 0.1165\n",
      "Epoch 4/5, Training Loss: 0.1605\n",
      "Epoch 4/5, Training Loss: 0.2669\n",
      "Epoch 4/5, Training Loss: 0.2153\n",
      "Epoch 4/5, Training Loss: 0.3964\n",
      "Epoch 4/5, Training Loss: 0.1725\n",
      "Epoch 4/5, Training Loss: 0.2591\n",
      "Epoch 4/5, Training Loss: 0.2675\n",
      "Epoch 4/5, Training Loss: 0.1046\n",
      "Epoch 4/5, Training Loss: 0.1224\n",
      "Epoch 4/5, Training Loss: 0.2223\n",
      "Epoch 4/5, Training Loss: 0.1107\n",
      "Epoch 4/5, Training Loss: 0.1685\n",
      "Epoch 4/5, Training Loss: 0.0784\n",
      "Epoch 4/5, Training Loss: 0.1110\n",
      "Epoch 4/5, Training Loss: 0.2561\n",
      "Epoch 4/5, Training Loss: 0.1108\n",
      "Epoch 4/5, Training Loss: 0.2481\n",
      "Epoch 4/5, Training Loss: 0.1937\n",
      "Epoch 4/5, Training Loss: 0.1035\n",
      "Epoch 4/5, Training Loss: 0.1700\n",
      "Epoch 4/5, Training Loss: 0.1680\n",
      "Epoch 4/5, Training Loss: 0.2251\n",
      "Epoch 4/5, Training Loss: 0.2225\n",
      "Epoch 4/5, Training Loss: 0.1492\n",
      "Epoch 4/5, Training Loss: 0.0505\n",
      "Epoch 4/5, Training Loss: 0.1737\n",
      "Epoch 4/5, Training Loss: 0.1833\n",
      "Epoch 4/5, Training Loss: 0.2313\n",
      "Epoch 4/5, Training Loss: 0.2059\n",
      "Epoch 4/5, Training Loss: 0.1849\n",
      "Epoch 4/5, Training Loss: 0.3273\n",
      "Epoch 4/5, Training Loss: 0.1289\n",
      "Epoch 4/5, Training Loss: 0.2209\n",
      "Epoch 4/5, Training Loss: 0.0853\n",
      "Epoch 4/5, Training Loss: 0.2085\n",
      "Epoch 4/5, Training Loss: 0.1323\n",
      "Epoch 4/5, Training Loss: 0.1434\n",
      "Epoch 4/5, Training Loss: 0.1570\n",
      "Epoch 4/5, Training Loss: 0.2007\n",
      "Epoch 4/5, Training Loss: 0.0988\n",
      "Epoch 4/5, Training Loss: 0.2217\n",
      "Epoch 4/5, Training Loss: 0.1366\n",
      "Epoch 4/5, Training Loss: 0.1073\n",
      "Epoch 4/5, Training Loss: 0.1716\n",
      "Epoch 4/5, Training Loss: 0.1166\n",
      "Epoch 4/5, Training Loss: 0.1392\n",
      "Epoch 4/5, Training Loss: 0.2277\n",
      "Epoch 4/5, Training Loss: 0.1947\n",
      "Epoch 4/5, Training Loss: 0.1911\n",
      "Epoch 4/5, Training Loss: 0.1272\n",
      "Epoch 4/5, Training Loss: 0.3735\n",
      "Epoch 4/5, Training Loss: 0.1349\n",
      "Epoch 4/5, Training Loss: 0.4172\n",
      "Epoch 4/5, Training Loss: 0.0928\n",
      "Epoch 4/5, Training Loss: 0.2139\n",
      "Epoch 4/5, Training Loss: 0.2779\n",
      "Epoch 4/5, Training Loss: 0.1165\n",
      "Epoch 4/5, Training Loss: 0.1472\n",
      "Epoch 4/5, Training Loss: 0.1290\n",
      "Epoch 4/5, Training Loss: 0.1771\n",
      "Epoch 4/5, Training Loss: 0.1575\n",
      "Epoch 4/5, Training Loss: 0.0787\n",
      "Epoch 4/5, Training Loss: 0.2983\n",
      "Epoch 4/5, Training Loss: 0.1459\n",
      "Epoch 4/5, Training Loss: 0.1309\n",
      "Epoch 4/5, Training Loss: 0.1706\n",
      "Epoch 4/5, Training Loss: 0.3091\n",
      "Epoch 4/5, Training Loss: 0.2787\n",
      "Epoch 4/5, Training Loss: 0.0691\n",
      "Epoch 4/5, Training Loss: 0.2704\n",
      "Epoch 4/5, Training Loss: 0.2007\n",
      "Epoch 4/5, Training Loss: 0.2008\n",
      "Epoch 4/5, Training Loss: 0.2613\n",
      "Epoch 4/5, Training Loss: 0.2854\n",
      "Epoch 4/5, Training Loss: 0.1628\n",
      "Epoch 4/5, Training Loss: 0.1706\n",
      "Epoch 4/5, Training Loss: 0.2303\n",
      "Epoch 4/5, Training Loss: 0.3669\n",
      "Epoch 4/5, Training Loss: 0.1855\n",
      "Epoch 4/5, Training Loss: 0.2423\n",
      "Epoch 4/5, Training Loss: 0.2256\n",
      "Epoch 4/5, Training Loss: 0.3650\n",
      "Epoch 4/5, Training Loss: 0.2355\n",
      "Epoch 4/5, Training Loss: 0.2536\n",
      "Epoch 4/5, Training Loss: 0.3171\n",
      "Epoch 4/5, Training Loss: 0.3996\n",
      "Epoch 4/5, Training Loss: 0.2015\n",
      "Epoch 4/5, Training Loss: 0.1657\n",
      "Epoch 4/5, Training Loss: 0.1425\n",
      "Epoch 4/5, Training Loss: 0.1401\n",
      "Epoch 4/5, Training Loss: 0.1745\n",
      "Epoch 4/5, Training Loss: 0.2658\n",
      "Epoch 4/5, Training Loss: 0.2414\n",
      "Epoch 4/5, Training Loss: 0.1935\n",
      "Epoch 4/5, Training Loss: 0.1695\n",
      "Epoch 4/5, Training Loss: 0.2328\n",
      "Epoch 4/5, Training Loss: 0.2608\n",
      "Epoch 4/5, Training Loss: 0.1013\n",
      "Epoch 4/5, Training Loss: 0.1432\n",
      "Epoch 4/5, Training Loss: 0.1572\n",
      "Epoch 4/5, Training Loss: 0.1408\n",
      "Epoch 4/5, Training Loss: 0.1643\n",
      "Epoch 4/5, Training Loss: 0.3293\n",
      "Epoch 4/5, Training Loss: 0.2595\n",
      "Epoch 4/5, Training Loss: 0.3526\n",
      "Epoch 4/5, Training Loss: 0.1697\n",
      "Epoch 4/5, Training Loss: 0.1984\n",
      "Epoch 4/5, Training Loss: 0.2979\n",
      "Epoch 4/5, Training Loss: 0.1147\n",
      "Epoch 4/5, Training Loss: 0.1232\n",
      "Epoch 4/5, Training Loss: 0.2390\n",
      "Epoch 4/5, Training Loss: 0.2926\n",
      "Epoch 4/5, Training Loss: 0.2911\n",
      "Epoch 4/5, Training Loss: 0.2360\n",
      "Epoch 4/5, Training Loss: 0.1125\n",
      "Epoch 4/5, Training Loss: 0.3123\n",
      "Epoch 4/5, Training Loss: 0.0726\n",
      "Epoch 4/5, Training Loss: 0.2233\n",
      "Epoch 4/5, Training Loss: 0.1562\n",
      "Epoch 4/5, Training Loss: 0.1578\n",
      "Epoch 4/5, Training Loss: 0.1664\n",
      "Epoch 4/5, Training Loss: 0.0586\n",
      "Epoch 4/5, Training Loss: 0.2665\n",
      "Epoch 4/5, Training Loss: 0.2845\n",
      "Epoch 4/5, Training Loss: 0.1189\n",
      "Epoch 4/5, Training Loss: 0.1680\n",
      "Epoch 4/5, Training Loss: 0.2632\n",
      "Epoch 4/5, Training Loss: 0.1823\n",
      "Epoch 4/5, Training Loss: 0.1280\n",
      "Epoch 4/5, Training Loss: 0.1324\n",
      "Epoch 4/5, Training Loss: 0.3864\n",
      "Epoch 4/5, Training Loss: 0.0669\n",
      "Epoch 4/5, Training Loss: 0.3437\n",
      "Epoch 4/5, Training Loss: 0.1567\n",
      "Epoch 4/5, Training Loss: 0.1597\n",
      "Epoch 4/5, Training Loss: 0.1378\n",
      "Epoch 4/5, Training Loss: 0.1938\n",
      "Epoch 4/5, Training Loss: 0.2262\n",
      "Epoch 4/5, Training Loss: 0.1966\n",
      "Epoch 4/5, Training Loss: 0.3288\n",
      "Epoch 4/5, Training Loss: 0.0712\n",
      "Epoch 4/5, Training Loss: 0.2084\n",
      "Epoch 4/5, Training Loss: 0.1123\n",
      "Epoch 4/5, Training Loss: 0.2395\n",
      "Epoch 4/5, Training Loss: 0.1619\n",
      "Epoch 4/5, Training Loss: 0.1238\n",
      "Epoch 4/5, Training Loss: 0.3210\n",
      "Epoch 4/5, Training Loss: 0.4161\n",
      "Epoch 4/5, Training Loss: 0.3083\n",
      "Epoch 4/5, Training Loss: 0.1701\n",
      "Epoch 4/5, Training Loss: 0.2536\n",
      "Epoch 4/5, Training Loss: 0.2059\n",
      "Epoch 4/5, Training Loss: 0.1975\n",
      "Epoch 4/5, Training Loss: 0.2794\n",
      "Epoch 4/5, Training Loss: 0.1812\n",
      "Epoch 4/5, Training Loss: 0.1150\n",
      "Epoch 4/5, Training Loss: 0.2033\n",
      "Epoch 4/5, Training Loss: 0.1827\n",
      "Epoch 4/5, Training Loss: 0.0551\n",
      "Epoch 4/5, Training Loss: 0.2696\n",
      "Epoch 4/5, Training Loss: 0.1048\n",
      "Epoch 4/5, Training Loss: 0.3095\n",
      "Epoch 4/5, Training Loss: 0.2279\n",
      "Epoch 4/5, Training Loss: 0.2167\n",
      "Epoch 4/5, Training Loss: 0.1424\n",
      "Epoch 4/5, Training Loss: 0.1326\n",
      "Epoch 4/5, Training Loss: 0.2076\n",
      "Epoch 4/5, Training Loss: 0.1252\n",
      "Epoch 4/5, Training Loss: 0.1647\n",
      "Epoch 4/5, Training Loss: 0.1207\n",
      "Epoch 4/5, Training Loss: 0.3024\n",
      "Epoch 4/5, Training Loss: 0.3420\n",
      "Epoch 4/5, Training Loss: 0.3707\n",
      "Epoch 4/5, Training Loss: 0.1928\n",
      "Epoch 4/5, Training Loss: 0.0849\n",
      "Epoch 4/5, Training Loss: 0.2025\n",
      "Epoch 4/5, Training Loss: 0.2731\n",
      "Epoch 4/5, Training Loss: 0.1896\n",
      "Epoch 4/5, Training Loss: 0.1724\n",
      "Epoch 4/5, Training Loss: 0.3416\n",
      "Epoch 4/5, Training Loss: 0.1692\n",
      "Epoch 4/5, Training Loss: 0.2578\n",
      "Epoch 4/5, Training Loss: 0.3097\n",
      "Epoch 4/5, Training Loss: 0.1248\n",
      "Epoch 4/5, Training Loss: 0.2178\n",
      "Epoch 4/5, Training Loss: 0.1210\n",
      "Epoch 4/5, Training Loss: 0.3342\n",
      "Epoch 4/5, Training Loss: 0.1839\n",
      "Epoch 4/5, Training Loss: 0.3001\n",
      "Epoch 4/5, Training Loss: 0.1005\n",
      "Epoch 4/5, Training Loss: 0.1275\n",
      "Epoch 4/5, Training Loss: 0.2009\n",
      "Epoch 4/5, Training Loss: 0.1451\n",
      "Epoch 4/5, Training Loss: 0.3222\n",
      "Epoch 4/5, Training Loss: 0.2113\n",
      "Epoch 4/5, Training Loss: 0.2573\n",
      "Epoch 4/5, Training Loss: 0.2603\n",
      "Epoch 4/5, Training Loss: 0.1317\n",
      "Epoch 4/5, Training Loss: 0.2659\n",
      "Epoch 4/5, Training Loss: 0.3435\n",
      "Epoch 4/5, Training Loss: 0.2652\n",
      "Epoch 4/5, Training Loss: 0.1269\n",
      "Epoch 4/5, Training Loss: 0.1813\n",
      "Epoch 4/5, Training Loss: 0.1765\n",
      "Epoch 4/5, Training Loss: 0.1454\n",
      "Epoch 4/5, Training Loss: 0.1365\n",
      "Epoch 4/5, Training Loss: 0.1116\n",
      "Epoch 4/5, Training Loss: 0.2453\n",
      "Epoch 4/5, Training Loss: 0.2830\n",
      "Epoch 4/5, Training Loss: 0.1528\n",
      "Epoch 4/5, Training Loss: 0.1751\n",
      "Epoch 4/5, Training Loss: 0.1411\n",
      "Epoch 4/5, Training Loss: 0.1474\n",
      "Epoch 4/5, Training Loss: 0.2218\n",
      "Epoch 4/5, Training Loss: 0.1687\n",
      "Epoch 4/5, Training Loss: 0.2138\n",
      "Epoch 4/5, Training Loss: 0.3785\n",
      "Epoch 4/5, Training Loss: 0.2059\n",
      "Epoch 4/5, Training Loss: 0.2533\n",
      "Epoch 4/5, Training Loss: 0.4153\n",
      "Epoch 4/5, Training Loss: 0.0917\n",
      "Epoch 4/5, Training Loss: 0.2162\n",
      "Epoch 4/5, Training Loss: 0.1399\n",
      "Epoch 4/5, Training Loss: 0.2409\n",
      "Epoch 4/5, Training Loss: 0.0738\n",
      "Epoch 4/5, Training Loss: 0.2299\n",
      "Epoch 4/5, Training Loss: 0.1357\n",
      "Epoch 4/5, Training Loss: 0.1937\n",
      "Epoch 4/5, Training Loss: 0.2271\n",
      "Epoch 4/5, Training Loss: 0.2602\n",
      "Epoch 4/5, Training Loss: 0.1627\n",
      "Epoch 4/5, Training Loss: 0.1989\n",
      "Epoch 4/5, Training Loss: 0.3252\n",
      "Epoch 4/5, Training Loss: 0.2690\n",
      "Epoch 4/5, Training Loss: 0.2126\n",
      "Epoch 4/5, Training Loss: 0.3117\n",
      "Epoch 4/5, Training Loss: 0.3213\n",
      "Epoch 4/5, Training Loss: 0.1897\n",
      "Epoch 4/5, Training Loss: 0.2417\n",
      "Epoch 4/5, Training Loss: 0.1694\n",
      "Epoch 4/5, Training Loss: 0.3531\n",
      "Epoch 4/5, Training Loss: 0.2307\n",
      "Epoch 4/5, Training Loss: 0.1791\n",
      "Epoch 4/5, Training Loss: 0.2336\n",
      "Epoch 4/5, Training Loss: 0.2387\n",
      "Epoch 4/5, Training Loss: 0.1545\n",
      "Epoch 4/5, Training Loss: 0.1817\n",
      "Epoch 4/5, Training Loss: 0.2285\n",
      "Epoch 4/5, Training Loss: 0.2666\n",
      "Epoch 4/5, Training Loss: 0.3176\n",
      "Epoch 4/5, Training Loss: 0.2527\n",
      "Epoch 4/5, Training Loss: 0.1876\n",
      "Epoch 4/5, Training Loss: 0.2178\n",
      "Epoch 4/5, Training Loss: 0.2497\n",
      "Epoch 4/5, Training Loss: 0.0628\n",
      "Epoch 4/5, Training Loss: 0.1858\n",
      "Epoch 4/5, Training Loss: 0.1949\n",
      "Epoch 4/5, Training Loss: 0.3051\n",
      "Epoch 4/5, Training Loss: 0.1479\n",
      "Epoch 4/5, Training Loss: 0.3168\n",
      "Epoch 4/5, Training Loss: 0.1597\n",
      "Epoch 4/5, Training Loss: 0.2837\n",
      "Epoch 4/5, Training Loss: 0.0937\n",
      "Epoch 4/5, Training Loss: 0.2481\n",
      "Epoch 4/5, Training Loss: 0.2768\n",
      "Epoch 4/5, Training Loss: 0.2650\n",
      "Epoch 4/5, Training Loss: 0.2968\n",
      "Epoch 4/5, Training Loss: 0.2142\n",
      "Epoch 4/5, Training Loss: 0.2186\n",
      "Epoch 4/5, Training Loss: 0.1338\n",
      "Epoch 4/5, Training Loss: 0.2229\n",
      "Epoch 4/5, Training Loss: 0.2244\n",
      "Epoch 4/5, Training Loss: 0.3333\n",
      "Epoch 4/5, Training Loss: 0.1513\n",
      "Epoch 4/5, Training Loss: 0.2227\n",
      "Epoch 4/5, Training Loss: 0.3162\n",
      "Epoch 4/5, Training Loss: 0.1767\n",
      "Epoch 4/5, Training Loss: 0.3098\n",
      "Epoch 4/5, Training Loss: 0.1597\n",
      "Epoch 4/5, Training Loss: 0.1445\n",
      "Epoch 4/5, Training Loss: 0.1418\n",
      "Epoch 4/5, Training Loss: 0.1923\n",
      "Epoch 4/5, Training Loss: 0.2859\n",
      "Epoch 4/5, Training Loss: 0.2479\n",
      "Epoch 4/5, Training Loss: 0.1764\n",
      "Epoch 4/5, Training Loss: 0.2211\n",
      "Epoch 4/5, Training Loss: 0.2312\n",
      "Epoch 4/5, Training Loss: 0.1367\n",
      "Epoch 4/5, Training Loss: 0.3599\n",
      "Epoch 4/5, Training Loss: 0.2488\n",
      "Epoch 4/5, Training Loss: 0.4186\n",
      "Epoch 4/5, Training Loss: 0.3606\n",
      "Epoch 4/5, Training Loss: 0.2292\n",
      "Epoch 4/5, Training Loss: 0.1858\n",
      "Epoch 4/5, Training Loss: 0.4222\n",
      "Epoch 4/5, Training Loss: 0.4067\n",
      "Epoch 4/5, Training Loss: 0.2737\n",
      "Epoch 4/5, Training Loss: 0.2403\n",
      "Epoch 4/5, Training Loss: 0.2288\n",
      "Epoch 4/5, Training Loss: 0.2541\n",
      "Epoch 4/5, Training Loss: 0.1537\n",
      "Epoch 4/5, Training Loss: 0.2219\n",
      "Epoch 4/5, Training Loss: 0.3471\n",
      "Epoch 4/5, Training Loss: 0.2676\n",
      "Epoch 4/5, Training Loss: 0.2717\n",
      "Epoch 4/5, Training Loss: 0.2334\n",
      "Epoch 4/5, Training Loss: 0.3307\n",
      "Epoch 4/5, Training Loss: 0.3400\n",
      "Epoch 4/5, Training Loss: 0.3429\n",
      "Epoch 4/5, Training Loss: 0.1834\n",
      "Epoch 4/5, Training Loss: 0.2064\n",
      "Epoch 4/5, Training Loss: 0.1602\n",
      "Epoch 4/5, Training Loss: 0.2936\n",
      "Epoch 4/5, Training Loss: 0.2237\n",
      "Epoch 4/5, Training Loss: 0.2728\n",
      "Epoch 4/5, Training Loss: 0.2985\n",
      "Epoch 4/5, Training Loss: 0.1052\n",
      "Epoch 4/5, Training Loss: 0.3251\n",
      "Epoch 4/5, Training Loss: 0.3554\n",
      "Epoch 4/5, Training Loss: 0.3326\n",
      "Epoch 4/5, Training Loss: 0.2430\n",
      "Epoch 4/5, Training Loss: 0.3957\n",
      "Epoch 4/5, Training Loss: 0.1756\n",
      "Epoch 4/5, Training Loss: 0.1825\n",
      "Epoch 4/5, Training Loss: 0.3033\n",
      "Epoch 4/5, Training Loss: 0.2394\n",
      "Epoch 4/5, Training Loss: 0.2700\n",
      "Epoch 4/5, Training Loss: 0.3043\n",
      "Epoch 4/5, Training Loss: 0.3335\n",
      "Epoch 4/5, Training Loss: 0.1882\n",
      "Epoch 4/5, Training Loss: 0.2534\n",
      "Epoch 4/5, Training Loss: 0.0935\n",
      "Epoch 4/5, Training Loss: 0.2316\n",
      "Epoch 4/5, Training Loss: 0.2230\n",
      "Epoch 4/5, Training Loss: 0.2512\n",
      "Epoch 4/5, Training Loss: 0.2441\n",
      "Epoch 4/5, Training Loss: 0.1768\n",
      "Epoch 4/5, Training Loss: 0.2705\n",
      "Epoch 4/5, Training Loss: 0.1982\n",
      "Epoch 4/5, Training Loss: 0.1430\n",
      "Epoch 4/5, Training Loss: 0.2928\n",
      "Epoch 4/5, Training Loss: 0.2904\n",
      "Epoch 4/5, Training Loss: 0.1588\n",
      "Epoch 4/5, Training Loss: 0.4442\n",
      "Epoch 4/5, Training Loss: 0.2923\n",
      "Epoch 4/5, Training Loss: 0.1992\n",
      "Epoch 4/5, Training Loss: 0.2853\n",
      "Epoch 4/5, Training Loss: 0.2396\n",
      "Epoch 4/5, Training Loss: 0.1664\n",
      "Epoch 4/5, Training Loss: 0.2338\n",
      "Epoch 4/5, Training Loss: 0.2675\n",
      "Epoch 4/5, Training Loss: 0.3723\n",
      "Epoch 4/5, Training Loss: 0.3447\n",
      "Epoch 4/5, Training Loss: 0.2226\n",
      "Epoch 4/5, Training Loss: 0.1324\n",
      "Epoch 4/5, Training Loss: 0.2966\n",
      "Epoch 4/5, Training Loss: 0.1977\n",
      "Epoch 4/5, Training Loss: 0.1139\n",
      "Epoch 4/5, Training Loss: 0.3289\n",
      "Epoch 4/5, Training Loss: 0.1928\n",
      "Epoch 4/5, Training Loss: 0.3589\n",
      "Epoch 4/5, Training Loss: 0.1470\n",
      "Epoch 4/5, Training Loss: 0.2833\n",
      "Epoch 4/5, Training Loss: 0.2017\n",
      "Epoch 4/5, Training Loss: 0.1720\n",
      "Epoch 4/5, Training Loss: 0.2902\n",
      "Epoch 4/5, Training Loss: 0.3657\n",
      "Epoch 4/5, Training Loss: 0.2350\n",
      "Epoch 4/5, Training Loss: 0.6118\n",
      "Epoch 4/5, Training Loss: 0.3792\n",
      "Epoch 4/5, Training Loss: 0.4378\n",
      "Epoch 4/5, Training Loss: 0.0902\n",
      "Epoch 4/5, Training Loss: 0.3576\n",
      "Epoch 4/5, Training Loss: 0.2929\n",
      "Epoch 4/5, Training Loss: 0.1709\n",
      "Epoch 4/5, Training Loss: 0.1295\n",
      "Epoch 4/5, Training Loss: 0.3047\n",
      "Epoch 4/5, Training Loss: 0.4065\n",
      "Epoch 4/5, Training Loss: 0.3903\n",
      "Epoch 4/5, Training Loss: 0.3714\n",
      "Epoch 4/5, Training Loss: 0.2057\n",
      "Epoch 4/5, Training Loss: 0.3178\n",
      "Epoch 4/5, Training Loss: 0.2027\n",
      "Epoch 4/5, Training Loss: 0.2184\n",
      "Epoch 4/5, Training Loss: 0.2186\n",
      "Epoch 4/5, Training Loss: 0.1969\n",
      "Epoch 4/5, Training Loss: 0.3565\n",
      "Epoch 4/5, Training Loss: 0.2772\n",
      "Epoch 4/5, Training Loss: 0.2427\n",
      "Epoch 4/5, Training Loss: 0.1941\n",
      "Epoch 4/5, Training Loss: 0.2501\n",
      "Epoch 4/5, Training Loss: 0.2863\n",
      "Epoch 4/5, Training Loss: 0.2617\n",
      "Epoch 4/5, Training Loss: 0.2034\n",
      "Epoch 4/5, Training Loss: 0.2958\n",
      "Epoch 4/5, Training Loss: 0.2732\n",
      "Epoch 4/5, Training Loss: 0.1325\n",
      "Epoch 4/5, Training Loss: 0.2404\n",
      "Epoch 4/5, Training Loss: 0.1112\n",
      "Epoch 4/5, Training Loss: 0.5463\n",
      "Epoch 4/5, Training Loss: 0.3653\n",
      "Epoch 4/5, Training Loss: 0.0855\n",
      "Epoch 4/5, Training Loss: 0.4941\n",
      "Epoch 4/5, Training Loss: 0.2066\n",
      "Epoch 4/5, Training Loss: 0.0995\n",
      "Epoch 4/5, Training Loss: 0.2946\n",
      "Epoch 4/5, Training Loss: 0.1217\n",
      "Epoch 4/5, Training Loss: 0.1235\n",
      "Epoch 4/5, Training Loss: 0.2485\n",
      "Epoch 4/5, Training Loss: 0.2846\n",
      "Epoch 4/5, Training Loss: 0.2021\n",
      "Epoch 4/5, Training Loss: 0.2162\n",
      "Epoch 4/5, Training Loss: 0.2769\n",
      "Epoch 4/5, Training Loss: 0.3301\n",
      "Epoch 4/5, Training Loss: 0.3893\n",
      "Epoch 4/5, Training Loss: 0.1503\n",
      "Epoch 4/5, Training Loss: 0.2021\n",
      "Epoch 4/5, Training Loss: 0.1454\n",
      "Epoch 4/5, Training Loss: 0.1414\n",
      "Epoch 4/5, Training Loss: 0.1836\n",
      "Epoch 4/5, Training Loss: 0.2073\n",
      "Epoch 4/5, Training Loss: 0.1647\n",
      "Epoch 4/5, Training Loss: 0.3452\n",
      "Epoch 4/5, Training Loss: 0.3038\n",
      "Epoch 4/5, Training Loss: 0.2557\n",
      "Epoch 4/5, Training Loss: 0.4510\n",
      "Epoch 4/5, Training Loss: 0.2773\n",
      "Epoch 4/5, Training Loss: 0.2897\n",
      "Epoch 4/5, Training Loss: 0.2756\n",
      "Epoch 4/5, Training Loss: 0.1694\n",
      "Epoch 4/5, Training Loss: 0.2617\n",
      "Epoch 4/5, Training Loss: 0.0878\n",
      "Epoch 4/5, Training Loss: 0.1266\n",
      "Epoch 4/5, Training Loss: 0.1833\n",
      "Epoch 4/5, Training Loss: 0.3054\n",
      "Epoch 4/5, Training Loss: 0.1270\n",
      "Epoch 4/5, Training Loss: 0.1658\n",
      "Epoch 4/5, Training Loss: 0.3256\n",
      "Epoch 4/5, Training Loss: 0.1642\n",
      "Epoch 4/5, Training Loss: 0.1899\n",
      "Epoch 4/5, Training Loss: 0.1892\n",
      "Epoch 4/5, Training Loss: 0.1286\n",
      "Epoch 4/5, Training Loss: 0.2036\n",
      "Epoch 4/5, Training Loss: 0.3016\n",
      "Epoch 4/5, Training Loss: 0.4681\n",
      "Epoch 4/5, Training Loss: 0.3868\n",
      "Epoch 4/5, Training Loss: 0.2508\n",
      "Epoch 4/5, Training Loss: 0.1493\n",
      "Epoch 4/5, Training Loss: 0.1754\n",
      "Epoch 4/5, Training Loss: 0.2007\n",
      "Epoch 4/5, Training Loss: 0.1713\n",
      "Epoch 4/5, Training Loss: 0.4242\n",
      "Epoch 4/5, Training Loss: 0.3380\n",
      "Epoch 4/5, Training Loss: 0.1265\n",
      "Epoch 4/5, Training Loss: 0.2892\n",
      "Epoch 4/5, Training Loss: 0.4102\n",
      "Epoch 4/5, Training Loss: 0.2086\n",
      "Epoch 4/5, Training Loss: 0.2411\n",
      "Epoch 4/5, Training Loss: 0.0759\n",
      "Epoch 4/5, Training Loss: 0.2959\n",
      "Epoch 4/5, Training Loss: 0.3603\n",
      "Epoch 4/5, Training Loss: 0.2559\n",
      "Epoch 4/5, Training Loss: 0.3466\n",
      "Epoch 4/5, Training Loss: 0.4783\n",
      "Epoch 4/5, Training Loss: 0.3020\n",
      "Epoch 4/5, Training Loss: 0.2515\n",
      "Epoch 4/5, Training Loss: 0.3945\n",
      "Epoch 4/5, Training Loss: 0.1295\n",
      "Epoch 4/5, Training Loss: 0.3077\n",
      "Epoch 4/5, Training Loss: 0.1602\n",
      "Epoch 4/5, Training Loss: 0.1968\n",
      "Epoch 4/5, Training Loss: 0.1719\n",
      "Epoch 4/5, Training Loss: 0.2574\n",
      "Epoch 4/5, Training Loss: 0.2466\n",
      "Epoch 4/5, Training Loss: 0.2725\n",
      "Epoch 4/5, Training Loss: 0.2312\n",
      "Epoch 4/5, Training Loss: 0.2895\n",
      "Epoch 4/5, Training Loss: 0.1576\n",
      "Epoch 4/5, Training Loss: 0.3127\n",
      "Epoch 4/5, Training Loss: 0.2166\n",
      "Epoch 4/5, Training Loss: 0.1618\n",
      "Epoch 4/5, Training Loss: 0.2695\n",
      "Epoch 4/5, Training Loss: 0.1355\n",
      "Epoch 4/5, Training Loss: 0.1667\n",
      "Epoch 4/5, Training Loss: 0.2765\n",
      "Epoch 4/5, Training Loss: 0.2552\n",
      "Epoch 4/5, Training Loss: 0.1681\n",
      "Epoch 4/5, Training Loss: 0.2038\n",
      "Epoch 4/5, Training Loss: 0.1059\n",
      "Epoch 4/5, Training Loss: 0.2617\n",
      "Epoch 4/5, Training Loss: 0.1223\n",
      "Epoch 4/5, Training Loss: 0.2359\n",
      "Epoch 4/5, Training Loss: 0.2044\n",
      "Epoch 4/5, Training Loss: 0.2879\n",
      "Epoch 4/5, Training Loss: 0.2514\n",
      "Epoch 4/5, Training Loss: 0.2696\n",
      "Epoch 4/5, Training Loss: 0.0588\n",
      "Epoch 4/5, Training Loss: 0.3027\n",
      "Epoch 4/5, Training Loss: 0.2621\n",
      "Epoch 4/5, Training Loss: 0.1820\n",
      "Epoch 4/5, Training Loss: 0.2546\n",
      "Epoch 4/5, Training Loss: 0.2007\n",
      "Epoch 4/5, Training Loss: 0.2630\n",
      "Epoch 4/5, Training Loss: 0.2651\n",
      "Epoch 4/5, Training Loss: 0.2044\n",
      "Epoch 4/5, Training Loss: 0.2392\n",
      "Epoch 4/5, Training Loss: 0.1646\n",
      "Epoch 4/5, Training Loss: 0.3504\n",
      "Epoch 4/5, Training Loss: 0.2920\n",
      "Epoch 4/5, Training Loss: 0.2198\n",
      "Epoch 4/5, Training Loss: 0.2506\n",
      "Epoch 4/5, Training Loss: 0.1325\n",
      "Epoch 4/5, Training Loss: 0.2366\n",
      "Epoch 4/5, Training Loss: 0.3853\n",
      "Epoch 4/5, Training Loss: 0.1516\n",
      "Epoch 4/5, Training Loss: 0.2294\n",
      "Epoch 4/5, Training Loss: 0.2728\n",
      "Epoch 4/5, Training Loss: 0.1522\n",
      "Epoch 4/5, Training Loss: 0.1458\n",
      "Epoch 4/5, Training Loss: 0.1026\n",
      "Epoch 4/5, Training Loss: 0.2183\n",
      "Epoch 4/5, Training Loss: 0.1866\n",
      "Epoch 4/5, Training Loss: 0.1167\n",
      "Epoch 4/5, Training Loss: 0.1388\n",
      "Epoch 4/5, Training Loss: 0.1995\n",
      "Epoch 4/5, Training Loss: 0.4437\n",
      "Epoch 4/5, Training Loss: 0.3386\n",
      "Epoch 4/5, Training Loss: 0.1076\n",
      "Epoch 4/5, Training Loss: 0.2656\n",
      "Epoch 4/5, Training Loss: 0.1579\n",
      "Epoch 4/5, Training Loss: 0.2316\n",
      "Epoch 4/5, Training Loss: 0.2369\n",
      "Epoch 4/5, Training Loss: 0.1491\n",
      "Epoch 4/5, Training Loss: 0.1676\n",
      "Epoch 4/5, Training Loss: 0.2007\n",
      "Epoch 4/5, Training Loss: 0.2064\n",
      "Epoch 4/5, Training Loss: 0.1906\n",
      "Epoch 4/5, Training Loss: 0.0961\n",
      "Epoch 4/5, Training Loss: 0.3476\n",
      "Epoch 4/5, Training Loss: 0.1729\n",
      "Epoch 4/5, Training Loss: 0.2694\n",
      "Epoch 4/5, Training Loss: 0.2320\n",
      "Epoch 4/5, Training Loss: 0.2897\n",
      "Epoch 4/5, Training Loss: 0.2013\n",
      "Epoch 4/5, Training Loss: 0.1348\n",
      "Epoch 4/5, Training Loss: 0.4424\n",
      "Epoch 4/5, Training Loss: 0.4573\n",
      "Epoch 4/5, Training Loss: 0.2735\n",
      "Epoch 4/5, Training Loss: 0.2232\n",
      "Epoch 4/5, Training Loss: 0.1914\n",
      "Epoch 4/5, Training Loss: 0.3104\n",
      "Epoch 4/5, Training Loss: 0.2435\n",
      "Epoch 4/5, Training Loss: 0.1616\n",
      "Epoch 4/5, Training Loss: 0.1282\n",
      "Epoch 4/5, Training Loss: 0.3158\n",
      "Epoch 4/5, Training Loss: 0.2781\n",
      "Epoch 4/5, Training Loss: 0.2513\n",
      "Epoch 4/5, Training Loss: 0.2237\n",
      "Epoch 4/5, Training Loss: 0.2648\n",
      "Epoch 4/5, Training Loss: 0.1381\n",
      "Epoch 4/5, Training Loss: 0.2691\n",
      "Epoch 4/5, Training Loss: 0.2368\n",
      "Epoch 4/5, Training Loss: 0.2194\n",
      "Epoch 4/5, Training Loss: 0.2589\n",
      "Epoch 4/5, Training Loss: 0.2105\n",
      "Epoch 4/5, Training Loss: 0.2636\n",
      "Epoch 4/5, Training Loss: 0.1986\n",
      "Epoch 4/5, Training Loss: 0.2109\n",
      "Epoch 4/5, Training Loss: 0.2746\n",
      "Epoch 4/5, Training Loss: 0.3035\n",
      "Epoch 4/5, Training Loss: 0.3050\n",
      "Epoch 4/5, Training Loss: 0.1650\n",
      "Epoch 4/5, Training Loss: 0.2376\n",
      "Epoch 4/5, Training Loss: 0.1967\n",
      "Epoch 4/5, Training Loss: 0.4021\n",
      "Epoch 4/5, Training Loss: 0.1452\n",
      "Epoch 4/5, Training Loss: 0.2219\n",
      "Epoch 4/5, Training Loss: 0.2843\n",
      "Epoch 4/5, Training Loss: 0.2429\n",
      "Epoch 4/5, Training Loss: 0.2019\n",
      "Epoch 4/5, Training Loss: 0.4010\n",
      "Epoch 4/5, Training Loss: 0.3540\n",
      "Epoch 4/5, Training Loss: 0.4150\n",
      "Epoch 4/5, Training Loss: 0.2642\n",
      "Epoch 4/5, Training Loss: 0.1451\n",
      "Epoch 4/5, Training Loss: 0.3676\n",
      "Epoch 4/5, Training Loss: 0.6642\n",
      "Epoch 4/5, Training Loss: 0.3510\n",
      "Epoch 4/5, Training Loss: 0.3117\n",
      "Epoch 4/5, Training Loss: 0.3015\n",
      "Epoch 4/5, Training Loss: 0.3558\n",
      "Epoch 4/5, Training Loss: 0.2838\n",
      "Epoch 4/5, Training Loss: 0.1788\n",
      "Epoch 4/5, Training Loss: 0.2387\n",
      "Epoch 4/5, Training Loss: 0.3395\n",
      "Epoch 4/5, Training Loss: 0.3046\n",
      "Epoch 4/5, Training Loss: 0.2526\n",
      "Epoch 4/5, Training Loss: 0.2004\n",
      "Epoch 4/5, Training Loss: 0.3588\n",
      "Epoch 4/5, Training Loss: 0.2913\n",
      "Epoch 4/5, Training Loss: 0.2150\n",
      "Epoch 4/5, Training Loss: 0.2198\n",
      "Epoch 4/5, Training Loss: 0.2982\n",
      "Epoch 4/5, Training Loss: 0.5484\n",
      "Epoch 4/5, Training Loss: 0.1639\n",
      "Epoch 4/5, Training Loss: 0.3362\n",
      "Epoch 4/5, Training Loss: 0.2340\n",
      "Epoch 4/5, Training Loss: 0.2268\n",
      "Epoch 4/5, Training Loss: 0.3085\n",
      "Epoch 4/5, Training Loss: 0.2133\n",
      "Epoch 4/5, Training Loss: 0.2133, Cross-Validation Loss: 0.4292\n",
      "Epoch 5/5, Training Loss: 0.2546\n",
      "Epoch 5/5, Training Loss: 0.1214\n",
      "Epoch 5/5, Training Loss: 0.2136\n",
      "Epoch 5/5, Training Loss: 0.2364\n",
      "Epoch 5/5, Training Loss: 0.0845\n",
      "Epoch 5/5, Training Loss: 0.1027\n",
      "Epoch 5/5, Training Loss: 0.1235\n",
      "Epoch 5/5, Training Loss: 0.2821\n",
      "Epoch 5/5, Training Loss: 0.0913\n",
      "Epoch 5/5, Training Loss: 0.1239\n",
      "Epoch 5/5, Training Loss: 0.1042\n",
      "Epoch 5/5, Training Loss: 0.1201\n",
      "Epoch 5/5, Training Loss: 0.1652\n",
      "Epoch 5/5, Training Loss: 0.1357\n",
      "Epoch 5/5, Training Loss: 0.1378\n",
      "Epoch 5/5, Training Loss: 0.0923\n",
      "Epoch 5/5, Training Loss: 0.1756\n",
      "Epoch 5/5, Training Loss: 0.0521\n",
      "Epoch 5/5, Training Loss: 0.1149\n",
      "Epoch 5/5, Training Loss: 0.1347\n",
      "Epoch 5/5, Training Loss: 0.2069\n",
      "Epoch 5/5, Training Loss: 0.1105\n",
      "Epoch 5/5, Training Loss: 0.1910\n",
      "Epoch 5/5, Training Loss: 0.1425\n",
      "Epoch 5/5, Training Loss: 0.0989\n",
      "Epoch 5/5, Training Loss: 0.1158\n",
      "Epoch 5/5, Training Loss: 0.1263\n",
      "Epoch 5/5, Training Loss: 0.1733\n",
      "Epoch 5/5, Training Loss: 0.0523\n",
      "Epoch 5/5, Training Loss: 0.0521\n",
      "Epoch 5/5, Training Loss: 0.0402\n",
      "Epoch 5/5, Training Loss: 0.0455\n",
      "Epoch 5/5, Training Loss: 0.0891\n",
      "Epoch 5/5, Training Loss: 0.1080\n",
      "Epoch 5/5, Training Loss: 0.0907\n",
      "Epoch 5/5, Training Loss: 0.1608\n",
      "Epoch 5/5, Training Loss: 0.1354\n",
      "Epoch 5/5, Training Loss: 0.0926\n",
      "Epoch 5/5, Training Loss: 0.0977\n",
      "Epoch 5/5, Training Loss: 0.1232\n",
      "Epoch 5/5, Training Loss: 0.1397\n",
      "Epoch 5/5, Training Loss: 0.1297\n",
      "Epoch 5/5, Training Loss: 0.0747\n",
      "Epoch 5/5, Training Loss: 0.1190\n",
      "Epoch 5/5, Training Loss: 0.2061\n",
      "Epoch 5/5, Training Loss: 0.1807\n",
      "Epoch 5/5, Training Loss: 0.1181\n",
      "Epoch 5/5, Training Loss: 0.2109\n",
      "Epoch 5/5, Training Loss: 0.0761\n",
      "Epoch 5/5, Training Loss: 0.1747\n",
      "Epoch 5/5, Training Loss: 0.1060\n",
      "Epoch 5/5, Training Loss: 0.1732\n",
      "Epoch 5/5, Training Loss: 0.0554\n",
      "Epoch 5/5, Training Loss: 0.1888\n",
      "Epoch 5/5, Training Loss: 0.1843\n",
      "Epoch 5/5, Training Loss: 0.0956\n",
      "Epoch 5/5, Training Loss: 0.0816\n",
      "Epoch 5/5, Training Loss: 0.2118\n",
      "Epoch 5/5, Training Loss: 0.0535\n",
      "Epoch 5/5, Training Loss: 0.1900\n",
      "Epoch 5/5, Training Loss: 0.0865\n",
      "Epoch 5/5, Training Loss: 0.0400\n",
      "Epoch 5/5, Training Loss: 0.2150\n",
      "Epoch 5/5, Training Loss: 0.0341\n",
      "Epoch 5/5, Training Loss: 0.0956\n",
      "Epoch 5/5, Training Loss: 0.3659\n",
      "Epoch 5/5, Training Loss: 0.1848\n",
      "Epoch 5/5, Training Loss: 0.0316\n",
      "Epoch 5/5, Training Loss: 0.0731\n",
      "Epoch 5/5, Training Loss: 0.1042\n",
      "Epoch 5/5, Training Loss: 0.0984\n",
      "Epoch 5/5, Training Loss: 0.0531\n",
      "Epoch 5/5, Training Loss: 0.0974\n",
      "Epoch 5/5, Training Loss: 0.2041\n",
      "Epoch 5/5, Training Loss: 0.0569\n",
      "Epoch 5/5, Training Loss: 0.0297\n",
      "Epoch 5/5, Training Loss: 0.0538\n",
      "Epoch 5/5, Training Loss: 0.1055\n",
      "Epoch 5/5, Training Loss: 0.1941\n",
      "Epoch 5/5, Training Loss: 0.1331\n",
      "Epoch 5/5, Training Loss: 0.0471\n",
      "Epoch 5/5, Training Loss: 0.0267\n",
      "Epoch 5/5, Training Loss: 0.0701\n",
      "Epoch 5/5, Training Loss: 0.0213\n",
      "Epoch 5/5, Training Loss: 0.1333\n",
      "Epoch 5/5, Training Loss: 0.0961\n",
      "Epoch 5/5, Training Loss: 0.1458\n",
      "Epoch 5/5, Training Loss: 0.1141\n",
      "Epoch 5/5, Training Loss: 0.1080\n",
      "Epoch 5/5, Training Loss: 0.0417\n",
      "Epoch 5/5, Training Loss: 0.1137\n",
      "Epoch 5/5, Training Loss: 0.2053\n",
      "Epoch 5/5, Training Loss: 0.1300\n",
      "Epoch 5/5, Training Loss: 0.1335\n",
      "Epoch 5/5, Training Loss: 0.1395\n",
      "Epoch 5/5, Training Loss: 0.1272\n",
      "Epoch 5/5, Training Loss: 0.1141\n",
      "Epoch 5/5, Training Loss: 0.0890\n",
      "Epoch 5/5, Training Loss: 0.0835\n",
      "Epoch 5/5, Training Loss: 0.1013\n",
      "Epoch 5/5, Training Loss: 0.1635\n",
      "Epoch 5/5, Training Loss: 0.0997\n",
      "Epoch 5/5, Training Loss: 0.0928\n",
      "Epoch 5/5, Training Loss: 0.1647\n",
      "Epoch 5/5, Training Loss: 0.0408\n",
      "Epoch 5/5, Training Loss: 0.1023\n",
      "Epoch 5/5, Training Loss: 0.1579\n",
      "Epoch 5/5, Training Loss: 0.2626\n",
      "Epoch 5/5, Training Loss: 0.2393\n",
      "Epoch 5/5, Training Loss: 0.0455\n",
      "Epoch 5/5, Training Loss: 0.1111\n",
      "Epoch 5/5, Training Loss: 0.1909\n",
      "Epoch 5/5, Training Loss: 0.1596\n",
      "Epoch 5/5, Training Loss: 0.1900\n",
      "Epoch 5/5, Training Loss: 0.0901\n",
      "Epoch 5/5, Training Loss: 0.0582\n",
      "Epoch 5/5, Training Loss: 0.0843\n",
      "Epoch 5/5, Training Loss: 0.0747\n",
      "Epoch 5/5, Training Loss: 0.0669\n",
      "Epoch 5/5, Training Loss: 0.0828\n",
      "Epoch 5/5, Training Loss: 0.0819\n",
      "Epoch 5/5, Training Loss: 0.1375\n",
      "Epoch 5/5, Training Loss: 0.1003\n",
      "Epoch 5/5, Training Loss: 0.1252\n",
      "Epoch 5/5, Training Loss: 0.2040\n",
      "Epoch 5/5, Training Loss: 0.2127\n",
      "Epoch 5/5, Training Loss: 0.1899\n",
      "Epoch 5/5, Training Loss: 0.1350\n",
      "Epoch 5/5, Training Loss: 0.1076\n",
      "Epoch 5/5, Training Loss: 0.0400\n",
      "Epoch 5/5, Training Loss: 0.1341\n",
      "Epoch 5/5, Training Loss: 0.1495\n",
      "Epoch 5/5, Training Loss: 0.1207\n",
      "Epoch 5/5, Training Loss: 0.1310\n",
      "Epoch 5/5, Training Loss: 0.3553\n",
      "Epoch 5/5, Training Loss: 0.2366\n",
      "Epoch 5/5, Training Loss: 0.0591\n",
      "Epoch 5/5, Training Loss: 0.0626\n",
      "Epoch 5/5, Training Loss: 0.0420\n",
      "Epoch 5/5, Training Loss: 0.0698\n",
      "Epoch 5/5, Training Loss: 0.1770\n",
      "Epoch 5/5, Training Loss: 0.0530\n",
      "Epoch 5/5, Training Loss: 0.2335\n",
      "Epoch 5/5, Training Loss: 0.1897\n",
      "Epoch 5/5, Training Loss: 0.2338\n",
      "Epoch 5/5, Training Loss: 0.1797\n",
      "Epoch 5/5, Training Loss: 0.1268\n",
      "Epoch 5/5, Training Loss: 0.1398\n",
      "Epoch 5/5, Training Loss: 0.1103\n",
      "Epoch 5/5, Training Loss: 0.1194\n",
      "Epoch 5/5, Training Loss: 0.4028\n",
      "Epoch 5/5, Training Loss: 0.2184\n",
      "Epoch 5/5, Training Loss: 0.0718\n",
      "Epoch 5/5, Training Loss: 0.0205\n",
      "Epoch 5/5, Training Loss: 0.0927\n",
      "Epoch 5/5, Training Loss: 0.0850\n",
      "Epoch 5/5, Training Loss: 0.0431\n",
      "Epoch 5/5, Training Loss: 0.1743\n",
      "Epoch 5/5, Training Loss: 0.2356\n",
      "Epoch 5/5, Training Loss: 0.1883\n",
      "Epoch 5/5, Training Loss: 0.0933\n",
      "Epoch 5/5, Training Loss: 0.2882\n",
      "Epoch 5/5, Training Loss: 0.0728\n",
      "Epoch 5/5, Training Loss: 0.0800\n",
      "Epoch 5/5, Training Loss: 0.2162\n",
      "Epoch 5/5, Training Loss: 0.2369\n",
      "Epoch 5/5, Training Loss: 0.1785\n",
      "Epoch 5/5, Training Loss: 0.2576\n",
      "Epoch 5/5, Training Loss: 0.2049\n",
      "Epoch 5/5, Training Loss: 0.0741\n",
      "Epoch 5/5, Training Loss: 0.1586\n",
      "Epoch 5/5, Training Loss: 0.0957\n",
      "Epoch 5/5, Training Loss: 0.1207\n",
      "Epoch 5/5, Training Loss: 0.2168\n",
      "Epoch 5/5, Training Loss: 0.1654\n",
      "Epoch 5/5, Training Loss: 0.1404\n",
      "Epoch 5/5, Training Loss: 0.0301\n",
      "Epoch 5/5, Training Loss: 0.1440\n",
      "Epoch 5/5, Training Loss: 0.1666\n",
      "Epoch 5/5, Training Loss: 0.2934\n",
      "Epoch 5/5, Training Loss: 0.1566\n",
      "Epoch 5/5, Training Loss: 0.1107\n",
      "Epoch 5/5, Training Loss: 0.1913\n",
      "Epoch 5/5, Training Loss: 0.3876\n",
      "Epoch 5/5, Training Loss: 0.0988\n",
      "Epoch 5/5, Training Loss: 0.0860\n",
      "Epoch 5/5, Training Loss: 0.1939\n",
      "Epoch 5/5, Training Loss: 0.2760\n",
      "Epoch 5/5, Training Loss: 0.0941\n",
      "Epoch 5/5, Training Loss: 0.2370\n",
      "Epoch 5/5, Training Loss: 0.3778\n",
      "Epoch 5/5, Training Loss: 0.1901\n",
      "Epoch 5/5, Training Loss: 0.1787\n",
      "Epoch 5/5, Training Loss: 0.1820\n",
      "Epoch 5/5, Training Loss: 0.1164\n",
      "Epoch 5/5, Training Loss: 0.2466\n",
      "Epoch 5/5, Training Loss: 0.2146\n",
      "Epoch 5/5, Training Loss: 0.2291\n",
      "Epoch 5/5, Training Loss: 0.1969\n",
      "Epoch 5/5, Training Loss: 0.2286\n",
      "Epoch 5/5, Training Loss: 0.0806\n",
      "Epoch 5/5, Training Loss: 0.0622\n",
      "Epoch 5/5, Training Loss: 0.3699\n",
      "Epoch 5/5, Training Loss: 0.1938\n",
      "Epoch 5/5, Training Loss: 0.2135\n",
      "Epoch 5/5, Training Loss: 0.2080\n",
      "Epoch 5/5, Training Loss: 0.0530\n",
      "Epoch 5/5, Training Loss: 0.1551\n",
      "Epoch 5/5, Training Loss: 0.1402\n",
      "Epoch 5/5, Training Loss: 0.0758\n",
      "Epoch 5/5, Training Loss: 0.1769\n",
      "Epoch 5/5, Training Loss: 0.4192\n",
      "Epoch 5/5, Training Loss: 0.1987\n",
      "Epoch 5/5, Training Loss: 0.2103\n",
      "Epoch 5/5, Training Loss: 0.2956\n",
      "Epoch 5/5, Training Loss: 0.2586\n",
      "Epoch 5/5, Training Loss: 0.0938\n",
      "Epoch 5/5, Training Loss: 0.0678\n",
      "Epoch 5/5, Training Loss: 0.2272\n",
      "Epoch 5/5, Training Loss: 0.0412\n",
      "Epoch 5/5, Training Loss: 0.1916\n",
      "Epoch 5/5, Training Loss: 0.1391\n",
      "Epoch 5/5, Training Loss: 0.3677\n",
      "Epoch 5/5, Training Loss: 0.1566\n",
      "Epoch 5/5, Training Loss: 0.2069\n",
      "Epoch 5/5, Training Loss: 0.1807\n",
      "Epoch 5/5, Training Loss: 0.1986\n",
      "Epoch 5/5, Training Loss: 0.1765\n",
      "Epoch 5/5, Training Loss: 0.0850\n",
      "Epoch 5/5, Training Loss: 0.2295\n",
      "Epoch 5/5, Training Loss: 0.1931\n",
      "Epoch 5/5, Training Loss: 0.1244\n",
      "Epoch 5/5, Training Loss: 0.2488\n",
      "Epoch 5/5, Training Loss: 0.4041\n",
      "Epoch 5/5, Training Loss: 0.1753\n",
      "Epoch 5/5, Training Loss: 0.1244\n",
      "Epoch 5/5, Training Loss: 0.1223\n",
      "Epoch 5/5, Training Loss: 0.0836\n",
      "Epoch 5/5, Training Loss: 0.1765\n",
      "Epoch 5/5, Training Loss: 0.0668\n",
      "Epoch 5/5, Training Loss: 0.0948\n",
      "Epoch 5/5, Training Loss: 0.1595\n",
      "Epoch 5/5, Training Loss: 0.1942\n",
      "Epoch 5/5, Training Loss: 0.1620\n",
      "Epoch 5/5, Training Loss: 0.1130\n",
      "Epoch 5/5, Training Loss: 0.1257\n",
      "Epoch 5/5, Training Loss: 0.1847\n",
      "Epoch 5/5, Training Loss: 0.1140\n",
      "Epoch 5/5, Training Loss: 0.1679\n",
      "Epoch 5/5, Training Loss: 0.1447\n",
      "Epoch 5/5, Training Loss: 0.3017\n",
      "Epoch 5/5, Training Loss: 0.0349\n",
      "Epoch 5/5, Training Loss: 0.0578\n",
      "Epoch 5/5, Training Loss: 0.0670\n",
      "Epoch 5/5, Training Loss: 0.1772\n",
      "Epoch 5/5, Training Loss: 0.2083\n",
      "Epoch 5/5, Training Loss: 0.0658\n",
      "Epoch 5/5, Training Loss: 0.2067\n",
      "Epoch 5/5, Training Loss: 0.2662\n",
      "Epoch 5/5, Training Loss: 0.3158\n",
      "Epoch 5/5, Training Loss: 0.3628\n",
      "Epoch 5/5, Training Loss: 0.1651\n",
      "Epoch 5/5, Training Loss: 0.2809\n",
      "Epoch 5/5, Training Loss: 0.5083\n",
      "Epoch 5/5, Training Loss: 0.1621\n",
      "Epoch 5/5, Training Loss: 0.3235\n",
      "Epoch 5/5, Training Loss: 0.1700\n",
      "Epoch 5/5, Training Loss: 0.2271\n",
      "Epoch 5/5, Training Loss: 0.0849\n",
      "Epoch 5/5, Training Loss: 0.2533\n",
      "Epoch 5/5, Training Loss: 0.1800\n",
      "Epoch 5/5, Training Loss: 0.1702\n",
      "Epoch 5/5, Training Loss: 0.3008\n",
      "Epoch 5/5, Training Loss: 0.1219\n",
      "Epoch 5/5, Training Loss: 0.0772\n",
      "Epoch 5/5, Training Loss: 0.1883\n",
      "Epoch 5/5, Training Loss: 0.0726\n",
      "Epoch 5/5, Training Loss: 0.1525\n",
      "Epoch 5/5, Training Loss: 0.1615\n",
      "Epoch 5/5, Training Loss: 0.2677\n",
      "Epoch 5/5, Training Loss: 0.0894\n",
      "Epoch 5/5, Training Loss: 0.0878\n",
      "Epoch 5/5, Training Loss: 0.3378\n",
      "Epoch 5/5, Training Loss: 0.0612\n",
      "Epoch 5/5, Training Loss: 0.2630\n",
      "Epoch 5/5, Training Loss: 0.2605\n",
      "Epoch 5/5, Training Loss: 0.1350\n",
      "Epoch 5/5, Training Loss: 0.3239\n",
      "Epoch 5/5, Training Loss: 0.1795\n",
      "Epoch 5/5, Training Loss: 0.0666\n",
      "Epoch 5/5, Training Loss: 0.1301\n",
      "Epoch 5/5, Training Loss: 0.2223\n",
      "Epoch 5/5, Training Loss: 0.1011\n",
      "Epoch 5/5, Training Loss: 0.2635\n",
      "Epoch 5/5, Training Loss: 0.1256\n",
      "Epoch 5/5, Training Loss: 0.1284\n",
      "Epoch 5/5, Training Loss: 0.0546\n",
      "Epoch 5/5, Training Loss: 0.1525\n",
      "Epoch 5/5, Training Loss: 0.1910\n",
      "Epoch 5/5, Training Loss: 0.2132\n",
      "Epoch 5/5, Training Loss: 0.2575\n",
      "Epoch 5/5, Training Loss: 0.2025\n",
      "Epoch 5/5, Training Loss: 0.1761\n",
      "Epoch 5/5, Training Loss: 0.1047\n",
      "Epoch 5/5, Training Loss: 0.3464\n",
      "Epoch 5/5, Training Loss: 0.2202\n",
      "Epoch 5/5, Training Loss: 0.0961\n",
      "Epoch 5/5, Training Loss: 0.0719\n",
      "Epoch 5/5, Training Loss: 0.1980\n",
      "Epoch 5/5, Training Loss: 0.2414\n",
      "Epoch 5/5, Training Loss: 0.1637\n",
      "Epoch 5/5, Training Loss: 0.2081\n",
      "Epoch 5/5, Training Loss: 0.2593\n",
      "Epoch 5/5, Training Loss: 0.1943\n",
      "Epoch 5/5, Training Loss: 0.0812\n",
      "Epoch 5/5, Training Loss: 0.1968\n",
      "Epoch 5/5, Training Loss: 0.1271\n",
      "Epoch 5/5, Training Loss: 0.2266\n",
      "Epoch 5/5, Training Loss: 0.2075\n",
      "Epoch 5/5, Training Loss: 0.1797\n",
      "Epoch 5/5, Training Loss: 0.1068\n",
      "Epoch 5/5, Training Loss: 0.2954\n",
      "Epoch 5/5, Training Loss: 0.0756\n",
      "Epoch 5/5, Training Loss: 0.1775\n",
      "Epoch 5/5, Training Loss: 0.1640\n",
      "Epoch 5/5, Training Loss: 0.1163\n",
      "Epoch 5/5, Training Loss: 0.2124\n",
      "Epoch 5/5, Training Loss: 0.1028\n",
      "Epoch 5/5, Training Loss: 0.1523\n",
      "Epoch 5/5, Training Loss: 0.1217\n",
      "Epoch 5/5, Training Loss: 0.2189\n",
      "Epoch 5/5, Training Loss: 0.1985\n",
      "Epoch 5/5, Training Loss: 0.1164\n",
      "Epoch 5/5, Training Loss: 0.1832\n",
      "Epoch 5/5, Training Loss: 0.1938\n",
      "Epoch 5/5, Training Loss: 0.1897\n",
      "Epoch 5/5, Training Loss: 0.1545\n",
      "Epoch 5/5, Training Loss: 0.3142\n",
      "Epoch 5/5, Training Loss: 0.0848\n",
      "Epoch 5/5, Training Loss: 0.3086\n",
      "Epoch 5/5, Training Loss: 0.1086\n",
      "Epoch 5/5, Training Loss: 0.2109\n",
      "Epoch 5/5, Training Loss: 0.1795\n",
      "Epoch 5/5, Training Loss: 0.1331\n",
      "Epoch 5/5, Training Loss: 0.2027\n",
      "Epoch 5/5, Training Loss: 0.2415\n",
      "Epoch 5/5, Training Loss: 0.1470\n",
      "Epoch 5/5, Training Loss: 0.2834\n",
      "Epoch 5/5, Training Loss: 0.2696\n",
      "Epoch 5/5, Training Loss: 0.2330\n",
      "Epoch 5/5, Training Loss: 0.2107\n",
      "Epoch 5/5, Training Loss: 0.2158\n",
      "Epoch 5/5, Training Loss: 0.2379\n",
      "Epoch 5/5, Training Loss: 0.1848\n",
      "Epoch 5/5, Training Loss: 0.0994\n",
      "Epoch 5/5, Training Loss: 0.1218\n",
      "Epoch 5/5, Training Loss: 0.1299\n",
      "Epoch 5/5, Training Loss: 0.1285\n",
      "Epoch 5/5, Training Loss: 0.1592\n",
      "Epoch 5/5, Training Loss: 0.1904\n",
      "Epoch 5/5, Training Loss: 0.3877\n",
      "Epoch 5/5, Training Loss: 0.1589\n",
      "Epoch 5/5, Training Loss: 0.1001\n",
      "Epoch 5/5, Training Loss: 0.1039\n",
      "Epoch 5/5, Training Loss: 0.0967\n",
      "Epoch 5/5, Training Loss: 0.2727\n",
      "Epoch 5/5, Training Loss: 0.0833\n",
      "Epoch 5/5, Training Loss: 0.2055\n",
      "Epoch 5/5, Training Loss: 0.0677\n",
      "Epoch 5/5, Training Loss: 0.1784\n",
      "Epoch 5/5, Training Loss: 0.0619\n",
      "Epoch 5/5, Training Loss: 0.2743\n",
      "Epoch 5/5, Training Loss: 0.3440\n",
      "Epoch 5/5, Training Loss: 0.0736\n",
      "Epoch 5/5, Training Loss: 0.2189\n",
      "Epoch 5/5, Training Loss: 0.1833\n",
      "Epoch 5/5, Training Loss: 0.2394\n",
      "Epoch 5/5, Training Loss: 0.3154\n",
      "Epoch 5/5, Training Loss: 0.2511\n",
      "Epoch 5/5, Training Loss: 0.1378\n",
      "Epoch 5/5, Training Loss: 0.2310\n",
      "Epoch 5/5, Training Loss: 0.2428\n",
      "Epoch 5/5, Training Loss: 0.2953\n",
      "Epoch 5/5, Training Loss: 0.1831\n",
      "Epoch 5/5, Training Loss: 0.2025\n",
      "Epoch 5/5, Training Loss: 0.1849\n",
      "Epoch 5/5, Training Loss: 0.1710\n",
      "Epoch 5/5, Training Loss: 0.2824\n",
      "Epoch 5/5, Training Loss: 0.2197\n",
      "Epoch 5/5, Training Loss: 0.2105\n",
      "Epoch 5/5, Training Loss: 0.1165\n",
      "Epoch 5/5, Training Loss: 0.1374\n",
      "Epoch 5/5, Training Loss: 0.2352\n",
      "Epoch 5/5, Training Loss: 0.0997\n",
      "Epoch 5/5, Training Loss: 0.0916\n",
      "Epoch 5/5, Training Loss: 0.3139\n",
      "Epoch 5/5, Training Loss: 0.1447\n",
      "Epoch 5/5, Training Loss: 0.1559\n",
      "Epoch 5/5, Training Loss: 0.1092\n",
      "Epoch 5/5, Training Loss: 0.0497\n",
      "Epoch 5/5, Training Loss: 0.3810\n",
      "Epoch 5/5, Training Loss: 0.2437\n",
      "Epoch 5/5, Training Loss: 0.1879\n",
      "Epoch 5/5, Training Loss: 0.1115\n",
      "Epoch 5/5, Training Loss: 0.1399\n",
      "Epoch 5/5, Training Loss: 0.0685\n",
      "Epoch 5/5, Training Loss: 0.1232\n",
      "Epoch 5/5, Training Loss: 0.2330\n",
      "Epoch 5/5, Training Loss: 0.1304\n",
      "Epoch 5/5, Training Loss: 0.1125\n",
      "Epoch 5/5, Training Loss: 0.2207\n",
      "Epoch 5/5, Training Loss: 0.2147\n",
      "Epoch 5/5, Training Loss: 0.3004\n",
      "Epoch 5/5, Training Loss: 0.2463\n",
      "Epoch 5/5, Training Loss: 0.0461\n",
      "Epoch 5/5, Training Loss: 0.1937\n",
      "Epoch 5/5, Training Loss: 0.1969\n",
      "Epoch 5/5, Training Loss: 0.2838\n",
      "Epoch 5/5, Training Loss: 0.0508\n",
      "Epoch 5/5, Training Loss: 0.0778\n",
      "Epoch 5/5, Training Loss: 0.1120\n",
      "Epoch 5/5, Training Loss: 0.2815\n",
      "Epoch 5/5, Training Loss: 0.0895\n",
      "Epoch 5/5, Training Loss: 0.1230\n",
      "Epoch 5/5, Training Loss: 0.2112\n",
      "Epoch 5/5, Training Loss: 0.1387\n",
      "Epoch 5/5, Training Loss: 0.0921\n",
      "Epoch 5/5, Training Loss: 0.1465\n",
      "Epoch 5/5, Training Loss: 0.1864\n",
      "Epoch 5/5, Training Loss: 0.1415\n",
      "Epoch 5/5, Training Loss: 0.1008\n",
      "Epoch 5/5, Training Loss: 0.0944\n",
      "Epoch 5/5, Training Loss: 0.0341\n",
      "Epoch 5/5, Training Loss: 0.1393\n",
      "Epoch 5/5, Training Loss: 0.2175\n",
      "Epoch 5/5, Training Loss: 0.0574\n",
      "Epoch 5/5, Training Loss: 0.0535\n",
      "Epoch 5/5, Training Loss: 0.1970\n",
      "Epoch 5/5, Training Loss: 0.0633\n",
      "Epoch 5/5, Training Loss: 0.0746\n",
      "Epoch 5/5, Training Loss: 0.1336\n",
      "Epoch 5/5, Training Loss: 0.1030\n",
      "Epoch 5/5, Training Loss: 0.1022\n",
      "Epoch 5/5, Training Loss: 0.1292\n",
      "Epoch 5/5, Training Loss: 0.2476\n",
      "Epoch 5/5, Training Loss: 0.2526\n",
      "Epoch 5/5, Training Loss: 0.1724\n",
      "Epoch 5/5, Training Loss: 0.2060\n",
      "Epoch 5/5, Training Loss: 0.1360\n",
      "Epoch 5/5, Training Loss: 0.1649\n",
      "Epoch 5/5, Training Loss: 0.1488\n",
      "Epoch 5/5, Training Loss: 0.2384\n",
      "Epoch 5/5, Training Loss: 0.1060\n",
      "Epoch 5/5, Training Loss: 0.4743\n",
      "Epoch 5/5, Training Loss: 0.0526\n",
      "Epoch 5/5, Training Loss: 0.1065\n",
      "Epoch 5/5, Training Loss: 0.1494\n",
      "Epoch 5/5, Training Loss: 0.0469\n",
      "Epoch 5/5, Training Loss: 0.1706\n",
      "Epoch 5/5, Training Loss: 0.1542\n",
      "Epoch 5/5, Training Loss: 0.2559\n",
      "Epoch 5/5, Training Loss: 0.3330\n",
      "Epoch 5/5, Training Loss: 0.2199\n",
      "Epoch 5/5, Training Loss: 0.1776\n",
      "Epoch 5/5, Training Loss: 0.1307\n",
      "Epoch 5/5, Training Loss: 0.1486\n",
      "Epoch 5/5, Training Loss: 0.1289\n",
      "Epoch 5/5, Training Loss: 0.2344\n",
      "Epoch 5/5, Training Loss: 0.1110\n",
      "Epoch 5/5, Training Loss: 0.1977\n",
      "Epoch 5/5, Training Loss: 0.1489\n",
      "Epoch 5/5, Training Loss: 0.3202\n",
      "Epoch 5/5, Training Loss: 0.2017\n",
      "Epoch 5/5, Training Loss: 0.1546\n",
      "Epoch 5/5, Training Loss: 0.2044\n",
      "Epoch 5/5, Training Loss: 0.1131\n",
      "Epoch 5/5, Training Loss: 0.1961\n",
      "Epoch 5/5, Training Loss: 0.4079\n",
      "Epoch 5/5, Training Loss: 0.0375\n",
      "Epoch 5/5, Training Loss: 0.0835\n",
      "Epoch 5/5, Training Loss: 0.3798\n",
      "Epoch 5/5, Training Loss: 0.2662\n",
      "Epoch 5/5, Training Loss: 0.0625\n",
      "Epoch 5/5, Training Loss: 0.1663\n",
      "Epoch 5/5, Training Loss: 0.0935\n",
      "Epoch 5/5, Training Loss: 0.0828\n",
      "Epoch 5/5, Training Loss: 0.1345\n",
      "Epoch 5/5, Training Loss: 0.2349\n",
      "Epoch 5/5, Training Loss: 0.3276\n",
      "Epoch 5/5, Training Loss: 0.2249\n",
      "Epoch 5/5, Training Loss: 0.1326\n",
      "Epoch 5/5, Training Loss: 0.1634\n",
      "Epoch 5/5, Training Loss: 0.3075\n",
      "Epoch 5/5, Training Loss: 0.1948\n",
      "Epoch 5/5, Training Loss: 0.1165\n",
      "Epoch 5/5, Training Loss: 0.1695\n",
      "Epoch 5/5, Training Loss: 0.0943\n",
      "Epoch 5/5, Training Loss: 0.1567\n",
      "Epoch 5/5, Training Loss: 0.0433\n",
      "Epoch 5/5, Training Loss: 0.1231\n",
      "Epoch 5/5, Training Loss: 0.1354\n",
      "Epoch 5/5, Training Loss: 0.1058\n",
      "Epoch 5/5, Training Loss: 0.1278\n",
      "Epoch 5/5, Training Loss: 0.2243\n",
      "Epoch 5/5, Training Loss: 0.1514\n",
      "Epoch 5/5, Training Loss: 0.1200\n",
      "Epoch 5/5, Training Loss: 0.3525\n",
      "Epoch 5/5, Training Loss: 0.2138\n",
      "Epoch 5/5, Training Loss: 0.0786\n",
      "Epoch 5/5, Training Loss: 0.0633\n",
      "Epoch 5/5, Training Loss: 0.2286\n",
      "Epoch 5/5, Training Loss: 0.3396\n",
      "Epoch 5/5, Training Loss: 0.1589\n",
      "Epoch 5/5, Training Loss: 0.2349\n",
      "Epoch 5/5, Training Loss: 0.1270\n",
      "Epoch 5/5, Training Loss: 0.3091\n",
      "Epoch 5/5, Training Loss: 0.1744\n",
      "Epoch 5/5, Training Loss: 0.1113\n",
      "Epoch 5/5, Training Loss: 0.1084\n",
      "Epoch 5/5, Training Loss: 0.0604\n",
      "Epoch 5/5, Training Loss: 0.1246\n",
      "Epoch 5/5, Training Loss: 0.1025\n",
      "Epoch 5/5, Training Loss: 0.1998\n",
      "Epoch 5/5, Training Loss: 0.1923\n",
      "Epoch 5/5, Training Loss: 0.1130\n",
      "Epoch 5/5, Training Loss: 0.2220\n",
      "Epoch 5/5, Training Loss: 0.2862\n",
      "Epoch 5/5, Training Loss: 0.1368\n",
      "Epoch 5/5, Training Loss: 0.1668\n",
      "Epoch 5/5, Training Loss: 0.2407\n",
      "Epoch 5/5, Training Loss: 0.2083\n",
      "Epoch 5/5, Training Loss: 0.0649\n",
      "Epoch 5/5, Training Loss: 0.2273\n",
      "Epoch 5/5, Training Loss: 0.1737\n",
      "Epoch 5/5, Training Loss: 0.2150\n",
      "Epoch 5/5, Training Loss: 0.1318\n",
      "Epoch 5/5, Training Loss: 0.1550\n",
      "Epoch 5/5, Training Loss: 0.1117\n",
      "Epoch 5/5, Training Loss: 0.2501\n",
      "Epoch 5/5, Training Loss: 0.1228\n",
      "Epoch 5/5, Training Loss: 0.3250\n",
      "Epoch 5/5, Training Loss: 0.1123\n",
      "Epoch 5/5, Training Loss: 0.3299\n",
      "Epoch 5/5, Training Loss: 0.0783\n",
      "Epoch 5/5, Training Loss: 0.2064\n",
      "Epoch 5/5, Training Loss: 0.1534\n",
      "Epoch 5/5, Training Loss: 0.3124\n",
      "Epoch 5/5, Training Loss: 0.2041\n",
      "Epoch 5/5, Training Loss: 0.2632\n",
      "Epoch 5/5, Training Loss: 0.2112\n",
      "Epoch 5/5, Training Loss: 0.2665\n",
      "Epoch 5/5, Training Loss: 0.2447\n",
      "Epoch 5/5, Training Loss: 0.1349\n",
      "Epoch 5/5, Training Loss: 0.1241\n",
      "Epoch 5/5, Training Loss: 0.3204\n",
      "Epoch 5/5, Training Loss: 0.1353\n",
      "Epoch 5/5, Training Loss: 0.1725\n",
      "Epoch 5/5, Training Loss: 0.1051\n",
      "Epoch 5/5, Training Loss: 0.0910\n",
      "Epoch 5/5, Training Loss: 0.2941\n",
      "Epoch 5/5, Training Loss: 0.1844\n",
      "Epoch 5/5, Training Loss: 0.2351\n",
      "Epoch 5/5, Training Loss: 0.2033\n",
      "Epoch 5/5, Training Loss: 0.2304\n",
      "Epoch 5/5, Training Loss: 0.0424\n",
      "Epoch 5/5, Training Loss: 0.1054\n",
      "Epoch 5/5, Training Loss: 0.2492\n",
      "Epoch 5/5, Training Loss: 0.1650\n",
      "Epoch 5/5, Training Loss: 0.1321\n",
      "Epoch 5/5, Training Loss: 0.2541\n",
      "Epoch 5/5, Training Loss: 0.3503\n",
      "Epoch 5/5, Training Loss: 0.1877\n",
      "Epoch 5/5, Training Loss: 0.2090\n",
      "Epoch 5/5, Training Loss: 0.1725\n",
      "Epoch 5/5, Training Loss: 0.1806\n",
      "Epoch 5/5, Training Loss: 0.1670\n",
      "Epoch 5/5, Training Loss: 0.1986\n",
      "Epoch 5/5, Training Loss: 0.2442\n",
      "Epoch 5/5, Training Loss: 0.1958\n",
      "Epoch 5/5, Training Loss: 0.1037\n",
      "Epoch 5/5, Training Loss: 0.2188\n",
      "Epoch 5/5, Training Loss: 0.2507\n",
      "Epoch 5/5, Training Loss: 0.3635\n",
      "Epoch 5/5, Training Loss: 0.1435\n",
      "Epoch 5/5, Training Loss: 0.1538\n",
      "Epoch 5/5, Training Loss: 0.1796\n",
      "Epoch 5/5, Training Loss: 0.1572\n",
      "Epoch 5/5, Training Loss: 0.4435\n",
      "Epoch 5/5, Training Loss: 0.1676\n",
      "Epoch 5/5, Training Loss: 0.1474\n",
      "Epoch 5/5, Training Loss: 0.2088\n",
      "Epoch 5/5, Training Loss: 0.0942\n",
      "Epoch 5/5, Training Loss: 0.1422\n",
      "Epoch 5/5, Training Loss: 0.1644\n",
      "Epoch 5/5, Training Loss: 0.3554\n",
      "Epoch 5/5, Training Loss: 0.2169\n",
      "Epoch 5/5, Training Loss: 0.1550\n",
      "Epoch 5/5, Training Loss: 0.2516\n",
      "Epoch 5/5, Training Loss: 0.2276\n",
      "Epoch 5/5, Training Loss: 0.1390\n",
      "Epoch 5/5, Training Loss: 0.3439\n",
      "Epoch 5/5, Training Loss: 0.1680\n",
      "Epoch 5/5, Training Loss: 0.2684\n",
      "Epoch 5/5, Training Loss: 0.1333\n",
      "Epoch 5/5, Training Loss: 0.0942\n",
      "Epoch 5/5, Training Loss: 0.1472\n",
      "Epoch 5/5, Training Loss: 0.2798\n",
      "Epoch 5/5, Training Loss: 0.2108\n",
      "Epoch 5/5, Training Loss: 0.1374\n",
      "Epoch 5/5, Training Loss: 0.1585\n",
      "Epoch 5/5, Training Loss: 0.1583\n",
      "Epoch 5/5, Training Loss: 0.2956\n",
      "Epoch 5/5, Training Loss: 0.0575\n",
      "Epoch 5/5, Training Loss: 0.2930\n",
      "Epoch 5/5, Training Loss: 0.3032\n",
      "Epoch 5/5, Training Loss: 0.1559\n",
      "Epoch 5/5, Training Loss: 0.1207\n",
      "Epoch 5/5, Training Loss: 0.1098\n",
      "Epoch 5/5, Training Loss: 0.1087\n",
      "Epoch 5/5, Training Loss: 0.2803\n",
      "Epoch 5/5, Training Loss: 0.3093\n",
      "Epoch 5/5, Training Loss: 0.1029\n",
      "Epoch 5/5, Training Loss: 0.2948\n",
      "Epoch 5/5, Training Loss: 0.1680\n",
      "Epoch 5/5, Training Loss: 0.1456\n",
      "Epoch 5/5, Training Loss: 0.1456, Cross-Validation Loss: 0.4546\n"
     ]
    }
   ],
   "source": [
    "# Define the loss function\n",
    "criterion = nn.CrossEntropyLoss()\n",
    "\n",
    "# Optimize ALL parameters (full fine-tuning). To update only the\n",
    "# unfrozen layers instead, use the filtered variant:\n",
    "# optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()))\n",
    "optimizer = torch.optim.Adam(model.parameters())\n",
    "\n",
    "# Train the model\n",
    "num_epochs = 5\n",
    "train_losses = []\n",
    "cv_losses = []\n",
    "\n",
    "for epoch in range(num_epochs):\n",
    "    # Training phase: accumulate the mini-batch losses so the recorded\n",
    "    # training loss is an epoch average, not just the last batch's value.\n",
    "    model.train()\n",
    "    running_loss = 0.0\n",
    "    for images, labels in trLoader:\n",
    "        optimizer.zero_grad()\n",
    "        outputs = model(images)\n",
    "        loss = criterion(outputs, labels)\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "        running_loss += loss.item()\n",
    "    train_losses.append(running_loss / len(trLoader))\n",
    "\n",
    "    # Cross-validation phase: disable gradient tracking during evaluation\n",
    "    model.eval()\n",
    "    cv_loss = 0.0\n",
    "    with torch.no_grad():\n",
    "        for images, labels in cvLoader:\n",
    "            outputs = model(images)\n",
    "            loss = criterion(outputs, labels)\n",
    "            cv_loss += loss.item()\n",
    "    cv_losses.append(cv_loss / len(cvLoader))\n",
    "\n",
    "    # One summary line per epoch (avoids flooding the cell output with\n",
    "    # one line per mini-batch)\n",
    "    print(f'Epoch {epoch+1}/{num_epochs}, Training Loss: {train_losses[-1]:.4f}, Cross-Validation Loss: {cv_losses[-1]:.4f}')\n",
    "\n",
    "# Save the fine-tuned model weights\n",
    "torch.save(model.state_dict(), 'mnist_resnet18_finetuned.pth')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "计算识别精度，展示学习曲线"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Accuracy on training set: 96.41%\n",
      "Accuracy on cross-validation set: 86.75%\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAA04AAAHUCAYAAAANwniNAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAACACUlEQVR4nO3dd3wT9f8H8NclaZLuvReFQhezZSMb2chSQJAhIKI4EAegoigqTkB/MkSWIgIKri8ie8qQVWbLprTQltLSPdI2ud8faUPTpumgbTpez8cjD9rL5+7eOQ7oi884QRRFEURERERERFQqiakLICIiIiIiqu0YnIiIiIiIiMrA4ERERERERFQGBiciIiIiIqIyMDgRERERERGVgcGJiIiIiIioDAxOREREREREZWBwIiIiIiIiKgODExERERERURkYnIioXhAEoVyvAwcOPNJ55s+fD0EQKrXvgQMHqqSG2m7SpElo1KhRudpqNBqsX78effr0gZOTE8zMzODi4oLBgwfjf//7HzQaTfUW+4jOnTsHQRAwZ86cUttcu3YNgiDglVdeKfdxDd1nPXr0QI8ePcrcNyoqCoIgYN26deU+X6GIiAjMnz8fUVFRJd6ryO9rVRMEAS+99JJJzk1EVEhm6gKIiKrCsWPH9L5fsGAB9u/fj3379ultDw4OfqTzTJ06Ff3796/UvqGhoTh27Ngj11Bf5OTkYNiwYdi1axfGjBmD5cuXw83NDffv38eOHTvw1FNPYfPmzRg6dKipSy1Vq1atEBYWhh9//BEff/wxpFJpiTZr164FAEyZMuWRzrVs2bJH2r88IiIi8MEHH6BHjx4lQtK8efPw6quvVnsNRES1FYMTEdULHTt21Pve2dkZEomkxPbisrKyYGFhUe7zeHl5wcvLq1I12tjYlFlPQzJr1izs3LkTP/zwAyZMmKD33ogRI/Dmm28iOzu71P3z8vIgCAJkMtP+UzZlyhS8+OKL+OeffzB48GC999RqNX788UeEhYWhVatWj3QeUwfuJk2amPT8RESmxqF6RNRg9OjRA82bN8ehQ4fQuXNnWFhYYPLkyQCAzZs3o2/fvnB3d4e5uTmCgoIwZ84cZGZm6h3D0BCqRo0aYfDgwdixYwdCQ0Nhbm6OwMBArFmzRq+doaF6kyZNgpWVFa5fv46BAwfCysoK3t7eeP3116FSqfT2v3PnDp588klYW1vDzs4O48aNw8mTJ8s1LOv+/ft48cUXERwcDCsrK7i4uKBXr144fPiwXrvCYV5ffvklFi1aBD8/P1hZWaFTp044fvx4ieOuW7cOAQEBUCgUCAoKwo8//mi0jkLx8fFYtWoV+vXrVyI0FWratClatmwJ4OG1W79+PV5//XV4enpCoVDg+vXrAIA1a9agVatWUCqVcHBwwPDhwxEZGal3vJs3b2LMmDHw8PCAQqGAq6srevfujbNnz+ra7Nu3Dz169ICjoyPMzc3h4+ODkSNHIisrq9TPMnbsWJibm+t6loratWsX7t69W+H7zBBDQ/ViY2MxatQoWFtbw9bWFqNHj0Z8fHyJfU+dOoUxY8agUaNGMDc3R6NGjfD000/j9u3bujbr1q3DU089BQDo2bOnbnhr4b1laKheTk4O5s6dCz8/P8jlcnh6emLGjBlISUnRa1fePyOP4sGDB3jxxRfh6ekJuVyOxo0b45133inx5+jXX39Fhw4dYGtrCwsLCzRu3Fj3+wNoh49+9NFHCAgIgLm5Oezs7NCyZUt8/fXXVVYrEdVN7HEiogYlLi4OzzzzDN566y188sknkEi0/3907do1DBw4EDNnzoSlpSUuX76Mzz77DCdOnCgx3M+Qc+fO4fXXX8ecOXPg6uqKVatWYcqUKfD390e3bt2M7puXl4cnnngCU6ZMweuvv45Dhw5hwYIFsLW1xXvvvQcAyMzMRM+ePfHgwQN89tln8Pf3x44dOzB69Ohyfe4HDx4AAN5/
/324ubkhIyMDv//+O3r06IG9e/eW+IF86dKlCAwMxJIlSwBoh2kNHDgQt27dgq2tLQDtD9rPPvsshg4diq+++gqpqamYP38+VCqV7rqWZv/+/cjLy8OwYcPKVX+huXPnolOnTlixYgUkEglcXFywcOFCvP3223j66aexcOFCJCUlYf78+ejUqRNOnjyJpk2bAgAGDhwItVqNzz//HD4+PkhMTMTRo0d1P+RHRUVh0KBB6Nq1K9asWQM7OzvcvXsXO3bsQG5ubqk9k7a2thg5ciQ2b96M+/fvw9nZWffe2rVroVQqMXbsWACPfp8VlZ2djT59+iA2NhYLFy5Es2bN8Pfffxu8J6KiohAQEIAxY8bAwcEBcXFxWL58Odq1a4eIiAg4OTlh0KBB+OSTT/D2229j6dKlCA0NBVB6T5Moihg2bBj27t2LuXPnomvXrjh//jzef/99HDt2DMeOHYNCodC1f5Q/I2XJyclBz549cePGDXzwwQdo2bIlDh8+jIULF+Ls2bP4+++/AWiH9I4ePRqjR4/G/PnzoVQqcfv2bb1r//nnn2P+/Pl499130a1bN+Tl5eHy5cslwiARNUAiEVE9NHHiRNHS0lJvW/fu3UUA4t69e43uq9FoxLy8PPHgwYMiAPHcuXO6995//32x+F+dvr6+olKpFG/fvq3blp2dLTo4OIjPP/+8btv+/ftFAOL+/fv16gQg/vLLL3rHHDhwoBgQEKD7funSpSIA8Z9//tFr9/zzz4sAxLVr1xr9TMXl5+eLeXl5Yu/evcXhw4frtt+6dUsEILZo0ULMz8/XbT9x4oQIQNy4caMoiqKoVqtFDw8PMTQ0VNRoNLp2UVFRopmZmejr62v0/J9++qkIQNyxY0e56i28dt26ddPbnpycLJqbm4sDBw7U2x4dHS0qFApx7NixoiiKYmJioghAXLJkSann2LJliwhAPHv2bLlqMlTfokWLdNuSkpJEhUIhjhs3zuA+Fb3PunfvLnbv3l33/fLly0UA4p9//qnX7rnnnivznsjPzxczMjJES0tL8euvv9Zt//XXX0vco4UmTpyo9/u6Y8cOEYD4+eef67XbvHmzCEBcuXKlblt5/4yUBoA4Y8aMUt9fsWKFwT9Hn332mQhA3LVrlyiKovjll1+KAMSUlJRSjzV48GCxdevWZdZERA0Ph+oRUYNib2+PXr16ldh+8+ZNjB07Fm5ubpBKpTAzM0P37t0BoMSQL0Nat24NHx8f3fdKpRLNmjXTGwpVGkEQMGTIEL1tLVu21Nv34MGDsLa2LrEwxdNPP13m8QutWLECoaGhUCqVkMlkMDMzw969ew1+vkGDBuktdFA4ZK6wpitXriA2NhZjx47VG7ro6+uLzp07l7umiho5cqTe98eOHUN2djYmTZqkt93b2xu9evXC3r17AQAODg5o0qQJvvjiCyxatAjh4eElVuxr3bo15HI5pk2bhh9++AE3b94scX61Wo38/Hzdq/AY3bt3R5MmTfSG623YsAEqlUpvGNij3mdF7d+/H9bW1njiiSf0thf2bhWVkZGB2bNnw9/fHzKZDDKZDFZWVsjMzKzweQsV9tIUv/ZPPfUULC0tdde+0KP8GSlPLZaWlnjyySf1thfWVlhLu3btAACjRo3CL7/8grt375Y4Vvv27XHu3Dm8+OKL2LlzJ9LS0h65PiKqHxiciKhBcXd3L7EtIyMDXbt2xX///YePPvoIBw4cwMmTJ/Hbb78BgNEFCgo5OjqW2KZQKMq1r4WFBZRKZYl9c3JydN8nJSXB1dW1xL6GthmyaNEivPDCC+jQoQO2bt2K48eP4+TJk+jfv7/BGot/nsIhV4Vtk5KSAABubm4l9jW0rbjCH6Bv3bpVrvoLFf/9K6zD0O+rh4eH7n1BELB3717069cPn3/+OUJDQ+Hs7IxXXnkF6enpALRD0vbs2QMXFxfMmDEDTZo0QZMmTfTmtvTu3RtmZma6V2EoEgQBkydPxoULF3Dq
1CkA2mF6fn5+6NmzJ4Cquc+Kf3ZDv/+Grv/YsWPx7bffYurUqdi5cydOnDiBkydPwtnZucLnLXp+mUymNzQR0F4LNzc33bUv9Ch/RspTi5ubW4n5hy4uLpDJZLpaunXrhj/++AP5+fmYMGECvLy80Lx5c2zcuFG3z9y5c/Hll1/i+PHjGDBgABwdHdG7d2/d7ysRNVyc40REDYqhZzDt27cPsbGxOHDggO5//wHUqjkNjo6OOHHiRInthhYCMOSnn35Cjx49sHz5cr3thaGhMvWUdv7y1NSzZ0+YmZnhjz/+wPTp08t93uK/f4V1xMXFlWgbGxsLJycn3fe+vr5YvXo1AODq1av45ZdfMH/+fOTm5mLFihUAgK5du6Jr165Qq9U4deoU/u///g8zZ86Eq6srxowZg++++07vmhU9/qRJk/Dee+9hzZo1MDMzQ3h4OBYsWKCruarvs/LeE6mpqdi2bRvef/99vedNqVQq3dy3yp4/Pz+/xLwuURQRHx+v692pCY6Ojvjvv/8giqLePZKQkID8/Hy936ehQ4di6NChUKlUOH78OBYuXIixY8eiUaNG6NSpE2QyGWbNmoVZs2YhJSUFe/bswdtvv41+/fohJiamQqtwElH9wh4nImrwCn/QKjqRHQC+++47U5RjUPfu3ZGeno5//vlHb/umTZvKtb8gCCU+3/nz50s8/6q8AgIC4O7ujo0bN0IURd3227dv4+jRo2Xu7+bmpuv9KG0lvhs3buD8+fNGj9OpUyeYm5vjp59+0tt+584d7Nu3D7179za4X7NmzfDuu++iRYsWOHPmTIn3pVIpOnTogKVLlwKArk1AQADatm2rexVdZc7DwwP9+/fHxo0bsXTpUkgkEkycOFH3flXfZz179kR6ejr++usvve0///yz3veCIEAUxRLnXbVqFdRqtd624j2LxhRe2+LXfuvWrcjMzCz12leH3r17IyMjA3/88Yfe9sJ7y1AtCoUC3bt3x2effQYACA8PL9HGzs4OTz75JGbMmIEHDx4YfDAwETUc7HEiogavc+fOsLe3x/Tp0/H+++/DzMwMGzZswLlz50xdms7EiROxePFiPPPMM/joo4/g7++Pf/75Bzt37gSAMlexGzx4MBYsWID3338f3bt3x5UrV/Dhhx/Cz88P+fn5Fa5HIpFgwYIFmDp1KoYPH47nnnsOKSkpmD9/frmG6gHa4YM3b97EpEmTsHPnTgwfPhyurq5ITEzE7t27sXbtWmzatEk3v8oQOzs7zJs3D2+//TYmTJiAp59+GklJSfjggw+gVCrx/vvvA9CGxJdeeglPPfUUmjZtCrlcjn379uH8+fO6XpgVK1Zg3759GDRoEHx8fJCTk6NbLrtPnz7l+kxTpkzB33//rVtq3dvbW/deVd9nEyZMwOLFizFhwgR8/PHHaNq0KbZv3667JwrZ2NigW7du+OKLL+Dk5IRGjRrh4MGDWL16Nezs7PTaNm/eHACwcuVKWFtbQ6lUws/Pz+Awu8cffxz9+vXD7NmzkZaWhi5duuhW1WvTpg3Gjx9fqc9Vmhs3bmDLli0ltgcHB2PChAlYunQpJk6ciKioKLRo0QL//vsvPvnkEwwcOFD3+/fee+/hzp076N27N7y8vJCSkoKvv/5ab67ZkCFD0Lx5c7Rt2xbOzs64ffs2lixZAl9fX90KjUTUQJl2bQoioupR2qp6ISEhBtsfPXpU7NSpk2hhYSE6OzuLU6dOFc+cOVNidbLSVtUbNGhQiWMWXwWttFX1itdZ2nmio6PFESNGiFZWVqK1tbU4cuRIcfv27QZXVitOpVKJb7zxhujp6SkqlUoxNDRU/OOPP0qslFa4qt4XX3xR4hgAxPfff19v26pVq8SmTZuKcrlcbNasmbhmzZoSxzQmPz9f/OGHH8RevXqJDg4OokwmE52dncUBAwaIP//8s6hWq0VRfHjtfv31V4PHWbVqldiyZUtRLpeLtra24tChQ8VLly7p3r93
7544adIkMTAwULS0tBStrKzEli1biosXL9atHnjs2DFx+PDhoq+vr6hQKERHR0exe/fu4l9//VWuzyKKopibmyu6uroaXOFNFB/tPit+P4miKN65c0ccOXKk3j1x9OjREscrbGdvby9aW1uL/fv3Fy9evCj6+vqKEydO1DvmkiVLRD8/P1Eqleodx9Dva3Z2tjh79mzR19dXNDMzE93d3cUXXnhBTE5O1mtX3j8jpQFQ6qvwnkxKShKnT58uuru7izKZTPT19RXnzp0r5uTk6I6zbds2ccCAAaKnp6col8tFFxcXceDAgeLhw4d1bb766iuxc+fOopOTkyiXy0UfHx9xypQpYlRUVJl1ElH9JohikTEWRERUp3zyySd49913ER0dDS8vL1OXQ0REVG9xqB4RUR3x7bffAgACAwORl5eHffv24ZtvvsEzzzzD0ERERFTNGJyIiOoICwsLLF68GFFRUVCpVPDx8cHs2bPx7rvvmro0IiKieo9D9YiIiIiIiMrA5ciJiIiIiIjKwOBERERERERUBgYnIiIiIiKiMjS4xSE0Gg1iY2NhbW2te4o7ERERERE1PKIoIj09HR4eHmU+TL7BBafY2Fi9J7kTEREREVHDFhMTU+ajPRpccLK2tgagvTg2NjYmroaIiIiIiEwlLS0N3t7euoxgTIMLToXD82xsbBiciIiIiIioXFN4uDgEERERERFRGRiciIiIiIiIysDgREREREREVIYGN8eJiIiIqCERRRH5+flQq9WmLoXIJMzMzCCVSh/5OAxORERERPVUbm4u4uLikJWVZepSiExGEAR4eXnBysrqkY7D4ERERERUD2k0Gty6dQtSqRQeHh6Qy+XlWjmMqD4RRRH379/HnTt30LRp00fqeWJwIiIiIqqHcnNzodFo4O3tDQsLC1OXQ2Qyzs7OiIqKQl5e3iMFJy4OQURERFSPSST8cY8atqrqaeWfJCIiIiIiojIwOBEREREREZWBwYmIiIiI6rUePXpg5syZ5W4fFRUFQRBw9uzZaquJ6h4GJyIiIiKqFQRBMPqaNGlSpY7722+/YcGCBeVu7+3tjbi4ODRv3rxS5ysvBrS6havqmVhqdh5szc1MXQYRERGRycXFxem+3rx5M9577z1cuXJFt83c3FyvfV5eHszMyv45ysHBoUJ1SKVSuLm5VWgfqv/Y42RCF++moscX+7H5ZLSpSyEiIqIGQBRFZOXm1/hLFMVy1efm5qZ72draQhAE3fc5OTmws7PDL7/8gh49ekCpVOKnn35CUlISnn76aXh5ecHCwgItWrTAxo0b9Y5bfKheo0aN8Mknn2Dy5MmwtraGj48PVq5cqXu/eE/QgQMHIAgC9u7di7Zt28LCwgKdO3fWC3UA8NFHH8HFxQXW1taYOnUq5syZg9atW1fq9woAVCoVXnnlFbi4uECpVOKxxx7DyZMnde8nJydj3LhxcHZ2hrm5OZo2bYq1a9cC0C5H/9JLL8Hd3R1KpRKNGjXCwoULK10LscfJpP65GIfkrDzM+e0CBAgY1c7b1CURERFRPZadp0bweztr/LwRH/aDhbxqfuycPXs2vvrqK6xduxYKhQI5OTkICwvD7NmzYWNjg7///hvjx49H48aN0aFDh1KP89VXX2HBggV4++23sWXLFrzwwgvo1q0bAgMDS93nnXfewVdffQVnZ2dMnz4dkydPxpEjRwAAGzZswMcff4xly5ahS5cu2LRpE7766iv4+flV+rO+9dZb2Lp1K3744Qf4+vri888/R79+/XD9+nU4ODhg3rx5iIiIwD///AMnJydcv34d2dnZAIBvvvkGf/31F3755Rf4+PggJiYGMTExla6FGJxM6o2+AchUqbHuaBRm/3YeEIBRbRmeiIiIiEozc+ZMjBgxQm/bG2+8ofv65Zdfxo4dO/Drr78aDU4DBw7Eiy++CEAbxhYvXowDBw4YDU4ff/wxunfvDgCYM2cOBg0ahJycHCiVSvzf//0f
pkyZgmeffRYA8N5772HXrl3IyMio1OfMzMzE8uXLsW7dOgwYMAAA8P3332P37t1YvXo13nzzTURHR6NNmzZo27YtAG1PWqHo6Gg0bdoUjz32GARBgK+vb6XqoIcYnExIEAS8PyQYoijih2O3MXvreUgEAU+GeZm6NCIiIqqHzM2kiPiwn0nOW1UKQ0IhtVqNTz/9FJs3b8bdu3ehUqmgUqlgaWlp9DgtW7bUfV04JDAhIaHc+7i7uwMAEhIS4OPjgytXruiCWKH27dtj37595fpcxd24cQN5eXno0qWLbpuZmRnat2+PyMhIAMALL7yAkSNH4syZM+jbty+GDRuGzp07AwAmTZqExx9/HAEBAejfvz8GDx6Mvn37VqoW0mJwMjFBEDD/iRCIAH48dhtvbjkHAAxPREREVOUEQaiyIXOmUjwQffXVV1i8eDGWLFmCFi1awNLSEjNnzkRubq7R4xRfVEIQBGg0mnLvIwgCAOjtU7itUHnndhlSuK+hYxZuGzBgAG7fvo2///4be/bsQe/evTFjxgx8+eWXCA0Nxa1bt/DPP/9gz549GDVqFPr06YMtW7ZUuqaGjotD1AKCIOCDJ0IwvqMvRBF4c8s5bD19x9RlEREREdV6hw8fxtChQ/HMM8+gVatWaNy4Ma5du1bjdQQEBODEiRN6206dOlXp4/n7+0Mul+Pff//VbcvLy8OpU6cQFBSk2+bs7IxJkybhp59+wpIlS/QWubCxscHo0aPx/fffY/Pmzdi6dSsePHhQ6Zoaurr9Xw71iCAI+HBoCESI+Ol4NN7Ycg4SCTC8DXueiIiIiErj7++PrVu34ujRo7C3t8eiRYsQHx+vFy5qwssvv4znnnsObdu2RefOnbF582acP38ejRs3LnPf4qvzAUBwcDBeeOEFvPnmm3BwcICPjw8+//xzZGVlYcqUKQC086jCwsIQEhIClUqFbdu26T734sWL4e7ujtatW0MikeDXX3+Fm5sb7OzsqvRzNyQMTrWIIAj48Inm0IjAz/9F4/VfzkGAgGFtPE1dGhEREVGtNG/ePNy6dQv9+vWDhYUFpk2bhmHDhiE1NbVG6xg3bhxu3ryJN954Azk5ORg1ahQmTZpUohfKkDFjxpTYduvWLXz66afQaDQYP3480tPT0bZtW+zcuRP29vYAALlcjrlz5yIqKgrm5ubo2rUrNm3aBACwsrLCZ599hmvXrkEqlaJdu3bYvn07JBIOOKssQXyUwZd1UFpaGmxtbZGamgobGxtTl2OQRiPinT8uYuOJaEgEYPHo1hjamuGJiIiIyi8nJwe3bt2Cn58flEqlqctpkB5//HG4ublh/fr1pi6lQTP2Z6Ei2YA9TrWQRCLg42HNAYjYeCIGr20+CwAMT0RERES1VFZWFlasWIF+/fpBKpVi48aN2LNnD3bv3m3q0qiKMDjVUtrw1AKiCGw6qQ1PgiDgiVYepi6NiIiIiIoRBAHbt2/HRx99BJVKhYCAAGzduhV9+vQxdWlURRicajGJRMAnw7XhafOpGMzcFA4BwBCGJyIiIqJaxdzcHHv27DF1GVSNODuslpNIBCwc0QKj2npBIwIzN5/FtvOxpi6LiIiIiKhBYY9THSCRCPh0REuIIvDr6Tt4ddNZCBAwqKW7qUsjIiIiImoQGJzqCIlEwGcjW0IEsOX0HbyyKRyCAAxswfBERERERFTdGJzqkMLwpBFF/HbmLl7eqJ3zNIDhiYiIiIioWnGOUx0jlQj44slWGNHGE2qNiJc3hmPHxThTl0VEREREVK8xONVBUomAL57Shqd8jYiXfg7Hjovxpi6LiIiIiKjeYnCqowrD07DWHgXh6Qx2XmJ4IiIiIqorevTogZkzZ+q+b9SoEZYsWWJ0H0EQ8McffzzyuavqOA0Jg1MdJpUI+GpUawwtCE8zNpzBLoYnIiIiquPi4+Px8ssvo3HjxlAoFPD29saQIUOwd+9eU5cGABgyZEipD7Y9duwYBEHAmTNn
KnzckydPYtq0aY9anp758+ejdevWJbbHxcVhwIABVXqu4tatWwc7O7tqPUdNYnCq46QSAV891QpPtCoITz+fwe6Ie6Yui4iIiKhSoqKiEBYWhn379uHzzz/HhQsXsGPHDvTs2RMzZswodb+8vLwaq3HKlCnYt28fbt++XeK9NWvWoHXr1ggNDa3wcZ2dnWFhYVEVJZbJzc0NCoWiRs5VXzA41QMyqQSLRrXCkFYeyFOLeHHDaexheCIiIqLiRBHIzaz5lyiWu8QXX3wRgiDgxIkTePLJJ9GsWTOEhIRg1qxZOH78uK6dIAhYsWIFhg4dCktLS3z00UcAgOXLl6NJkyaQy+UICAjA+vXr9Y4/f/58+Pj4QKFQwMPDA6+88oruvWXLlqFp06ZQKpVwdXXFk08+abDGwYMHw8XFBevWrdPbnpWVhc2bN2PKlClISkrC008/DS8vL1hYWKBFixbYuHGj0c9efKjetWvX0K1bNyiVSgQHB2P37t0l9pk9ezaaNWsGCwsLNG7cGPPmzdOFyHXr1uGDDz7AuXPnIAgCBEHQ1Vx8qN6FCxfQq1cvmJubw9HREdOmTUNGRobu/UmTJmHYsGH48ssv4e7uDkdHR8yYMeORAmt0dDSGDh0KKysr2NjYYNSoUbh37+HPsOfOnUPPnj1hbW0NGxsbhIWF4dSpUwCA27dvY8iQIbC3t4elpSVCQkKwffv2StdSHlyOvJ6QSSVYPKoVRFHEtvNxeGHDaax4Jgy9g1xNXRoRERHVFnlZwCceNX/et2MBuWWZzR48eIAdO3bg448/hqVlyfbFh329//77WLhwIRYvXgypVIrff/8dr776KpYsWYI+ffpg27ZtePbZZ+Hl5YWePXtiy5YtWLx4MTZt2oSQkBDEx8fj3LlzAIBTp07hlVdewfr169G5c2c8ePAAhw8fNlinTCbDhAkTsG7dOrz33nsQBAEA8OuvvyI3Nxfjxo1DVlYWwsLCMHv2bNjY2ODvv//G+PHj0bhxY3To0KHMa6HRaDBixAg4OTnh+PHjSEtL05sPVcja2hrr1q2Dh4cHLly4gOeeew7W1tZ46623MHr0aFy8eBE7duzAnj17AAC2trYljpGVlYX+/fujY8eOOHnyJBISEjB16lS89NJLeuFw//79cHd3x/79+3H9+nWMHj0arVu3xnPPPVfm5ylOFEUMGzYMlpaWOHjwIPLz8/Hiiy9i9OjROHDgAABg3LhxaNOmDZYvXw6pVIqzZ8/CzMwMADBjxgzk5ubi0KFDsLS0REREBKysrCpcR0UwONUjMqkES0a3hgjg7/NxeOGnM1gxPhS9AhmeiIiIqPa7fv06RFFEYGBgudqPHTsWkydP1vt+0qRJePHFFwFA10v15ZdfomfPnoiOjoabmxv69OkDMzMz+Pj4oH379gC0vR+WlpYYPHgwrK2t4evrizZt2pR67smTJ+OLL77AgQMH0LNnTwDaYXojRoyAvb097O3t8cYbb+jav/zyy9ixYwd+/fXXcgWnPXv2IDIyElFRUfDy8gIAfPLJJyXmJb377ru6rxs1aoTXX38dmzdvxltvvQVzc3NYWVlBJpPBzc2t1HNt2LAB2dnZ+PHHH3WB9dtvv8WQIUPw2WefwdVV+7Okvb09vv32W0ilUgQGBmLQoEHYu3dvpYLTnj17cP78edy6dQve3t4AgPXr1yMkJAQnT55Eu3btEB0djTfffFN3PzRt2lS3f3R0NEaOHIkWLVoAABo3blzhGiqKwamekUkl+Hp0a0AE/r4Qh+nrz+C78WHoGehi6tKIiIjI1MwstL0/pjhvOYgFQ/oKe3DK0rZtW73vIyMjSyyu0KVLF3z99dcAgKeeegpLlixB48aN0b9/fwwcOBBDhgyBTCbD448/Dl9fX917/fv3x/Dhw2FhYYENGzbg+eef1x3zn3/+QdeuXdG5c2esWbMGPXv2xI0bN3D48GHs2rULAKBWq/Hpp59i8+bNuHv3LlQqFVQq
lcGeNEMiIyPh4+OjC00A0KlTpxLttmzZgiVLluD69evIyMhAfn4+bGxsynWOoudq1aqVXm1dunSBRqPBlStXdMEpJCQEUqlU18bd3R0XLlyo0LmKntPb21sXmgAgODgYdnZ2iIyMRLt27TBr1ixMnToV69evR58+ffDUU0+hSZMmAIBXXnkFL7zwAnbt2oU+ffpg5MiRaNmyZaVqKS+Tz3FatmwZ/Pz8oFQqERYWVmqXKKAdW1k4PrPoKyQkpAYrrv1kUgmWjGmNgS3ckKvW4Pn1p7H/coKpyyIiIiJTEwTtkLmafpUzCDVt2hSCICAyMrJc7Q2FkOKhSxRF3TZvb29cuXIFS5cuhbm5OV588UV069YNeXl5sLa2xpkzZ7Bx40a4u7vjvffeQ6tWrZCSkoInnngCZ8+e1b0KA9uUKVOwdetWpKWlYe3atfD19UXv3r0BAF999RUWL16Mt956C/v27cPZs2fRr18/5ObmluuziQbmhRX/bMePH8eYMWMwYMAAbNu2DeHh4XjnnXfKfQ5D18jYOQuHyRV9T6PRVOhcZZ2z6Pb58+fj0qVLGDRoEPbt24fg4GD8/vvvAICpU6fi5s2bGD9+PC5cuIC2bdvi//7v/ypVS3mZNDht3rwZM2fOxDvvvIPw8HB07doVAwYMQHR0tMH2X3/9NeLi4nSvmJgYODg44Kmnnqrhyms/M6kEX49pgwHNH4anA1cYnoiIiKj2cnBwQL9+/bB06VJkZmaWeD8lJcXo/kFBQfj333/1th09ehRBQUG6783NzfHEE0/gm2++wYEDB3Ds2DFdr4lMJkOfPn3w+eef4/z584iKisK+fftgbW0Nf39/3cvc3BwAMGrUKEilUvz888/44Ycf8Oyzz+p+6D98+DCGDh2KZ555Bq1atULjxo1x7dq1cl+L4OBgREdHIzb2YQ/hsWPH9NocOXIEvr6+eOedd9C2bVs0bdq0xEp/crkcarW6zHOdPXtW75ofOXIEEokEzZo1K3fNFVH4+WJiYnTbIiIikJqaqvf71axZM7z22mvYtWsXRowYgbVr1+re8/b2xvTp0/Hbb7/h9ddfx/fff18ttRYyaXBatGgRpkyZgqlTpyIoKAhLliyBt7c3li9fbrC9ra0t3NzcdK9Tp04hOTkZzz77bA1XXjeYSSX45uk26B+iDU/T1p/Gwav3TV0WERERUamWLVsGtVqN9u3bY+vWrbh27RoiIyPxzTffGByqVtSbb76JdevWYcWKFbh27RoWLVqE3377TTfXaN26dVi9ejUuXryImzdvYv369TA3N4evry+2bduGb775BmfPnsXt27fx448/QqPRICAgoNTzWVlZYfTo0Xj77bcRGxuLSZMm6d7z9/fH7t27cfToUURGRuL5559HfHz5n7fZp08fBAQEYMKECTh37hwOHz6Md955R6+Nv78/oqOjsWnTJty4cQPffPONrkemUKNGjXDr1i2cPXsWiYmJUKlUJc41btw4KJVKTJw4ERcvXsT+/fvx8ssvY/z48bphepWlVqv1euvOnj2LiIgI9OnTBy1btsS4ceNw5swZnDhxAhMmTED37t3Rtm1bZGdn46WXXsKBAwdw+/ZtHDlyBCdPntSFqpkzZ2Lnzp24desWzpw5g3379ukFrupgsuCUm5uL06dPo2/fvnrb+/bti6NHj5brGKtXr0afPn3g6+tbahuVSoW0tDS9V0NiJpXg/8a2Qb8QV+Tma/Dcj6cYnoiIiKjW8vPzw5kzZ9CzZ0+8/vrraN68OR5//HHs3bu31P9cLzRs2DB8/fXX+OKLLxASEoLvvvsOa9euRY8ePQBoV+X7/vvv0aVLF7Rs2RJ79+7F//73Pzg6OsLOzg6//fYbevXqhaCgIKxYsQIbN24sc0rIlClTkJycjD59+sDHx0e3fd68eQgNDUW/fv3Qo0cPuLm5YdiwYeW+DhKJBL///jtUKhXat2+PqVOn4uOPP9ZrM3ToULz22mt46aWX
0Lp1axw9ehTz5s3TazNy5Ej0798fPXv2hLOzs8El0S0sLLBz5048ePAA7dq1w5NPPonevXvj22+/LXe9pcnIyECbNm30XgMHDtQth25vb49u3bqhT58+aNy4MTZv3gwAkEqlSEpKwoQJE9CsWTOMGjUKAwYMwAcffABAG8hmzJiBoKAg9O/fHwEBAVi2bNkj12uMIBoaQFkDYmNj4enpiSNHjqBz58667Z988gl++OEHXLlyxej+cXFx8Pb2xs8//4xRo0aV2m7+/Pm6C1xUampqhSfO1WW5+Rq89PMZ7Iq4B7lMglUT2qJbM2dTl0VERETVJCcnB7du3dLNJSdqqIz9WUhLS4OtrW25soHJF4cwNoHPmHXr1sHOzq7M5D537lykpqbqXkXHUTYkcpkE344NxePBD3ueDl9jzxMRERERUXmYLDg5OTlBKpWWGOuZkJBQ5lhKURSxZs0ajB8/HnK53GhbhUIBGxsbvVdDJZdJsHRsKPoEuUKVr8HUH07h32uJpi6LiIiIiKjWM1lwksvlCAsLw+7du/W27969W2/oniEHDx7E9evXMWXKlOossV6SyyRYNi4UfYJcoMrXYMoPJ3HkOsMTEREREZExJh2qN2vWLKxatQpr1qxBZGQkXnvtNURHR2P69OkAtMPsJkyYUGK/1atXo0OHDmjevHlNl1wvyGUSLB0Xit6BD8PTUYYnIiIiIqJSmTQ4jR49GkuWLMGHH36I1q1b49ChQ9i+fbtulby4uLgSz3RKTU3F1q1b2dv0iBQyKZY9E4pegS7IydNg8g8ncfQGwxMREVF9Y6J1wIhqjar6M2CyVfVMpSIrZzQEqnw1pq8/jf1X7kNpJsHaSe3RqYmjqcsiIiKiR6RWq3H16lW4uLjA0ZH/tlPDlZqaitjYWPj7+8PMzEzvvYpkA1l1Fkm1n0ImxfJnwjD9p9M4cOU+Jq87ibXPtkPHxvwLloiIqC6TSqWws7NDQkICAO2zesqzcjFRfaLRaHD//n1YWFhAJnu06MMeJwIA5OSp8fz60zh49T7MzaQMT0RERPWAKIqIj49HSkqKqUshMhmJRAI/Pz+Dq3FXJBswOJFOTp4a09afxqGC8LTu2XbowPBERERU56nVauTl5Zm6DCKTkMvlkEgML+3A4GQEg5NxOXnqgofjJsJCLsW6Z9ujvZ+DqcsiIiIiIqpyFckGJl1Vj2ofpZkU309oi65NnZCVq8aktSdwMuqBqcsiIiIiIjIpBicqoUR4WnMCpxieiIiIiKgBY3AigwrD02P+TsjMVWPimhM4fZvhiYiIiIgaJgYnKlVheOrcxLEgPJ3E6dvJpi6LiIiIiKjGMTiRUeZyKVZPbIfOTRyRocov6HlieCIiIiKihoXBicpUGJ46NX4Yns5EMzwRERERUcPB4ETlYi6XYvWktujY2EEbnlafQDjDExERERE1EAxOVG4WchnWTGqHDn4OSFflY8LqEzgbk2LqsoiIiIiIqh2DE1WIhVyGtc+2Q/uC8DR+9X84x/BERERERPUcgxNVmIVchrWT2qF9Iwek5+TjGYYnIiIiIqrnGJyoUiwV2p6ndo3sdeHp/J0UU5dFRERERFQtGJyo0rThqT3a+haEp1X/4cKdVFOXRURERERU5Ric6JFYKWRYN1kbntIKep4u3mV4IiIiIqL6hcGJHllheArztUdqdh7GrWJ4IiIiIqL6hcGJqoSVQoZ1z7ZDqI8dwxMRERER1TsMTlRlrJVm+GFye7QpCE/PrP4Pl2IZnoiIiIio7mNwoipVGJ5ae9shJUvb8xQRm2bqsoiIiIiIHgmDE1U5G6UZfpxSNDwdZ3giIiIiojqNwYmqRWF4auVth+SC8BQZx/BERERERHUTgxNVGxulGX6c3B6tvGwLwtN/uBzP8EREREREdQ+DE1UrW3Mz/DilA1p62eJBZi7Gfv8frsSnm7osIiIiIqIKYXCiamdrbob1kzughWdheDrO8EREREREdQqDE9UI
Wwsz/DSlA5p72iCpIDxdvcfwRERERER1A4MT1ZjC8BTi8TA8XWN4IiIiIqI6gMGJapSdhRwbpmrDU2JGLp7+/j9cT2B4IiIiIqLajcGJalxheAp2t0FihgpjVjI8EREREVHtxuBEJlEYnoL0wlOGqcsiIiIiIjKIwYlMxt5SG54C3ayRmKHC098fx437DE9EREREVPswOJFJOVjK8fNzHRHoZo376So8vZLhiYiIiIhqHwYnMrmi4SmhIDzdZHgiIiIiolqEwYlqBYeCYXsBrgXh6fvjuJWYaeqyiIiIiIgAMDhRLeJopcCG5zqgmasV7qVpe56iGJ6IiIiIqBZgcKJaxclKgZ+f64hmrlaIT8vBGIYnIiIiIqoFGJyo1ikMT01dtOHp6e+P43YSwxMRERERmQ6DE9VKheHJ38UKcananieGJyIiIiIyFQYnqrWcrRX4+bkOaOJsibjUHDy98jiik7JMXRYRERERNUAMTlSruVgrsXFaRzRxtkRsqnbYXswDhiciIiIiqlkMTlTruVgrsfG5jmjsbIm7KdkYs5LhiYiIiIhqFoMT1QkuNkpseq4jGjsxPBERERFRzWNwojrDxUY7bK8wPD39/XHcSWZ4IiIiIqLqx+BEdYprQXjyc7LEnWRtzxPDExERERFVNwYnqnNcbbRzngrD09PfH8fdlGxTl0VERERE9RiDE9VJbrba8NTI0QIxD7IxZuUxxDI8EREREVE1MXlwWrZsGfz8/KBUKhEWFobDhw8bba9SqfDOO+/A19cXCoUCTZo0wZo1a2qoWqpN3Gy1w/Z8deHpOMMTEREREVULkwanzZs3Y+bMmXjnnXcQHh6Orl27YsCAAYiOji51n1GjRmHv3r1YvXo1rly5go0bNyIwMLAGq6baxN3WHBuf6wgfBwtEP8jC098fR1wqwxMRERERVS1BFEXRVCfv0KEDQkNDsXz5ct22oKAgDBs2DAsXLizRfseOHRgzZgxu3rwJBweHSp0zLS0Ntra2SE1NhY2NTaVrp9oltmCJ8ugHWfB1tMCmaR3hbmtu6rKIiIiIqBarSDYwWY9Tbm4uTp8+jb59++pt79u3L44ePWpwn7/++gtt27bF559/Dk9PTzRr1gxvvPEGsrNL72FQqVRIS0vTe1H942Fnjo3TOsLbwRy3k7Lw9MrjiE/NMXVZRERERFRPmCw4JSYmQq1Ww9XVVW+7q6sr4uPjDe5z8+ZN/Pvvv7h48SJ+//13LFmyBFu2bMGMGTNKPc/ChQtha2ure3l7e1fp56Daw9NOO2zPy94cUUnaYXsMT0RERERUFUy+OIQgCHrfi6JYYlshjUYDQRCwYcMGtG/fHgMHDsSiRYuwbt26Unud5s6di9TUVN0rJiamyj8D1R5e9tphel725riVmImnvz+Oe2kMT0RERET0aEwWnJycnCCVSkv0LiUkJJTohSrk7u4OT09P2Nra6rYFBQVBFEXcuXPH4D4KhQI2NjZ6L6rfvOwtsPG5jvC0KwhPKxmeiIiIiOjRmCw4yeVyhIWFYffu3Xrbd+/ejc6dOxvcp0uXLoiNjUVGRoZu29WrVyGRSODl5VWt9VLd4u2g7XnytDPHzYKepwSGJyIiIiKqJJMO1Zs1axZWrVqFNWvWIDIyEq+99hqio6Mxffp0ANphdhMmTNC1Hzt2LBwdHfHss88iIiIChw4dwptvvonJkyfD3JwrqJE+vfB0vyA8pTM8EREREVHFmTQ4jR49GkuWLMGHH36I1q1b49ChQ9i+fTt8fX0BAHFxcXrPdLKyssLu3buRkpKCtm3bYty4cRgyZAi++eYbU30EquW8HbTD9jxslbhxXztsj+GJiIiIiCrKpM9xMgU+x6lhik7KwpiVxxCbmgN/FytsfK4jnK0Vpi6LiIiIiEyoTjzHiagm+ThaYOO0jnC3VeJ6QgbGfn8c99NVpi6LiIiIiOoIBidqMHwdLbHxuY5ws1HiWkF4SsxgeCIiIiKisjE4UYPS
yMkSm6YxPBERERFRxTA4UYPTyMkSG6d1hKuNAlfvZWDc9/8hieGJiIiIiIxgcKIGyc/JEpumdYKLtQJX7qVj3CqGJyIiIiIqHYMTNVh+BcP2XKwVuByvDU8PMnNNXRYRERER1UIMTtSgNXa2wsYi4Wns98cZnoiIiIioBAYnavCaFIQn5yI9T8kMT0RERERUBIMTEQrC03Md4WSlQGRcGsMTEREREelhcCIq4O9ihU3TOsDJSoGIuDQ8s/o/pGQxPBERERERgxORHn8Xa2x8rgOcrOS4FKvteWJ4IiIiIiIGJ6JimrpaFwzb04Yn9jwREREREYMTkQFNXa3x83Md4Wgpx8W7aRi/+gRSs/JMXRYRERERmQiDE1EpmhUJTxfupmL8mv+Qms3wRERERNQQMTgRGRHgpg1PDpZynL+TivGrGZ6IiIiIGiIGJ6IyaMNTB114msDwRERERNTgMDgRlUOgmw02TO0AewsznLuTiglrTiAth+GJiIiIqKFgcCIqpyB3G2yY2lEbnmJSMGE1wxMRERFRQ8HgRFQBwR7a8GRnYYazMSmYuOYE0hmeiIiIiOo9BieiCtKGpw6wszBDeHQKJjA8EREREdV7DE5ElRDiYYufpnSArbk2PE1ccwIZqnxTl0VERERE1YTByZSyk4GkG0BOGiCKpq6GKqi5py02TNWGpzMMT0RERET1miCKDesn9rS0NNja2iI1NRU2NjamLebMeuCvl7RfSxWApVPBy1n7snB8+LWlM2BZ5Hszc9PWTjoX76Zi7PfHkZaTj7a+9lg3uT2sFDJTl0VEREREZahINuBPd6akVgFyKyA3Q/t12l3tqzzMLPVDlmWxkFU0dFk4AjJ59X6WBkzb89QR41Ydx6nbyXh27QmsfZbhiYiIiKg+YY9TbZCbBWQlApn3gcykgl/vF2xLfPh94XtqVcXPobQtX0+WhRNg4QBIpFX/Oeu583dSMG7Vf0jPyUf7Rg5Y+2w7WDI8EREREdVaFckGDE51jSgCqvSCYFUkZBUPXZmJD4OXqK7gSYQi4aocwweVdoAgVMenrXPOxaTgmdUF4cnPAWsnMTwRERER1VYMTkbU+eBUURoNkJNSrOeqeOgq0rOV/aDi55CYacOVhVP5hg/KLet10Dobk4Lxq/5DukobntY92w4WcoYnIiIiotqGwcmIBhecKkqdrw1VuqGDBn4t+p4qreLnkJmXfyEMCyfATFn1n7OaFQ1PHfy0w/YYnoiIiIhqFwYnIxicqlheTpHeq8Qic7OKh66Cr/OzK34OuXXFFsKQ1o6AEh6djPGrtUuUd2zsgLWT2sNczrljRERERLUFg5MRDE4mJIpAbmaxRS8SjYcuTSWei2RuX0pPVvGhhM7a+VmS6nuc2ZnoZEwoCE+dGjtizaR2DE9EREREtQSDkxEMTnWIKAI5qfrzs0qsNFhsfpaoqdg5BGkpC2EUztkqFroUNhWen3X6drLu4bidmzhi9USGJyIiIqLagMHJCAanekyjBrKTDYcqQ6ErJ7Xi55DKDQSr4j1ZRd6TWwAATt9+gAmrTyAzV40u/o5YNYHhiYiIiMjUGJyMYHAinfxc/dUFjS3vnpWkfVBxRekeVOyEFMEWe2NEJKitYOPojqe6h0Ju4/IwdFk48UHFRERERDWIwckIBieqtNIeVGxweff7gDq34udQ2hbryXIuffggH1RMRERE9Egqkg1qx/JjRHWB3AKQ+wB2PmW3LfqgYr0l3O8jPu4uTkVchZ0mBT7KLHjLsyBkFTyoOCdV+3pwoxwFCdrwZGiIoKHhg3xQMREREVGlMTgRVQdBAJQ22pdjE7233AC43HqASWtPICtdjW7NnLHy1TZQ5qfr91gZem5W4a/ZDwCIBc/cSgLuXy67JomslGBVykIYcisGLSIiIqICHKpHZCL/3UzCpLUnkZ2nRvdmzvhufBiUZuUceleeBxUXXRCjUg8qVpavJ8uhsXaIIREREVEdwzlORjA4UW1y
/GYSni0ITz0CnLHimQqEp4rIyymyqmBpKw0Wfp0A5OdU7PgOjQH3VoBbS+2v7q20oYqIiIioFmNwMoLBiWqbYzeSMHmdNjz1DHDGivFhUMhMuOiDwQcVl7K8e/o9bdAyxMYLcC8SpNxbAdbuHP5HREREtQaDkxEMTlQbHbuRhGfXnUBOnqZ2hKeKyEwC4s8BcYWv86UvbmHhVCRIFYQqez+GKSIiIjIJBicjGJyotjp6PRGTfziJnDwNegW6YPkzoXUnPBWXkwbEX9AGqfjz2l/vXwZETcm2ClttiCo6zM+pKZdaJyIiomrH4GQEgxPVZkeuJ2LyupNQ5WvQJ8gFS8fV4fBUXF42cC8CiDv7sHcqIcLw865k5oBbc/1hfs5BfEAwERERVSkGJyMYnKi2Kx6elo0Lg1wmMXVZ1SM/F0i8oj/ML/4CkJdZsq3EDHAJ0g9TriGA3LLm6yYiIqJ6gcHJCAYnqgv+vZaIKT8UhidXLBsXWn/DU3EaNZB0o2CYX5G5UzmpJdsKEsCpmf4wP7cWgLldjZdNREREdQ+DkxEMTlRXHL52H1N+OIXcfA0eD3bF0rENKDwVJ4pASnSRnqmCV2kr+tk30u+ZcmsFWDnXaMlERERU+zE4GcHgRHXJoav3MfVHbXjqF+KKb8eGwkzaQMOTIenxxcLUeSA12nBba4+SK/rZeHJFPyIiogaMwckIBieqaw5evY/nGJ7KL+uB/mp+ceeApOuG21o4lnxwr70fIOH1JSIiagjqVHBatmwZvvjiC8TFxSEkJARLlixB165dDbY9cOAAevbsWWJ7ZGQkAgMDy3U+Bieqiw5cScC09aeRm69B/xA3/N/YNgxPFaFKB+IvPgxS8eeBhEhAVJdsq7DRzpPSDfNrqZ1HJZXVfN1ERERUrepMcNq8eTPGjx+PZcuWoUuXLvjuu++watUqREREwMfHp0T7wuB05coVvQ/m7OwMqbR8SzYzOFFdtf9KAp7/8TRy1RoMaO6Gb55meHokeTlAwqWHQ/zizgH3LgFqVcm2MiXg2lx/mJ9LMCBT1HzdREREVGXqTHDq0KEDQkNDsXz5ct22oKAgDBs2DAsXLizRvjA4JScnw87OrlLnZHCiumz/5QQ8v14bnga1cMeSMa0ZnqqSOg9IvKo/byr+ApCbUbKtRKZdHt2t6CIUzbk8OhERUR1SkWxgsrEnubm5OH36NObMmaO3vW/fvjh69KjRfdu0aYOcnBwEBwfj3XffNTh8r5BKpYJK9fB/kNPS0h6tcCIT6hnoghXjQzF9/Rn8fSEOAPD1mNaQMTxVDamZ9tlQriFA67HabRoN8ODmwwf3Fs6dyk7Whqr4C8DZnwoOIABOTYut6NcCMLc31SciIiKiKmKy4JSYmAi1Wg1XV1e97a6uroiPjze4j7u7O1auXImwsDCoVCqsX78evXv3xoEDB9CtWzeD+yxcuBAffPBBlddPZCq9Al2x/JlQTP/ptDY8CcDXoxmeqo1EAjj5a18tntRuE0UgNUZ/mF/cOSAjXttjlXgVuPDrw2PY+eqHKfdWgJWLaT4PERERVYrJhurFxsbC09MTR48eRadOnXTbP/74Y6xfvx6XL18u13GGDBkCQRDw119/GXzfUI+Tt7c3h+pRnbcn4h5e2HAaeWoRQ1p5YPGoVgxPppZ+r6BH6uzDUJVy23Bba3f91fzcWwG2XlwenYiIqAbViaF6Tk5OkEqlJXqXEhISSvRCGdOxY0f89NNPpb6vUCigUHACN9U/fYJdsWxcGF7ccBr/OxcLAcAihifTsnYFrB8Hmj7+cFvWA+1wvqLD/BKvAelx2te1nQ/bmtuXfHCvQ2Muj05ERFQLmCw4yeVyhIWFYffu3Rg+fLhu++7duzF06NByHyc8PBzu7u7VUSJRrfd4sCuWjg3FjJ/P4K9zsRAE4KunGJ5qFQsHoHF37auQKgO4d1F/mN/9SO28qZsHtK9Ccusiy6MX9FA5BXB5dCIiohpm0n95
Z82ahfHjx6Nt27bo1KkTVq5ciejoaEyfPh0AMHfuXNy9exc//vgjAGDJkiVo1KgRQkJCkJubi59++glbt27F1q1bTfkxiEyqb4gblo4NxYsbzuDPs7EAgEWjWkMq4ZCvWkthBfh01L4K5eVow5NuRb/z2nCVmw5EH9W+CsmU2gUsig71cwkGzJQ1/1mIiIgaCJMGp9GjRyMpKQkffvgh4uLi0Lx5c2zfvh2+vr4AgLi4OERHR+va5+bm4o033sDdu3dhbm6OkJAQ/P333xg4cKCpPgJRrdA3xA3fjg3FSz9rw5NEEPDlU60YnuoSMyXg0Ub7KqTOf7g8euEwv7jz2jB197T2VUgiA5wD9Yf6uTbXhjQiIiJ6ZCZ9jpMp8DlOVJ/tuBiHl34OR75GxIg2nviC4an+0WiA5Fv6z5qKOwdkPzDQWAAc/fWH+bm11A4fJCIiorrzAFxTYHCi+u6fC3F4aWM41BoRI0I98cWTDE/1nigCaXf1h/nFnQPSYw23t/MpGObX+mHvlHX5F+UhIiKqLxicjGBwooagaHgaGeqFz59syfDUEGUkaENUfJGeqeQow22tXIut6NdSG7C4PDoREdVjDE5GMDhRQ/H3+Ti8skkbnp4M88LnI1tCwvBE2SkPl0cvnDuVeBUQNSXbKu1KPrjXoQmXRycionqDwckIBidqSLadj8Wrm85CrRHxVJgXPmN4IkNyM4F7l/TnTCVEApq8km3lVtrl0Yuu6OccAEjNar5uIiKiR8TgZASDEzU0/zsXi5mbteFpVFsvfDqC4YnKIV+lDU/xRZ41FX8RyM8u2VaqAFyD9XumXEK4PDoREdV6FckGfIIiUT03pJUHAODVTeH45dQdCBCwcEQLhicyTqYAPFprX4XU+UDSdf2eqfjzgCoNiA3XvgoJ0iLLoxeu6NcCUFjX9CchIiKqEuxxImog/jx7F69tPguNCIxp541PhjM8URXQaICUKP3V/OLOAVmJBhoLgGMT/WF+7q24PDoREZkMh+oZweBEDVnR8PR0ex98PKw5wxNVPVEE0mL1h/nFndMumW6IrXfJFf2s3biiHxERVTsGJyMYnKih+yP8Lmb9og1PYzv44KOhDE9UQzITSw7ze3DTcFtLF/1hfu6tADtfhikiIqpSDE5GMDgRAb+H38Hrv5yDRgTGdfDBAoYnMpWc1ILl0Yv0TiVeKWV5dNsiw/xaa391bAJIpDVeNhER1Q8MTkYwOBFp/XbmDl7/9RxEEXimozY8CfzffKoNcrOAhAgg7qz+8ujq3JJtzSy0i04UHebnHAjI5DVeNhER1T1cVY+IyjQi1AuiCLyx5Rx+Oh4NAQI+HBrC8ESmJ7cAvNpqX4Xyc4H7lx8O8Ys7p+2pyssCYv7TvgpJ5YBLcJGhfq0B1xDAzLzGPwoREdUfDE5EDdjIMC+IAN7ccg7rj9+GIAAfPMHwRLWQTF4Qglo+3KZRFyyPfr5I79R5QJVa8P3Zh20FKeDUrNgiFC0AJUceEBFR+VRqqF5MTAwEQYCXlxcA4MSJE/j5558RHByMadOmVXmRVYlD9YhK+vVUDN7aeh6iCEzs5Iv5DE9UV4kikHJbfxGKuHNA5n3D7a09AFtPwNYLsPHUrvCn+94LsHTighRERPVYtc9x6tq1K6ZNm4bx48cjPj4eAQEBCAkJwdWrV/HKK6/gvffeq3Tx1Y3BiciwX07FYHZBeJrUuRHeHxLM8ET1gygC6fH6q/nFnQNSY8reV6rQBqmioap4wOJDfYmI6qxqn+N08eJFtG/fHgDwyy+/oHnz5jhy5Ah27dqF6dOn1+rgRESGjWrrDYjAW1vPY93RKAgC8N5ghieqBwQBsHHXvgL6P9ye9QBIvgWk3gFS72qfM5Uao/069Q6QcQ9Qq7RLppe2bDoAKGy1AUoXqrwevmwKtnGxCiKiOq9SwSkvLw8KhQIAsGfPHjzxxBMAgMDAQMTFxVVddURUo0a184YIEbO3XsDaI1EQIGDe
4CCGJ6qfLBy0L88ww+/n5wLpcdoQVTRUpRUEq9Q7QE6Kdk5VQiqQcKn0c1m56oeq4gHL0gWQSKrlYxIRUdWoVHAKCQnBihUrMGjQIOzevRsLFiwAAMTGxsLR0bFKCySimjW6nQ9EEZjz2wWsOXILggC8O4jhiRogmRyw99W+SqPKKD1UFQau/Bxt71XGPSD2jOHjSMy0PWK23kVClWeR7z0BpR3nWxERmVClgtNnn32G4cOH44svvsDEiRPRqlUrAMBff/2lG8JHRHXXmPY+0IjA279fwOp/b0EA8A7DE1FJCivAOUD7MkQUtUMCU2MMh6rUu0B6LKDJA1Kita/SyK30Q5WNV7GA5cEl14mo9tFotL3zmYlAVqJ2sZ7MRCArCWg7WbsITx1R6QfgqtVqpKWlwd7eXrctKioKFhYWcHFxqbICqxoXhyAqvw3/3cY7v18EAEzr1hhzBwQyPBFVNXU+kBFfLFAVzLsqDFxZSeU7loXjwxUB9eZdFSxmYeUGSPkkEiJ6BKL4MAjphaEk7a9Fv88qaCOqDR9r6j7Aq5Th0jWk2heHyM7OhiiKutB0+/Zt/P777wgKCkK/fv0qc0giqoXGdfCFKALv/nERKw/dhABgDsMTUdWSyh7OdSpNbhaQFguk3SkZqgoXs8jL1AasrCTtqoGGCFLA2t1wqCoMXBYOHBJI1JCIIpCTWiQEJRYJQInFthd8rcmv+HkUNtreJQsnwNIZsHSsc6uSVio4DR06FCNGjMD06dORkpKCDh06wMzMDImJiVi0aBFeeOGFqq6TiEzkmY6+EAHM++Mivjt0ExCAOf0ZnohqlNwCcPLXvgwp/B9g3QqBRQPWHe33abHaH3bSCr4vbTV2mXkpoarIrwqr6vqkRPSoRBFQpRnoESolBGUmaocLV5TcWhuELAuCkIVjka+dSr4nU1T9Z61hlQpOZ86cweLFiwEAW7ZsgaurK8LDw7F161a89957DE5E9cz4jr6AKGLen5fw3cGbECBgdv8Ahiei2kIQAHN77cutheE2GjWQkVBy2fW0IgErMwHIzwaSrmtfpVHaFXuuVfEl2D0AqVm1fFSiBkcUAVW6fuDR9QiVMjxOnVvx88itivQIORXrHXLSf8/CCTBTVv1nreUqFZyysrJgba3tWtu1axdGjBgBiUSCjh074vbt21VaIBHVDuM7NYII4L0/L2HFwRuQCMCb/RieiOoMifTh86y82hpuk696OPyvRMAq+FWVpu3dykkB7l0o5WQCYO1WyhLsBT1ZFk5cgp0aJlEEcjMM9ADd1w611fUOFX6fqH2mXEWZWRoIQI4le4QKv+biMmWqVHDy9/fHH3/8geHDh2Pnzp147bXXAAAJCQlccIGoHpvQqRFEEXj/r0tYduAGBAF4oy/DE1G9IVMADo21r9LkpJWyQmCRr9UFz8BKjwPunjJ8HKlcf/ifwSXYbavncxJVJVEEcjNL6QEqZXhcfk7Fz2NmYXgIXGGPUPEwJLeo+s/awFVqVb0tW7Zg7NixUKvV6NWrF3bv3g0AWLhwIQ4dOoR//vmnygutKlxVj+jRrTtyC/P/FwEAeMzfCaE+dgj2sEGIhy287M0ZpIgaMo1G+4Nh8UBV9Pv0eADl+PFDYWNkCfaCwFUP5k1QLZSbWTLwGBweV/DKz674OWTmhnuEShseJ7es+s9JFcoGlV6OPD4+HnFxcWjVqhUkBV3tJ06cgI2NDQIDAytzyBrB4ERUNdYeuYUPCsJTUdZKGYLdtSFKG6Zs4O9iBTMph+QQUQF1nrY3qrQVAtPuANnJ5TuWpUvpKwTaegJWrtphitSw5WWX0gNUfHhcwXt5WRU/h0xZSo9Q0QUTioQjuSVXsKwFaiQ4Fbpz5w4EQYCnp+ejHKbGMDgRVZ2I2DScjHqAS7GpiIhLw9X4DOSqNSXayaUSNHOzQoj7wzAV5G4DSwWfJ0NEpcjNLD1UFQau8vwvv0QGWHsUW4K92GIW5vb8Abauycsu
ZYW44s8TKnjlZVb8HFJFyV6f4sPjioYjuRXvozqo2oOTRqPBRx99hK+++goZGRkAAGtra7z++ut45513dD1QtRGDE1H1yc3X4HpCBiLi0rRhKjYNEXFpSM8p+bwHQQAaOVrqglRhL5WzNYfdEFE5iKK2V6rECoF3Hi5ukRZb+oM3izKzLD1UFf7K+SLVKy/nYdAxODyu2Hu5GRU/h1ReEIAcS/YAGVowQWHNINQAVHtwmjt3LlavXo0PPvgAXbp0gSiKOHLkCObPn4/nnnsOH3/8caWLr24MTkQ1SxRFxDzIRkRcKi7FpiEiNg2XYtMQn2Z4YqyztQIhujBlixAPG/g4WEAi4T9eRFRB6nwg417JZdeLrhiYlVi+Y5k7GFghsMj31u7ahxmTVn5uKQskFHxffHhcbnrFzyGRlf7MIEPPE1LYMAhRCdUenDw8PLBixQo88cQTetv//PNPvPjii7h7925FD1ljGJyIaoekDFVBz1RaQaBKxc3ETBj6G8lKIUOQu7V23pS7DYI9bNDU1QoKGectENEjysvW9kzpLWARU2RJ9jvl690QJNrwVNoKgbbe2h/o6+oP7vm5D8NOeYbHqdIqfg6JrJRnCBVfMKEgHClt6+71pFqj2oOTUqnE+fPn0axZM73tV65cQevWrZGdXYmVRWoIgxNR7ZWVm4/L8em6IBURm4bL8elQ5ZecN2UmFeDvYl1kmJ8NgjxsYKPkQzeJqAqJIpCTanjZdd3XsYAmr+xjyZQPg1SJFQILvlZYV/9nArQLdBTt9TH4/KD7D8NRTmrFzyFIjTxQ1cBwOaUdgxDVuGoPTh06dECHDh3wzTff6G1/+eWXceLECfz3338VPWSNYXAiqlvy1RrcTMzEpdhUXLqbpuulSs02/EOKj4PFwzDlqZ035WKt4BLpRFR9NBogM6HYYhbFAlbGvfIdS2mrH6qKrxZo7QHI5CX3U+drw46x4XFF5wjlpFT8cwqShyHI4AIJxZ4npLTjQ46p1qv24HTw4EEMGjQIPj4+6NSpEwRBwNGjRxETE4Pt27eja9eulS6+ujE4EdV9oigiNjUHl+4WzJuK086duptiuLfb0VKOYA8b3bOmQjxs0MjRElLOmyKimpKv0vZM6VYILBqw7mrnYJWrV0cArFy0IUqqeDg8rrzLt+sdSqKdu1XimUGGFkxwZhCieqlGliOPjY3F0qVLcfnyZYiiiODgYEybNg3z58/HmjVrKlV4TWBwIqq/UrJydYtPFK7sd+N+JtSakn/NWcilCHSz1gtTzVytoTTjvCkiMhFVeikrBBb5Wq0ycgABsHAo54IJzoC5HZ9xRQ1ejT7Hqahz584hNDQUanU5lv40EQYnooYlJ0+NK4XzpgpW9rscl47svJJ/T0klAvydrbRD/Qp7qNxtYWvBeVNEVAuIonaYXWGQUufpD5czt2cQIqqgimQDrptJRPWa0kyKVt52aOVtp9um1oi4VTBvqnCY36XYNDzIzMWVe+m4ci8dv4U/XB3U085cF6YKe6fcbZWcN0VENUsQACtn7cujjamrIWpwGJyIqMGRSgT4u1jB38UKQ1t7AtDOm7qXptI9uPdSbBouxaUi5kE27qZoX7siHk7utrcw0/ZKuT8MU35OlpBJOf6fiIioPmJwIiICIAgC3GyVcLNVoneQq257anYeIov0Sl2KTcX1hAwkZ+XhyPUkHLmepGurkEkQ6P5wefQQDxsEutnAXM6hM0RERHVdhYLTiBEjjL6fkpLyKLUQEdU6tuZm6NjYER0bO+q2qfLVuHYvQ693KjIuDZm5apyLScG5mBRdW4kANHa2KhKmbBHsYQMHSwPLCRMREVGtVaHgZGtrW+b7EyZMeKSCiIhqO4VMiuaetmju+fDvRI1GxO0HWfpD/WLTkJihwvWEDFxPyMBf52J17d1tlbowFVww1M/L3pzzpoiIiGqpKl1Vry7gqnpEVJMS0nO0K/rFFg73S0VUUpbBtjZKWcG8KVvd
YhT+LlYw47wpIiKiamGy5cjrAgYnIjK1DFV+kXlT2iXSr95LR5665F/HcpkEAa7W2t4pT+38qSB3G1gqOEWViIjoUTE4GcHgRES1UW6+BtcTMnQP7r0Um4bI2DSkq/JLtBUEwM/REkEeReZNudvA2VphgsqJiIjqLgYnIxiciKiuEEURMQ+ydQ/uLRzyF5+WY7C9i7Wi4FlTD8OUj4MFJBLOmyIiIjKEwckIBiciqusSM1TaOVNxhWEqFTcTM2Hob3MrhQxB7ta61fyC3W3QzNUachnnTRERETE4GcHgRET1UVZuPiLj0hERpw1SEbFpiIxPR26+pkRbM6kAfxdr3bOmgt21C1FYK81MUDkREZHp1KngtGzZMnzxxReIi4tDSEgIlixZgq5du5a535EjR9C9e3c0b94cZ8+eLff5GJyIqKHIV2tw436mdqjf3YLeqbg0pGbnGWzv42DxMEwVDPdzsVZwiXQiIqq36kxw2rx5M8aPH49ly5ahS5cu+O6777Bq1SpERETAx8en1P1SU1MRGhoKf39/3Lt3j8GJiKicRFHE3ZRs3bOmIgpW97ubkm2wvZOVHEHuDx/cG+JhAz9HS86bIiKieqHOBKcOHTogNDQUy5cv120LCgrCsGHDsHDhwlL3GzNmDJo2bQqpVIo//viDwYmI6BElZ+YiMq7wwb2piIhLw/WEDGgM/AthIZci0M1aL0w1c7WG0kxa84UTERE9gopkA5M9CCQ3NxenT5/GnDlz9Lb37dsXR48eLXW/tWvX4saNG/jpp5/w0UcflXkelUoFlUql+z4tLa3yRRMR1VP2lnJ09ndCZ38n3bacPDWuxKfrhanIuDRk5apxJjoFZ6JTdG2lEgH+zla6YX7BHjYIcbeFrQXnTRERUf1gsuCUmJgItVoNV1dXve2urq6Ij483uM+1a9cwZ84cHD58GDJZ+UpfuHAhPvjgg0eul4iooVGaSdHK2w6tvO1029QaEbcSM7VBqsjKfg8yc3HlXjqu3EvHb+F3de097cwfLo9e0DvlbqvkvCkiIqpzTP7o+eL/eIqiaPAfVLVajbFjx+KDDz5As2bNyn38uXPnYtasWbrv09LS4O3tXfmCiYgaMKlEgL+LFfxdrDC0tScA7d/b8Wk5D+dNxabhUlwqYh5k426K9rUr4p7uGPYWZrrFJ4LdC+ZNOVlCJuUS6UREVHuZLDg5OTlBKpWW6F1KSEgo0QsFAOnp6Th16hTCw8Px0ksvAQA0Gg1EUYRMJsOuXbvQq1evEvspFAooFIrq+RBERARBEOBuaw53W3P0Dnr493dqdp5u3lREwXC/6wkZSM7Kw5HrSThyPUnXVmkmQYDbw+XRQzxsEOhmA3M5500REVHtYLLgJJfLERYWht27d2P48OG67bt378bQoUNLtLexscGFCxf0ti1btgz79u3Dli1b4OfnV+01ExFR+dmam6FjY0d0bOyo25aTp8b1hAzdUL9Lsdp5U5m5apyLScG5mBRdW4kANC6cN1Wwsl+Ihw3sLeUm+DRERNTQmXSo3qxZszB+/Hi0bdsWnTp1wsqVKxEdHY3p06cD0A6zu3v3Ln788UdIJBI0b95cb38XFxcolcoS24mIqHZSmknR3NMWzT1tdds0GhG3H2TphalLsWlIzFDhekIGridk4M+zsbr27rbKIg/u1YYpL3tzzpsiIqJqZdLgNHr0aCQlJeHDDz9EXFwcmjdvju3bt8PX1xcAEBcXh+joaFOWSERE1UwiEeDnZAk/J0sMbumh256QnqMb5lc41C8qKQtxqTmIS83BnsgEXVsbpUx/3pSnDZo4W8GM86aIiKiKmPQ5TqbA5zgREdVdGap87bypu6m6Ff2u3ktHnrrkP2VymQQBrta6JdIL501ZKky+LhIREdUSdeYBuKbA4EREVL/k5msezpsqCFORsWlIV+WXaCsIgJ+jpe5ZUz0DXBDkzn8LiIgaKgYnIxiciIjqP41GxJ3kbL0wdSk2FffS
VHrtBAEYGeqFN/sFwNVGaaJqiYjIVBicjGBwIiJquBIzVLoH956KeqCbJ2Uhl+KF7k3wXLfGUJpxCXQiooaCwckIBiciIioUHp2MBdsicCY6BQDgaWeO2QMCMaSlO1fpIyJqABicjGBwIiKiokRRxP/Ox+Gzfy7jbko2ACDUxw7zBgejjY+9iasjIqLqxOBkBIMTEREZkpOnxqrDN7HswA1k5aoBAMNae+Ct/oHwsDM3cXVERFQdGJyMYHAiIiJj7qXl4MudV7DlzB2IIqA0k2BatyaY3r0xLORcypyIqD5hcDKCwYmIiMrjwp1ULNgWgRNRDwAArjYKzO4fiGGtPSGRcP4TEVF9wOBkBIMTERGVlyiK2HExHp/8E4mYB9r5Ty29bPHe4GC0beRg4uqIiOhRMTgZweBEREQVlZOnxtojUVi6/zoyCh6sO6ilO+b0D4S3g4WJqyMiospicDKCwYmIiCrrfroKi3ZfwaaTMRBFQC6T4Lmufnihhz+sFJz/RERU1zA4GcHgREREjyoiNg0LtkXg2M0kAICztQJv9g3AyDAvSDn/iYiozmBwMoLBiYiIqoIoitgdcQ+fbI9EVFIWACDEwwbzBgejY2NHE1dHRETlweBkBIMTERFVpdx8DX48FoWv915Deo52/lP/EDfMHRgIX0dLE1dHRETGMDgZweBERETV4UFmLhbvvoqfT0RDrREhl0rwbJdGmNHLHzZKM1OXR0REBjA4GcHgRERE1enqvXQs2BaBw9cSAQCOlnLM6tsMo9t6QyaVmLg6IiIqisHJCAYnIiKqbqIo4sCV+/jo7wjcuJ8JAAh0s8a7g4LxWFMnE1dHRESFGJyMYHAiIqKakqfWYMPx21i85xpSs/MAAH2CXPD2wCA0drYycXVERMTgZASDExER1bSUrFx8vfca1h+7jXyNCJlEwIROjfBq76awteD8JyIiU2FwMoLBiYiITOXG/Qx88nck9l5OAADYWZjhtT7NMLaDD8w4/4mIqMYxOBnB4ERERKZ26Kp2/tPVexkAAH8XK7w7KAg9AlxMXBkRUcPC4GQEgxMREdUG+WoNNp2MwaLdV/EgMxcA0L2ZM94dFISmrtYmro6IqGFgcDKCwYmIiGqT1Ow8fLvvGtYdjUKeWoRUIuCZDj6Y2acZ7C3lpi6PiKheY3AygsGJiIhqo6jETHyyPRK7Iu4BAGyUMrzapxnGd/SFXMb5T0RE1YHByQgGJyIiqs2O3kjEgm2RiIxLAwD4OVninYFB6B3kAkEQTFwdEVH9wuBkBIMTERHVdmqNiC2nY/DFzqtIzFABALr4O+LdQcEIcue/XUREVYXByQgGJyIiqivSc/Kw7MANrP73FnLzNZAIwOh2Pni9bzM4WSlMXR4RUZ3H4GQEgxMREdU1MQ+y8Ok/l/H3hTgAgLVChpd6+WNSl0ZQyKQmro6IqO5icDKCwYmIiOqqE7ceYMG2CFy4mwoA8HGwwNsDA9EvxI3zn4iIKoHByQgGJyIiqss0GhG/hd/F5zsuIyFdO/+pg58D5g0ORnNPWxNXR0RUtzA4GcHgRERE9UGmKh/fHbyB7w7dhCpfA0EAngrzwht9A+BiozR1eUREdQKDkxEMTkREVJ/cTcnG5zsu48+zsQAAS7kUL/b0x5TH/KA04/wnIiJjGJyMYHAiIqL66PTtZCzYFoGzMSkAAE87c8wZEIjBLd05/4mIqBQMTkYwOBERUX2l0Yj43/lYfPbPZcSm5gAA2vraY97gYLTytjNtcUREtRCDkxEMTkREVN9l56rx/eGbWH7gBrLz1ACAEW088Vb/QLjZcv4TEVEhBicjGJyIiKihiE/NwRc7r2DrmTsAAHMzKZ7v3hjPd2sCcznnPxERMTgZweBEREQNzfk7KViwLQIno5IBAO62SrzVPwBDW3lCIuH8JyJquBicjGBwIiKihkgURWy/EI9Ptkfibko2AKCVtx3eGxyMMF97E1dHRGQaDE5GMDgREVFDlpOn
xpojt7B033Vk5mrnPw1p5YHZ/QPgZW9h4uqIiGoWg5MRDE5ERERAQnoOvtp5Fb+cjoEoAgqZBM91bYwXejSBpUJm6vKIiGoEg5MRDE5EREQPXYpNxYJtETh+8wEAwMVagTf7BWBkqBfnPxFRvcfgZASDExERkT5RFLEr4h4+2R6J20lZAIDmnjaYNygYHRo7mrg6IqLqw+BkBIMTERGRYap8NX44GoX/23sd6ap8AMDAFm6YOyAI3g6c/0RE9Q+DkxEMTkRERMYlZqiwePdVbDwRDY0IyKUSTH7MDzN6NoG10szU5RERVRkGJyMYnIiIiMrncnwaPv47EoevJQIAnKzkeL1vAEa19YaU85+IqB5gcDKCwYmIiKj8RFHE/isJ+GhbJG4mZgIAAt2s8d7gYHT2dzJxdUREj4bByQgGJyIioorLU2uw/thtfL33GlKz8wAAjwe74u2BQfBzsjRxdURElcPgZASDExERUeUlZ+bi673XsP74bag1IsykAiZ2aoSXezeFrTnnPxFR3VKRbCCpoZpKtWzZMvj5+UGpVCIsLAyHDx8ute2///6LLl26wNHREebm5ggMDMTixYtrsFoiIqKGzd5SjvlPhGDnzK7oGeCMPLWIVf/eQo8v9mP9sSjkqzWmLpGIqFqYNDht3rwZM2fOxDvvvIPw8HB07doVAwYMQHR0tMH2lpaWeOmll3Do0CFERkbi3XffxbvvvouVK1fWcOVEREQNm7+LNdY+2x4/TG6Ppi5WSM7Kw7w/L2HA14dx8Op9U5dHRFTlTDpUr0OHDggNDcXy5ct124KCgjBs2DAsXLiwXMcYMWIELC0tsX79eoPvq1QqqFQq3fdpaWnw9vbmUD0iIqIqkq/WYOOJaCzafRXJWdr5Tz0DnPHOoGD4u1iZuDoiotLViaF6ubm5OH36NPr27au3vW/fvjh69Gi5jhEeHo6jR4+ie/fupbZZuHAhbG1tdS9vb+9HqpuIiIj0yaQSjO/UCAfe6Impj/lBJhGw/8p99FtyCPP/uoTkzFxTl0hE9MhMFpwSExOhVqvh6uqqt93V1RXx8fFG9/Xy8oJCoUDbtm0xY8YMTJ06tdS2c+fORWpqqu4VExNTJfUTERGRPlsLM7w7OBi7XuuGPkGuUGtErDsahR5fHsDaI7eQx/lPRFSHyUxdgCDoP0BPFMUS24o7fPgwMjIycPz4ccyZMwf+/v54+umnDbZVKBRQKBRVVi8REREZ19jZCqsmtsWR64lYsC0Cl+PT8cH/IrD++G28OygIPQNcyvy3noiotjFZcHJycoJUKi3Ru5SQkFCiF6o4Pz8/AECLFi1w7949zJ8/v9TgRERERKbRxd8Jf7/SFb+cisFXu67g5v1MTF53Cl2bOuHdQcEIcLM2dYlEROVmsqF6crkcYWFh2L17t9723bt3o3PnzuU+jiiKeos/EBERUe0hlQh4ur0P9r/RA9O7N4FcKsHha4kY8PUhvPP7BSRl8N9wIqobTDpUb9asWRg/fjzatm2LTp06YeXKlYiOjsb06dMBaOcn3b17Fz/++CMAYOnSpfDx8UFgYCAA7XOdvvzyS7z88ssm+wxERERUNmulGeYMCMTY9j74dEcktl+Ix4b/ovHX2Vi83NsfEzs3gkImNXWZRESlMmlwGj16NJKSkvDhhx8iLi4OzZs3x/bt2+Hr6wsAiIuL03umk0ajwdy5c3Hr1i3IZDI0adIEn376KZ5//nlTfQQiIiKqAB9HCywbF4b/bibhw20RuBSbhk+2X8aG/6Lx9sAg9A125fwnIqqVTPocJ1OoyFrtREREVH00GhFbz9zB5zuv4H66dshex8YOmDc4GCEetiaujogagopkAwYnIiIiMqlMVT6WH7iB7w/fhCpfA0EARrf1xqy+zeBirTR1eURUjzE4GcHgREREVDvdSc7CZzuu4H/nYgEAlnIpZvTyx+QuflCacf4TEVU9BicjGJyIiIhqt9O3H+DDbZE4F5MCAPCy
N8fcAUEY2MKN85+IqEoxOBnB4ERERFT7aTQi/jx3F5/9cwXxaTkAgHaN7PHe4BC08OL8JyKqGgxORjA4ERER1R1ZuflYeegmVhy8gZw8DQBgZKgX3uofAFcbzn8iokfD4GQEgxMREVHdE5eajS92XMFv4XcBAOZmUrzQowme69oY5nLOfyKiymFwMoLBiYiIqO46G5OCBdsicPp2MgDA3VaJOQMC8UQrD85/IqIKY3AygsGJiIiobhNFEdvOx+HTfy7jbko2AKCNjx3mDQ5GqI+9iasjorqEwckIBiciIqL6ISdPjdX/3sLS/deRlasGAAxt7YHZ/QPhYWdu4uqIqC5gcDKCwYmIiKh+SUjLwZe7ruDX03cgioDSTIJpXRvj+e5NYKmQmbo8IqrFGJyMYHAiIiKqny7eTcWH2yJw4tYDAICrjQJv9gvEiDaekEg4/4mISmJwMoLBiYiIqP4SRRE7L8Xj4+2RiHmgnf/U0ssW8wYHo10jBxNXR0S1DYOTEQxORERE9Z8qX411R6Lwf/uuI0OVDwAY1MIdcwYEwtvBwsTVEVFtweBkBIMTERFRw3E/XYVFu69i88loaERALpNg6mN+eLGnP6w4/4mowWNwMoLBiYiIqOGJjEvDR39H4Mj1JACAk5UCb/RthqfaekPK+U9EDRaDkxEMTkRERA2TKIrYE5mAT7ZH4lZiJgAgyN0G8wYHoXMTJxNXR0SmwOBkBIMTERFRw5abr8H647fx9Z6rSMvRzn/qG+yKtwcGoZGTpYmrI6KaxOBkBIMTERERAcCDzFws2XMVG/6Lhlojwkwq4Nkufniplz9slGamLo+IagCDkxEMTkRERFTUtXvp+OjvSBy8eh8A4GApx6zHm2FMO2/IpBITV0dE1YnByQgGJyIiIjJk/5UEfPx3JK4nZAAAmrlaYd7gYHRt6mziyoioujA4GcHgRERERKXJU2vw83/RWLznKlKy8gAAvQNd8PagIDRxtjJxdURU1RicjGBwIiIiorKkZuXh673X8OOxKORrRMgkAsZ38sWrvZvCzkJu6vKIqIowOBnB4ERERETldeN+BhZuj8SeyAQAgJ2FGWb2bopxHX1hxvlPRHUeg5MRDE5ERERUUYev3cdH2yJx5V46AKCJsyXeHRyMngEuJq6MiB4Fg5MRDE5ERERUGflqDTafisFXu67iQWYuAKBbM2e8OygIzVytTVwdEVUGg5MRDE5ERET0KNJy8rB033WsOXILeWoRUomAse198NrjzeBgyflPRHUJg5MRDE5ERERUFW4nZWLh9svYcSkeAGCtlOHV3k0xoVMjyGWc/0RUFzA4GcHgRERERFXp2I0kLNgWgYi4NABAI0cLvD0wCI8Hu0IQBBNXR0TGMDgZweBEREREVU2tEbH19B18vvMKEjNUAIDOTRwxb3Awgtz58wZRbcXgZASDExEREVWXDFU+lh+4ju8P30JuvgYSARjdzhuzHg+As7XC1OURUTEMTkYwOBEREVF1i3mQhU93XMbf5+MAAFYKGV7q5Y9nuzSCQiY1cXVEVIjByQgGJyIiIqopJ6MeYMG2CJy/kwoA8HYwx9sDgtC/uRvnPxHVAgxORjA4ERERUU3SaET8Hn4Xn++8jHtp2vlP7f0c8N7gYDT3tDVxdUQNG4OTEQxOREREZApZuflYcfAmVh66gZw8DQQBeDLUC2/2C4CLjdLU5RE1SAxORjA4ERERkSnFpmTj8x2X8cfZWACAhVyKF3s0wdSujaE04/wnoprE4GQEgxMRERHVBuHRyfhwWwTCo1MAAJ525pg9IBBDWrpz/hNRDWFwMoLBiYiIiGoLURTx17lYfPbPZcSm5gAAQn3sMG9wMNr42Ju4OqL6j8HJCAYnIiIiqm1y8tT4/tBNLD94A1m5agDA8DaeeKt/ANxtzU1cHVH9xeBkBIMTERER1Vb30nLwxc4r2HL6DgBAaSbB892a4PnujWEhl5m4OqL6h8HJCAYnIiIiqu0u
3EnFgm0ROBH1AADgZqPEW/0DMKy1JyQSzn8iqioMTkYwOBEREVFdIIoi/rkYj0+2R+JOcjYAoJWXLd4bEowwXwcTV0dUPzA4GcHgRERERHVJTp4aa49EYen+68hQ5QMABrd0x6zHm8HPyZIr8BE9AgYnIxiciIiIqC66n67Cot1XsOlkDAp/enO1USDUxx5hvvZo42OP5p42UMj4LCii8mJwMoLBiYiIiOqyiNg0fLrjMo5cT4Rao/9jnFwqQYinjS5MhfrYw81WaaJKiWo/BicjGJyIiIioPsjOVeP8nRScjk7GmdspCI9ORlJmbol2HrZKtPG1R5iPPUJ97RHsbgO5TGKCiolqHwYnIxiciIiIqD4SRRHRD7Jw+nYyzhSEqcvxaSjWKQWFTIIWnra64X2hvnZwsWavFDVMDE5GMDgRERFRQ5Gpyse5mBRtkIrW/pqSlVeinZe9ud7wvkB3a5hJ2StF9V+dCk7Lli3DF198gbi4OISEhGDJkiXo2rWrwba//fYbli9fjrNnz0KlUiEkJATz589Hv379yn0+BiciIiJqqERRxM3ETJy5rQ1S4dHJuHIvHcV/GlSaSdDSy04XpEJ97OBopTBN0UTVqM4Ep82bN2P8+PFYtmwZunTpgu+++w6rVq1CREQEfHx8SrSfOXMmPDw80LNnT9jZ2WHt2rX48ssv8d9//6FNmzblOieDExEREdFD6Tl5OBuTgjO3tT1S4dHJSMvJL9HO19FCG6J8tUEqwNUaMvZKUR1XZ4JThw4dEBoaiuXLl+u2BQUFYdiwYVi4cGG5jhESEoLRo0fjvffeK1d7BiciIiKi0mk0Im4mZmjnShWEqWsJGSXaWcilaFXYK+Vrhzbe9rC3lJugYqLKq0g2kNVQTSXk5ubi9OnTmDNnjt72vn374ujRo+U6hkajQXp6OhwcSn96tkqlgkql0n2flpZWuYKJiIiIGgCJRIC/izX8Xawxup12BFBqVh7CYx4O7zsbnYJ0VT6O3UzCsZtJun0bO1miTeFcKV87NHWxhlTCB/RS/WCy4JSYmAi1Wg1XV1e97a6uroiPjy/XMb766itkZmZi1KhRpbZZuHAhPvjgg0eqlYiIiKghs7UwQ48AF/QIcAEAqDUiridkPFzBLzoZN+9n4mai9rX1zB0AgJVChtbedrrhfW287WFrYWbKj0JUaSYLToUEQf9/IURRLLHNkI0bN2L+/Pn4888/4eLiUmq7uXPnYtasWbrv09LS4O3tXfmCiYiIiBo4qURAgJs1AtysMbaDtlcqOTNX2ytVMLzvXEwKMlT5+Pd6Iv69nqjb19/FquCZUnYI9bFHE2crSNgrRXWAyYKTk5MTpFJpid6lhISEEr1QxW3evBlTpkzBr7/+ij59+hhtq1AooFBwFRgiIiKi6mRvKUevQFf0CtT+HKfWiLgSn47T0ckIL+iZikrKwvWEDFxPyMDmUzEAABulDK197HVhqrW3HayV7JWi2sdkwUkulyMsLAy7d+/G8OHDddt3796NoUOHlrrfxo0bMXnyZGzcuBGDBg2qiVKJiIiIqIKkEgHBHjYI9rDB+I6+AICkDBXCo1NwOjoZZ24n4/ydVKTl5OPQ1fs4dPU+AEAQgGYu1rrhfaG+9mjsZFmuEUlE1alWLEe+YsUKdOrUCStXrsT333+PS5cuwdfXF3PnzsXdu3fx448/AtCGpgkTJuDrr7/GiBEjdMcxNzeHra1tuc7JVfWIiIiIaoc8tUbbK1VkrlTMg+wS7ewszNDG++FzpVp528FSYfIZJ1QP1JnlyAHtA3A///xzxMXFoXnz5li8eDG6desGAJg0aRKioqJw4MABAECPHj1w8ODBEseYOHEi1q1bV67zMTgRERER1V4J6Tk4c1u7et+ZaG2vlCpfo9dGIgABbjYIK5gnFepjD19HC/ZKUYXVqeBU0xiciIiIiOqO3HwNIuLScKagVyo8OgV3U0r2SjlaytHGp3AFP3u08rKDuVxq
goqpLmFwMoLBiYiIiKhui0/N0Q7tKwhTF++mIVet3ysllQgIcrcuWHRCG6a87M3ZK0V6GJyMYHAiIiIiql9U+Wpcin3YK3Xmdgri03JKtHOyUjwc3udrjxaetlCasVeqIWNwMoLBiYiIiKj+i03JLrLoRAoiYlORp9b/sddMKiDY3UbXIxXqaw8PWyV7pRoQBicjGJyIiIiIGp6cPDUu3k3VC1P301Ul2rnaKHSr97XxsUdzTxsoZOyVqq8YnIxgcCIiIiIiURRxJzm7yFypFETEpUGt0f/RWC6VIMTTRm+ulJut0kRVU1VjcDKCwYmIiIiIDMnOVeP8ncIH9GqXRE/KzC3RzsNWqTe8L9jdBnKZxAQV06NicDKCwYmIiIiIykMURUQ/yMKZ6GTtEL/bKbgcn4ZinVJQyCRo6WWrG94X6msHF2v2StUFDE5GMDgRERERUWVlqvJx7k6KbnjfmehkpGTllWjnZW+umysV6mOPQHdrmEnZK1XbMDgZweBERERERFVFFEXcSswsWHRCO7zvyr10FP8JW2kmQSuvhw/oDfWxg6OVwjRFkw6DkxEMTkRERERUndJz8nAu5uEKfuHRyUjLyS/RztfRAmE+9mjjqw1SAa7WkLFXqkYxOBnB4ERERERENUmjEXEzMQNnbqfowtS1hIwS7SzkUrT2LnxArx3aeNvD3lJugoobDgYnIxiciIiIiMjUUrPzcDZGG6TCo5NxNjoF6aqSvVKNnS1186RCfe3Q1MUaUgkf0FtVGJyMYHAiIiIiotpGrRFxPSGjyAN6k3HzfmaJdlYKGdr42GlX7yv41dbczAQV1w8MTkYwOBERERFRXZCcmavrlToTnYxzMSnIzFWXaOfvYlXwgF7tML8mzlaQsFeqXBicjGBwIiIiIqK6SK0RcSU+HaejkxFeEKaikrJKtLNRygp6pLRhqrW3HayV7JUyhMHJCAYnIiIiIqovkjJUCI9OwenoZJy5nYzzd1KRnaffKyUIQICrtW54X6ivPRo7WUIQ2CvF4GQEgxMRERER1Vd5ao22V6rIXKmYB9kl2tlbmD0MUj72aOVtB0uFzAQVmxaDkxEMTkRERETUkCSk5yA8OgVnCsLU+TupUOVr9NpIBCDQzUY3TyrUxx6+jhb1vleKwckIBiciIiIiashy8zWIjEsr8oDeFNxNKdkr5Wgp1/ZKFYSpVl52MJdLTVBx9WFwMoLBiYiIiIhI3720HJy5nawLUxfvpiFXrd8rJZUICHa30c2TCvWxh5e9eZ3ulWJwMoLBiYiIiIjIOFW+Gpdi03TD+87cTkF8Wk6Jds7WCt08qVBfe7TwtIXSrO70SjE4GcHgRERERERUcbEp2TgTXdgrlYKI2FTkqfWjhJlUQLCHrV6Y8rBV1tpeKQYnIxiciIiIiIgeXU6eGhfvphZZwS8F99NVJdq52Sh186Ta+NijuacNFLLa0SvF4GQEgxMRERERUdUTRRF3krMLhvYV9ErFpUGt0Y8bcqkEzT1t8M6gYIT52puoWq2KZIOGt1g7ERERERFVOUEQ4O1gAW8HCwxt7QkAyM5V4/ydFJyJTsHp28kIj05GUmYuzkSnwKKOrdDH4ERERERERNXCXC5Fh8aO6NDYEYC2Vyr6QRbORCejmau1iaurGAYnIiIiIiKqEYIgwNfREr6OlqYupcIkpi6AiIiIiIiotmNwIiIiIiIiKgODExERERERURkYnIiIiIiIiMrA4ERERERERFQGBiciIiIiIqIyMDgRERERERGVgcGJiIiIiIioDAxOREREREREZWBwIiIiIiIiKgODExERERERURkYnIiIiIiIiMrA4ERERERERFQGBiciIiIiIqIyyExdQE0TRREAkJaWZuJKiIiIiIjIlAozQWFGMKbBBaf09HQAgLe3t4krISIiIiKi2iA9PR22trZG2whieeJVPaLRaBAbGwtra2sIgmDqcpCWlgZvb2/ExMTAxsbG1OXUO7y+1YvXt3rx
+lYvXt/qxetbvXh9qxevb/WqTddXFEWkp6fDw8MDEonxWUwNrsdJIpHAy8vL1GWUYGNjY/Ibpz7j9a1evL7Vi9e3evH6Vi9e3+rF61u9eH2rV225vmX1NBXi4hBERERERERlYHAiIiIiIiIqA4OTiSkUCrz//vtQKBSmLqVe4vWtXry+1YvXt3rx+lYvXt/qxetbvXh9q1ddvb4NbnEIIiIiIiKiimKPExERERERURkYnIiIiIiIiMrA4ERERERERFQGBiciIiIiIqIyMDhVo0OHDmHIkCHw8PCAIAj4448/ytzn4MGDCAsLg1KpROPGjbFixYrqL7SOquj1PXDgAARBKPG6fPlyzRRcxyxcuBDt2rWDtbU1XFxcMGzYMFy5cqXM/XgPl09lri/v4fJbvnw5WrZsqXu4YqdOnfDPP/8Y3Yf3bvlV9Pry3n00CxcuhCAImDlzptF2vIcrpzzXl/dw+c2fP7/EdXJzczO6T125dxmcqlFmZiZatWqFb7/9tlztb926hYEDB6Jr164IDw/H22+/jVdeeQVbt26t5krrpope30JXrlxBXFyc7tW0adNqqrBuO3jwIGbMmIHjx49j9+7dyM/PR9++fZGZmVnqPryHy68y17cQ7+GyeXl54dNPP8WpU6dw6tQp9OrVC0OHDsWlS5cMtue9WzEVvb6FeO9W3MmTJ7Fy5Uq0bNnSaDvew5VT3utbiPdw+YSEhOhdpwsXLpTatk7duyLVCADi77//brTNW2+9JQYGBupte/7558WOHTtWY2X1Q3mu7/79+0UAYnJyco3UVN8kJCSIAMSDBw+W2ob3cOWV5/ryHn409vb24qpVqwy+x3v30Rm7vrx3Kyc9PV1s2rSpuHv3brF79+7iq6++Wmpb3sMVV5Hry3u4/N5//32xVatW5W5fl+5d9jjVIseOHUPfvn31tvXr1w+nTp1CXl6eiaqqf9q0aQN3d3f07t0b+/fvN3U5dUZqaioAwMHBodQ2vIcrrzzXtxDv4YpRq9XYtGkTMjMz0alTJ4NteO9WXnmubyHeuxUzY8YMDBo0CH369CmzLe/hiqvI9S3Ee7h8rl27Bg8PD/j5+WHMmDG4efNmqW3r0r0rM3UB9FB8fDxcXV31trm6uiI/Px+JiYlwd3c3UWX1g7u7O1auXImwsDCoVCqsX78evXv3xoEDB9CtWzdTl1eriaKIWbNm4bHHHkPz5s1Lbcd7uHLKe315D1fMhQsX0KlTJ+Tk5MDKygq///47goODDbblvVtxFbm+vHcrbtOmTThz5gxOnjxZrva8hyumoteX93D5dejQAT/++COaNWuGe/fu4aOPPkLnzp1x6dIlODo6lmhfl+5dBqdaRhAEve9FUTS4nSouICAAAQEBuu87deqEmJgYfPnll/xLrwwvvfQSzp8/j3///bfMtryHK66815f3cMUEBATg7NmzSElJwdatWzFx4kQcPHiw1B/uee9WTEWuL+/diomJicGrr76KXbt2QalUlns/3sPlU5nry3u4/AYMGKD7ukWLFujUqROaNGmCH374AbNmzTK4T125dzlUrxZxc3NDfHy83raEhATIZDKDCZ0eXceOHXHt2jVTl1Grvfzyy/jrr7+wf/9+eHl5GW3Le7jiKnJ9DeE9XDq5XA5/f3+0bdsWCxcuRKtWrfD1118bbMt7t+Iqcn0N4b1butOnTyMhIQFhYWGQyWSQyWQ4ePAgvvnmG8hkMqjV6hL78B4uv8pcX0N4D5ePpaUlWrRoUeq1qkv3LnucapFOnTrhf//7n962Xbt2oW3btjAzMzNRVfVbeHh4reoCrk1EUcTLL7+M33//HQcOHICfn1+Z+/AeLr/KXF9DeA+XnyiKUKlUBt/jvfvojF1fQ3jvlq53794lViF79tlnERgYiNmzZ0MqlZbYh/dw+VXm+hrCe7h8VCoVIiMj0bVrV4Pv16l710SLUjQI6enpYnh4uBgeHi4CEBctWiSGh4eLt2/fFkVR
FOfMmSOOHz9e1/7mzZuihYWF+Nprr4kRERHi6tWrRTMzM3HLli2m+gi1WkWv7+LFi8Xff/9dvHr1qnjx4kVxzpw5IgBx69atpvoItdoLL7wg2traigcOHBDj4uJ0r6ysLF0b3sOVV5nry3u4/ObOnSseOnRIvHXrlnj+/Hnx7bffFiUSibhr1y5RFHnvPqqKXl/eu4+u+KpvvIerVlnXl/dw+b3++uvigQMHxJs3b4rHjx8XBw8eLFpbW4tRUVGiKNbte5fBqRoVLl1Z/DVx4kRRFEVx4sSJYvfu3fX2OXDggNimTRtRLpeLjRo1EpcvX17zhdcRFb2+n332mdikSRNRqVSK9vb24mOPPSb+/fffpim+DjB0bQGIa9eu1bXhPVx5lbm+vIfLb/LkyaKvr68ol8tFZ2dnsXfv3rof6kWR9+6jquj15b376Ir/YM97uGqVdX15D5ff6NGjRXd3d9HMzEz08PAQR4wYIV66dEn3fl2+dwVRLJh9RURERERERAZxcQgiIiIiIqIyMDgRERERERGVgcGJiIiIiIioDAxOREREREREZWBwIiIiIiIiKgODExERERERURkYnIiIiIiIiMrA4ERERERERFQGBiciIqIKEAQBf/zxh6nLICKiGsbgREREdcakSZMgCEKJV//+/U1dGhER1XMyUxdARERUEf3798fatWv1tikUChNVQ0REDQV7nIiIqE5RKBRwc3PTe9nb2wPQDqNbvnw5BgwYAHNzc/j5+eHXX3/V2//ChQvo1asXzM3N4ejoiGnTpiEjI0OvzZo1axASEgKFQgF3d3e89NJLeu8nJiZi+PDhsLCwQNOmTfHXX39V74cmIiKTY3AiIqJ6Zd68eRg5ciTOnTuHZ555Bk8//TQiIyMBAFlZWejfvz/s7e1x8uRJ/Prrr9izZ49eMFq+fDlmzJiBadOm4cKFC/jrr7/g7++vd44PPvgAo0aNwvnz5zFw4ECMGzcODx48qNHPSURENUsQRVE0dRFERETlMWnSJPz0009QKpV622fPno158+ZBEARMnz4dy5cv173XsWNHhIaGYtmyZfj+++8xe/ZsxMTEwNLSEgCwfft2DBkyBLGxsXB1dYWnpyeeffZZfPTRRwZrEAQB7777LhYsWAAAyMzMhLW1NbZv3865VkRE9RjnOBERUZ3Ss2dPvWAEAA4ODrqvO3XqpPdep06dcPbsWQBAZGQkWrVqpQtNANClSxdoNBpcuXIFgiAgNjYWvXv3NlpDy5YtdV9bWlrC2toaCQkJlf1IRERUBzA4ERFRnWJpaVli6FxZBEEAAIiiqPvaUBtzc/NyHc/MzKzEvhqNpkI1ERFR3cI5TkREVK8cP368xPeBgYEAgODgYJw9exaZmZm6948cOQKJRIJmzZrB2toajRo1wt69e2u0ZiIiqv3Y40RERHWKSqVCfHy83jaZTAYnJycAwK+//oq2bdvisccew4YNG3DixAmsXr0aADBu3Di8//77mDhxIubPn4/79+/j5Zdfxvjx4+Hq6goAmD9/PqZPnw4XFxcMGDAA6enpOHLkCF5++eWa/aBERFSrMDgREVGdsmPHDri7u+ttCwgIwOXLlwFoV7zbtGkTXnzxRbi5uWHDhg0IDg4GAFhYWGDnzp149dVX0a5dO1hYWGDkyJFYtGiR7lgTJ05ETk4OFi9ejDfeeANOTk548skna+4DEhFRrcRV9YiIqN4QBAG///47hg0bZupSiIionuEcJyIiIiIiojIwOBEREREREZWBc5yIiKje4OhzIiKqLuxxIiIiIiIiKgODExERERERURkYnIiIiIiIiMrA4ERERERERFQGBiciIiIiIqIyMDgRERERERGVgcGJiIiIiIioDAxOREREREREZfh/Pmk6Cgvc0A8AAAAASUVORK5CYII=",
      "text/plain": [
       "<Figure size 1000x500 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Calculate and print accuracies for the training and cross-validation sets,\n",
    "# then plot the per-epoch loss curves recorded during training.\n",
    "\n",
    "def accuracy_on(loader):\n",
    "    '''Return percentage accuracy of the global `model` over `loader`.\n",
    "\n",
    "    Labels are one-hot encoded, hence the argmax over dim 1 to recover\n",
    "    the class index before comparing with the predicted class.\n",
    "    '''\n",
    "    correct = 0\n",
    "    total = 0\n",
    "    for images, labels in loader:\n",
    "        outputs = model(images)\n",
    "        _, predicted = torch.max(outputs, 1)\n",
    "        _, true_labels = torch.max(labels, 1)  # one-hot -> class index\n",
    "        total += labels.size(0)\n",
    "        correct += (predicted == true_labels).sum().item()\n",
    "    return 100 * correct / total\n",
    "\n",
    "model.eval()  # evaluation mode: disables dropout, freezes batch-norm stats\n",
    "with torch.no_grad():\n",
    "    # Training set accuracy\n",
    "    tr_accuracy = accuracy_on(trLoader)\n",
    "    # Cross-validation set accuracy (was mislabelled 'Test set' before)\n",
    "    cv_accuracy = accuracy_on(cvLoader)\n",
    "\n",
    "print(f'Accuracy on training set: {tr_accuracy:.2f}%')\n",
    "print(f'Accuracy on cross-validation set: {cv_accuracy:.2f}%')\n",
    "\n",
    "# Plot training and cross-validation losses\n",
    "plt.figure(figsize=(10, 5))\n",
    "plt.plot(range(1, num_epochs+1), train_losses, label='Training Loss')\n",
    "plt.plot(range(1, num_epochs+1), cv_losses, label='Cross-Validation Loss')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.title('Training and Cross-Validation Loss')\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "计算测试精度"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Accuracy on test set: 85.94%\n"
     ]
    }
   ],
   "source": [
    "# Evaluate the trained model once on the held-out test set.\n",
    "model.eval()  # evaluation mode: no dropout, batch-norm uses running stats\n",
    "with torch.no_grad():\n",
    "    n_correct, n_seen = 0, 0\n",
    "    for images, labels in teLoader:\n",
    "        logits = model(images)\n",
    "        pred_classes = logits.argmax(dim=1)\n",
    "        target_classes = labels.argmax(dim=1)  # one-hot -> class index\n",
    "        n_seen += labels.size(0)\n",
    "        n_correct += (pred_classes == target_classes).sum().item()\n",
    "    test_accuracy = 100 * n_correct / n_seen\n",
    "    print(f'Accuracy on test set: {test_accuracy:.2f}%')"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.14"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
