{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "### 数据处理"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# Install libraries (one-time environment setup; commands kept commented out)\n",
    "# !mkdir /home/aistudio/external-libraries\n",
    "# !pip install imgaug -t /home/aistudio/external-libraries\n",
    "import sys\n",
    "sys.path.append('/home/aistudio/external-libraries')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "image shape: (32, 32, 3)\n",
      "label value: cattle\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAMgAAADFCAYAAAARxr1AAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAGylJREFUeJztnWlsXOd1ht9zZ+Um7pQoURIteZUVW04c17HjVFmcOGkAJ0VhJGgDA3WWAgnaoPljuECbAv2RAk2CoghSJKhrB0jjpHFSu47T2HGdOnYTWbIta7MsibQW7hSX4XAWznK//phhyuF7eDniSBQpnwcQxDm8c+937/DMved857yfOOdgGIaOd7kHYBhrGXMQwwjAHMQwAjAHMYwAzEEMIwBzEMMIwBzEMAIwBzGMAGpyEBG5R0TeFJFTIvLgxRqUYawVZKUz6SISAnACwN0ABgDsB/Bp59yxpd7T0dHhent7V3Q8Yzn4c8zPzZEtlU6TrbFpg7rHcDhc+7BWgK/YisWCuu3cXJZsoTB/7+dylduNjYwjMZ2U5cZSyxW4DcAp51w/AIjIYwDuBbCkg/T29uLAgQM1HNJYkiI7w8jZPrLte/lVst31oXvUXba1d9Q+rmUoKrZ0ka3J2Un1/f19b5Cttb2BbGfPnqx4/eefe6iq8dXyiLUFwLkFrwfKtgpE5PMickBEDoyPj9dwOMNYfS55kO6c+45z7lbn3K2dnZ2X+nCGcVGp5RFrEMDWBa97yrYLwqqJLxxfeR6X/BTZkmP9ZHv+yZ/wdkl+jgeAP/nsZ9mofF6+r3yGylevAz/y55X3Dg2fJdvk9IA6xuFzR8nWf/I82RIzlddnLptS97eYWu4g+wFcIyJXiUgUwKcAPFnD/gxjzbHiO4hzriAiXwLwCwAhAA8759idDWMdU1Mezzn3NICnL9JYDGPNYTPphhHA5ZkJWgaRZedv3jZoKQxPlNmDYpLfm+G0eoOfI9vE8Ih67NGRUbKFhL9Tm1uayRaJRsjmK0G6czwtGOa3Il/MqGNs39hOttFxDtKH+4Yq95fPq/tbjN1BDCMAcxDDCMAcxDACMAcxjADWZJC+WmhVo87nor/CFAd9mcQsvzfKRXIbtmzWD64Eu6IErJ7Ps+Yzw+fIdvrIb8n21hvHeX9eVNkfz1wDwK+efpxsrZu3ku2OO+/iN4e5QnhiOkG2uVlOEGSzY2RzBU5CAMDYJFcLTE3z5+X8xde7ukSQ3UEMIwBzEMMIwBzEMAIwBzGMAN7WQTp8npE+f4oD27FXXiRbepIDzpEcf99ce9de9dDX3Hwr2bwIfxyHjx4m22vPP0+2pBK4z4zxTHgkHCNbdmKIbADw/M/OkO2G3/8I2d7zvg/yPud4xn5qjPfXv59L+UaHuBOyffs2dYxpn8vW82m+jlGvq+K1VPmnb3cQwwjAHMQwAjAHMYwAzEEMIwBzEMMIoKYsloicBpBESd6o4Jzj1MwaxmW5rGTiTc6gYHqGTG0hRcjM48xN/wvPqscOOy51iG/mTM33fvyfZDt64CDZdrRymUubx2NsUDJlxZDSgAGg/wRnt1488WOydffcSLa7bruBbOPH/5dsrz/zU7LNTbMARWpwlzrG+l3vYlsd63k1XdVa8Toaq04+4WKked/vnOPiF8O4ArBHLMMIoFYHcQCeEZFXROTz2gamrGisZ2p1kPc6594J4KMAvigi71u8gSkrGuuZWmV/Bsv/j4nIT1EStH7hgnZyGfUZvCj3RjR2cf/G+MBbZMuOs9JfQ5T7OWay+gke/61SvtK6nWzPPPMSb5fk3ogmr5ttrXGypeY4cD9+VhdtGEmxZMTABAfQ33/kX3m7g11kS59j4fKGIpeKxOq4HGYuxar0ALC9kQNyb+PVZMtK5Wcd0pQhFFZ8BxGRBhFpmv8ZwIcBHFnp/gxjLVLLHWQjgJ+WJXrCAP7NOfdfF2VU
hrFGqEV6tB/AzRdxLIax5rA0r2EEcPn7QTTpwGoD96VWTqjy/U5ZYmzTO/immJ+dJlvf2TfJlp7kNHYuVqce+8QJXhkp1cjqgeE8n+TMBK+2lFBWVYpv58B9ZoqD7ENn9CB9PMdJjKZmVlE8e+p1su2b5CUVrungwDga4fObnmNbU5d+HYeHuA9mQ30bH6dtkQKjVLfsht1BDCMAcxDDCMAcxDACMAcxjAAue5CuxUpKJfgS772A9Q2VJRVEWR8vEuPZ5y233cn7UyZih1/lWe8eRYkQACbOs2DEoX2vka0uzIF7RxMHz3vv4jH+3s1cIv5P3/oW2ZIZLtMH9GuhKRymlVnu2FZelsB3HLiPjnErQbh1I9mkQS9Tev0otyckXmHhje4dOypep2b4uBp2BzGMAMxBDCMAcxDDCMAcxDACWPUgffGi85qH+krwnc1x/3hUmQkH9HX0PG16XQncC8r0fN8kdxRPKQHs3LW7yXbju+5Qx5g/y7PhP/rZL3m7DJeDf/KevWT7w49/mGwnT/HSAGMpTg7kXEgdY8TxttEwb9sU52vR0MJBdSLP59KwkWf7XR0vnTAwri9/UMxwEiOnaAg8/2RloXlymqsjNOwOYhgBmIMYRgDmIIYRgDmIYQSwbJAuIg8D+DiAMefc7rKtDcAPAfQCOA3gPucc11EvwncOc/nKWdu40hc+k+b1/17av49sGxob1ePccuNNZGuqqydbscj92YPjLJb2qxc5eH7rLK/rN6fMSMc296pjLCR5VnnsDC8PMJvka7Gzl2fnw+CAejrBwWrO5yC7UNRWawT8NAfGnuMSglCcP8OJSf5zGB3jZEedsq5jQzMnZBpbeDsAaFKSBnVhTrRs7WipeN13Tl/yYTHV3EEeAXDPItuDAJ5zzl0D4Lnya8O44ljWQZxzLwBYnJO8F8Cj5Z8fBfCJizwuw1gTrDQG2eicGy7/PIKSgIPKQuG48yYcZ6wzag7SnXMOSze/VgjHdZhwnLHOWOlM+qiIdDvnhkWkGwCv/K4gAsiioGpmloPQ/QdfJdvZ4UGyxaIsMAYAnW0sJnZd706yJWYmyHbwIAu6DZ8+RraRsxxwjk3xuRw8zIrmAHBbz/Vk27GJv0Cm2ri/urmDZ5/PDXFf+fAwB6KpJAfPLY16v3dqloP0mSmuANjR1UO2xjj/aaXrFGX5AidKiikeY9HTy9NzrVxWjzAnLJqbK88xHKru3rDSO8iTAO4v/3w/gCdWuB/DWNMs6yAi8gMAvwFwnYgMiMgDAL4G4G4ROQngQ+XXhnHFsewjlnPu00v8itf+NYwrDJtJN4wAVrXc3flAca4ygHpp38u03StHD5Ft5/UcCA6dS6jH+Y+nniPbxz+WJ1vfaRZv6zvHSu5eiMu5J5VZ4cGB02SLF9+tjvEdvb1k+7M//QzZtNnwnS0s3jY0xEmMk4c5uZCc4FR7c7sS6AIoFpQydmXSfUtrE9mcshyd+PzmkMcJ0FBIaUPI8+cHAGlF1C8U5pn9ol+ZDHDQqwcWY3cQwwjAHMQwAjAHMYwAzEEMIwBzEMMIYFWzWEW/iORsZebpv1/gXov2zVwqMpfl/okz/bpsvyiZkZcPserhESVbJsolCWmXKcw9C3s/uIdsXa1cKgIAhTRneXZfdx3ZPGW5goFfcJau7jxnc+5u4nUCN13LvTIHxofJBgDH67j3o7eHy1w6lbKSbJbLVLS+E9/n7JS2fmAsrJfD5JSelajS++NF9LKk5bA7iGEEYA5iGAGYgxhGAOYghhHAqgbp4gkiDZXBUnMbCy8MDrKk/aHXeQn2M6e4/wIAuns4oGvfxCUbvs+9CFOTvM+IEvT37lAC4M1ccpGZ00skclkO0ouK6EPmNJeQpE9zUJ1IcDBfp5SkvHsbl+x0x3jcALBhgvtJwq0snuBH+Dq6IgfaogTkxTwnX0SLpxWxidI+ufejMMf7jHqL329rFBpGzZiDGEYA5iCGEYA5iGEEsFJlxa8C
+ByA+eaCh5xzTy+3r1Q6i32vVfZgFBXp/VCIh/VWP/dpDA7qQXpjK4sfFIutZEsmeW09LUi/Sglsuzo5SB8YOEG21rAusx+5kRMJ4QRL+Z87eJRsR2d4GYGfHePtEj4Hqy1xnmX+8HW3qmO8I8oKjudGT5Mt1MwBeaGeezrySvDsfE5MOJ8/fy3wBoBiUZmJd8qM/eKlMqpc33KlyooA8E3n3J7yv2WdwzDWIytVVjSMtwW1xCBfEpFDIvKwiPDzS5mFyoqJKlf1MYy1wkod5NsAdgLYA2AYwNeX2nChsmJzS8tSmxnGmmRFM+nOudH5n0XkuwCequZ9c7kM3jp9uHIAilR9VzuXu4vSZB+v02dXP/SBj5Dt+l07yFacYwXHrjZFOr97G9k623j2ecdWLlff1rlZHaMm7JcY4uUPJmZYtLIfHJg23cRl7IUMVw9MT7LQxRNnWNwBAG7s4tL2q7Rp7hFOLmSaeYbbFbhFoFDgIN3Pc9BfXGLmO53lpEq8QVlbsW7xuC/hTHpZbnSeTwLgOhDDuAKoJs37AwB7AXSIyACAvwGwV0T2oOSGpwF84RKO0TAuGytVVvyXSzAWw1hz2Ey6YQSwquXu0aiPzb2VAV1rB8/s5vMcuH3kD1ihcGKCg0MACMc5SMvleJ+33HIj2bIpDiSHlKUO9tzA793Zu51s0+d12f7hES4lnzw3QDbvat7nXe/fS7asx4HtzCxfnwJfGhx98zAbAZx98xTZukIc3G7wOIHifN7OE95OlJYDpwyysERMnVMUF8NFRZmxUHktnDLbrmF3EMMIwBzEMAIwBzGMAMxBDCOAVQ3Sk6kEXtj/8wpbQQnItvVyufqeO3aR7UyfLhznCQe7k7O8HqFf5Jn4ZIKDxokZDrRffp1npI/38ez64KAepMeV8u3rY7wMgdfAM/EjSln8S/t/TbaCEodGYlxmn5jVVx/ORfj6JOKcDAiHeLs0+PyKSv94aHEZOoCwYssraxkCgCf8HR8K83iyc5XJF19JIqj7r2orw3ibYg5iGAGYgxhGAOYghhHAqgbpsXgYO6+uDETzSrlz1yZtVphLwZMpvdExHOaS7HyR19tLJDmAzitTtm09nDSIxDhID8W5V3z79fp3kF9ke1OYg/xfv8jrKB49yWJyTU3cayOeorqe40qBiWn9OvqO3+8UtfqkokCfyXG/vwjPcEejvJ6gZsso6v4AEI7y34rn8bUtUILAgnTDqBlzEMMIwBzEMAIwBzGMAKrpKNwK4HsANqIU2XzHOfePItIG4IcAelHqKrzPOcfR2gIa6uK4dU9l3/asUpJ97NjrZJuc5l1fv2u3epymxg3amZBlbJwDtXyOt0tO8zJfMymefW5v26TYdMGX2Sx/N8VDHGiH6zlwL+b5mkWFVfLrG1mJ3VMSAdPj59QxtnT3kq01yn8yiUkWzPOFky+xGAffnhK4Fwpcwq61QABAg7LcWlEpIWhorFS69zxddJDGV8U2BQBfcc7tAnA7gC+KyC4ADwJ4zjl3DYDnyq8N44qiGuG4Yefcq+WfkwDeALAFwL0AHi1v9iiAT1yqQRrG5eKCYhAR6QVwC4B9ADY65+ZXchlB6RFMe8/vhOOmJ3mewDDWMlU7iIg0AngcwJedcxUzbM45hyVmXhYKx7W08TOxYaxlqnIQEYmg5Bzfd879pGwendfHKv/PCmeGsc6pJoslKMn8vOGc+8aCXz0J4H4AXyv//8Ry+yr6BSRmKwUQPHBZyEyCsxDHj3PW6FT//6jH6dnGyow37dlJtm3KdnUeZ8CcIgJQVPpYohHutRCuhAAA1Gf4httdz2O8ZQ9naTqaudzjpRdeIltiirWQtf6b8UH9u801cH9K8VoeI5TrowlnxMJ8MTIpLknxi9z7EY3r3+UhRXEzl1GUKRZXGlVXaVJVLdadAD4D4LCIHCzbHkLJMX4kIg8AOAPgvuoOaRjrh2qE416ENolQ4oMXdziGsbawmXTDCMAcxDAC
WNV+EE+A+milTzqfg6w7b38X2XbuvIFs/WdOq8cZG2fRhukJRSY/wgmC0QwnA1paOHBvauKSDRdRylRmuG8EANoaeN3Dzi7uO0lu5cB//29+Q7aJaVZ/9JVrqyHcKgMAaGvjX7Rt4XKYlPI1G1HEFKLachXC0XImw6U0ztOj6oKizKiddnrRPqu9NnYHMYwAzEEMIwBzEMMIwBzEMAJY1SAd4uCFKoMqL6LI6SsL03ds2kK2G3br6/9lsxzk+Yqq3/D5YbKNJTjYHZsZJdumbg6om5s5qPWX6DuYzfN300T2ZbINTrKwxJFjPGs+l+Vxx+NLRN+LaGjWA+CtbUrvR/Is2bwWPk5LhKsUfHBPhyqw4Pizmk3q1zHkKYG/sgAkTfYvNbO3CLuDGEYA5iCGEYA5iGEEYA5iGAGsapCezc3hxFDlunfNLTwjHctxYLohzs1WrcpsNgDEldJoDywY0NXK5dyRMM9czyR5dj3kOMqbmeby8tFxXnYBABKjrBR5qoPFKnqabyHbH9/3PrId3s/v1dZlbGllEYk5pUwfANw0VwEcOXaIbL2dLBjR3sAl+QVFCXNCKW3fEOHZeqeIOwDAbIIFNeL1/LdSv6FyjJ6nVzgsxu4ghhGAOYhhBGAOYhgBmIMYRgC1KCt+FcDnAMxHsA85554O2lfRL2J6tjIAzxZY1j6mLC2Qb2omW3J2KXU8LmWur+PArbG+m2zxKAecnc1c7p5X1A215RQGTg2pIwwrSxMcGmWFw3PKZPi1US79b1Ouz+YurjTwlPLwbL0eAE9EuFd9CzgxUhfmY9c1KIqQaT6ZfJFVFHNZXqIhn9PXKEwrypyxGB+7tbVS9TIUrk5jpJos1ryy4qsi0gTgFRF5tvy7bzrn/qGqIxnGOqSanvRhAMPln5MiMq+saBhXPLUoKwLAl0TkkIg8LCKqSvNCZcVUgm+nhrGWqUVZ8dsAdgLYg9Id5uva+xYqKzYoVbqGsZapaiZdU1Z0zo0u+P13ATy13H6ikTh6Nl5dYSsoUvWeUq6cyfCs8Ni0rvWrzXxv3c5LE6QVOf5skvfZ2KjMFLcrs/ARFnnbsV1f/6++kQPW/j4u3Y6FlSUMuvmatWzkRMLsLM8yh4ocAO+88WqyAYB/nMvO8wUedzymLEHg8RjbG3m7cITPeeo8Vx+Iz/oBAJDO8FNJOMbbeqHKP3VtvUSNZe8gSykrzsuOlvkkgCNVHdEw1hG1KCt+WkT2oJT6PQ3gC5dkhIZxGalFWTFwzsMwrgRsJt0wAljVcnfnisgVKoPgWIxLrRvquNy5WOCZ1HSClcEBoKGeA79ingPyyTSvexhX1uDTFNp9jwPYdI5n9rs2aeslAvX1HLBu2qSUiBf5OHM+zx63t3EPeCbB28UjnHAI1fN2ABAf54C8boTPx/M58C+Ckx1eiD/rugb+rNMpTshE4rrQW9FxQsYXDtwzhcoqB1/pe9ewO4hhBGAOYhgBmIMYRgDmIIYRwKoG6UW/iFS6cma54LNoWXKWhdpCwkGtCAe1ANDcxPZ0mvcZUZYEkzAH+KksB9/JIS5t12auoZwfADifM+chRR3e95VgV8m6F9PcIhAOcWCbSnNAnczpffPSzLP40sABfeo8B9V5JQgugI89l+HrmHccZA8MD6pjHBnjSoXOzZwMcOnKJE9RKfvXsDuIYQRgDmIYAZiDGEYA5iCGEYA5iGEEsLqlJr6HfKayVCE1y83z2kLyuRxnaaJKuQcATL3FJSgzKc6C7H7HtWRLjHBGxxO+TOoad0pm6q0+PfsSi3JWrqWNsy/Nrfwd1tzCZTPIcbYrrpSzJGZZJCOd5iwUALiMIvAQ4cxfHlx+4ucVgYYQfy75MGex0nnOTPWfZUELAEgm+G+gpYf7QQpe5Tk66NnFxdgdxDACMAcxjADMQQwjgGpabuMi8rKIvC4iR0Xkb8v2q0Rkn4icEpEfiojyYGwY65tqgvQ5AB9wzs2WxRteFJGfA/hLlITj
HhORfwbwAEpKJ0uSz/kYGqgsx/CVwDYa4RKHwWEOnnM5XRAhrCxh0NLKgeTgsFLS4vF4PPD+6pW+Ck2VMRzTpY6OnzpOts1ZHmP4PJdnRCKcIGisZzXBhgZWPMxkOEgPRZfqteAAujHew9t5SsNMhktSpgp8vaWLy3MmZ/mzTs7qY8w6/o7vfScrT+6+ZXvF64OHn1H3t5hl7yCuxHwxUqT8zwH4AIAfl+2PAvhEVUc0jHVEVTGIiITKgg1jAJ4F0Adg2jk3nwccwBJqiwuF49KzejrRMNYqVTmIc67onNsDoAfAbQCur/YAC4Xj6hstTDHWFxeUxXLOTQN4HsB7ALSI/G4GrQeAPiNmGOuYapY/6ASQd85Ni0gdgLsB/D1KjvJHAB4DcD+AJ5bb19xcHn19w5X7V5YqaGpk28wU+3IyqT+y7drNsv+921kJcWDoNB+7iSWGXZ5nXesbOKCOKYF77zZdwa+tjWeas1meaZ5W1glMTClqlG3Kun557m3xPD5uInVeHWOuyLPz0wkWSdiQ4hn7mBI8Zz3eXyzK2yWSSh9LSv8ub97CTyXxTkW0o7EyOeGUXhmNarJY3QAeFZEQSnecHznnnhKRYwAeE5G/A/AaSuqLhnFFUY1w3CGUFN0X2/tRikcM44rFZtINIwBzEMMIQJyrruz3ohxMZBzAGQAdAPTIcP1h57I2We5ctjvnOpfbyao6yO8OKnLAOXfrqh/4EmDnsja5WOdij1iGEYA5iGEEcLkc5DuX6biXAjuXtclFOZfLEoMYxnrBHrEMIwBzEMMIYNUdRETuEZE3y626D6728WtBRB4WkTERObLA1iYiz4rIyfL/XO24BhGRrSLyvIgcK7dS/0XZvu7O51K2ha+qg5QLHr8F4KMAdqG0Uu6u1RxDjTwC4J5FtgcBPOecuwbAc+XX64ECgK8453YBuB3AF8ufxXo8n/m28JsB7AFwj4jcjlLV+Tedc1cDmEKpLfyCWO07yG0ATjnn+p1zOZRK5e9d5TGsGOfcCwAWN8Lfi1LLMbCOWo+dc8POuVfLPycBvIFSV+i6O59L2Ra+2g6yBcBCibwlW3XXERudc/NNLiMANl7OwawEEelFqWJ7H9bp+dTSFh6EBekXEVfKma+rvLmINAJ4HMCXnauUMVlP51NLW3gQq+0ggwC2Lnh9JbTqjopINwCU/2ex4TVKWcbpcQDfd879pGxet+cDXPy28NV2kP0ArilnF6IAPgXgyVUew8XmSZRajoEqW4/XAiIiKHWBvuGc+8aCX6278xGRThFpKf883xb+Bv6/LRxY6bk451b1H4CPATiB0jPiX6328Wsc+w8ADAPIo/RM+wCAdpSyPScB/BJA2+UeZ5Xn8l6UHp8OAThY/vex9Xg+AG5Cqe37EIAjAP66bN8B4GUApwD8O4DYhe7bSk0MIwAL0g0jAHMQwwjAHMQwAjAHMYwAzEEMIwBzEMMIwBzEMAL4P/reBAlsXKWPAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 216x216 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "import os\n",
    "import paddle\n",
    "import numpy as np\n",
    "from PIL import Image\n",
    "import matplotlib.pyplot as plt\n",
    "import imgaug as ia\n",
    "import imgaug.augmenters as iaa\n",
    "\n",
    "# Read one batch of data\n",
    "reader = paddle.batch(\n",
    "    paddle.dataset.cifar.train100(),\n",
    "    batch_size=8) # dataset reader\n",
    "data = next(reader()) # read one batch\n",
    "index = 0 # index of the sample within the batch\n",
    "\n",
    "# Read the image\n",
    "image = np.array([x[0] for x in data]).astype(np.float32) # image data as float32\n",
    "image = image * 255 # rescale from [0,1] to [0,255]\n",
    "image = image[index].reshape((3, 32, 32)).transpose((1, 2, 0)).astype(np.uint8) # CHW -> HWC, cast to uint8\n",
    "print('image shape:', image.shape)\n",
    "\n",
    "# Image augmentation (disabled for this preview)\n",
    "# sometimes = lambda aug: iaa.Sometimes(0.5, aug) # apply augmentation with probability 0.5\n",
    "# seq = iaa.Sequential([\n",
    "#     sometimes(iaa.CropAndPad(px=(-4, 4))),      # random crop/pad in pixels\n",
    "#     iaa.Fliplr(0.5)])                           # random horizontal flip\n",
    "# image = seq(image=image)\n",
    "\n",
    "# Read the label\n",
    "label = np.array([x[1] for x in data]).astype(np.int64) # label data as int64\n",
    "vlist = ['beaver', 'dolphin', 'otter', 'seal', 'whale',\n",
    "         'aquarium fish', 'flatfish', 'ray', 'shark', 'trout',\n",
    "         'orchids', 'poppies', 'roses', 'sunflowers', 'tulips',\n",
    "         'bottles', 'bowls', 'cans', 'cups', 'plates',\n",
    "         'apples', 'mushrooms', 'oranges', 'pears', 'sweet peppers',\n",
    "         'clock', 'keyboard', 'lamp', 'telephone', 'television',\n",
    "         'bed', 'chair', 'couch', 'table', 'wardrobe',\n",
    "         'bee', 'beetle', 'butterfly', 'caterpillar', 'cockroach',\n",
    "         'bear', 'leopard', 'lion', 'tiger', 'wolf',\n",
    "         'bridge', 'castle', 'house', 'road', 'skyscraper',\n",
    "         'cloud', 'forest', 'mountain', 'plain', 'sea',\n",
    "         'camel', 'cattle', 'chimpanzee', 'elephant', 'kangaroo',\n",
    "         'fox', 'porcupine', 'possum', 'raccoon', 'skunk',\n",
    "         'crab', 'lobster', 'snail', 'spider', 'worm',\n",
    "         'baby', 'boy', 'girl', 'man', 'woman',\n",
    "         'crocodile', 'dinosaur', 'lizard', 'snake', 'turtle',\n",
    "         'hamster', 'mouse', 'rabbit', 'shrew', 'squirrel',\n",
    "         'maple', 'oak', 'palm', 'pine', 'willow',\n",
    "         'bicycle', 'bus', 'motorcycle', 'pickup truck', 'train',\n",
    "         'lawn-mower', 'rocket', 'streetcar', 'tank', 'tractor'] # label name list\n",
    "vlist.sort() # sort alphabetically (label indices follow alphabetical order)\n",
    "print('label value:', vlist[label[index]])\n",
    "\n",
    "# Display the image\n",
    "image = Image.fromarray(image)           # convert to a PIL image\n",
    "os.makedirs('./work/out', exist_ok=True) # BUGFIX: ensure the output directory exists before saving\n",
    "image.save('./work/out/img.png')         # save the image\n",
    "plt.figure(figsize=(3, 3))               # set display size\n",
    "plt.imshow(image)                        # show the image\n",
    "plt.show()                               # render the figure"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "train_data: image shape (128, 3, 32, 32), label shape:(128, 1)\n",
      "valid_data: image shape (128, 3, 32, 32), label shape:(128, 1)\n"
     ]
    }
   ],
   "source": [
    "import paddle\n",
    "import numpy as np\n",
    "import imgaug as ia\n",
    "import imgaug.augmenters as iaa\n",
    "\n",
    "# CIFAR per-channel statistics, shaped (1, 1, 1, C) for BHWC broadcasting\n",
    "CIFAR_MEAN = np.array([0.4914, 0.4822, 0.4465]).reshape((1, 1, 1, -1)) # channel means\n",
    "CIFAR_STDV = np.array([0.2471, 0.2435, 0.2616]).reshape((1, 1, 1, -1)) # channel standard deviations\n",
    "\n",
    "def _to_uint8_bhwc(images):\n",
    "    # Rescale float BCHW images in [0,1] to uint8 BHWC images in [0,255].\n",
    "    images = images * 255\n",
    "    return images.transpose((0, 2, 3, 1)).astype(np.uint8)\n",
    "\n",
    "def _normalize_bchw(images):\n",
    "    # Normalize uint8 BHWC images by channel mean/stdv; return float32 BCHW.\n",
    "    images = (images/255.0 - CIFAR_MEAN) / CIFAR_STDV\n",
    "    return images.transpose((0, 3, 1, 2)).astype(np.float32)\n",
    "\n",
    "# Training data augmentation (random crop/pad and horizontal flip, then normalize)\n",
    "def train_augment(images):\n",
    "    # convert format\n",
    "    images = _to_uint8_bhwc(images)\n",
    "    \n",
    "    # augment images\n",
    "    sometimes = lambda aug: iaa.Sometimes(0.5, aug) # apply augmentation with probability 0.5\n",
    "    seq = iaa.Sequential([\n",
    "        sometimes(iaa.CropAndPad(px=(-4, 4))),      # random crop/pad in pixels\n",
    "        iaa.Fliplr(0.5)])                           # random horizontal flip\n",
    "    images = seq(images=images)\n",
    "    \n",
    "    # normalize\n",
    "    return _normalize_bchw(images)\n",
    "\n",
    "# Validation data preprocessing (no random augmentation, normalize only)\n",
    "def valid_augment(images):\n",
    "    return _normalize_bchw(_to_uint8_bhwc(images))\n",
    "\n",
    "# Read training data\n",
    "train_reader = paddle.batch(\n",
    "    paddle.reader.shuffle(paddle.dataset.cifar.train100(), buf_size=50000),\n",
    "    batch_size=128) # build the data reader\n",
    "train_data = next(train_reader()) # read one training batch\n",
    "\n",
    "train_image = np.array([x[0] for x in train_data]).reshape((-1, 3, 32, 32)).astype(np.float32) # training images\n",
    "train_image = train_augment(train_image)                                                       # augment training images\n",
    "train_label = np.array([x[1] for x in train_data]).reshape((-1, 1)).astype(np.int64)           # training labels\n",
    "print('train_data: image shape {}, label shape:{}'.format(train_image.shape, train_label.shape))\n",
    "\n",
    "# Read validation data\n",
    "valid_reader = paddle.batch(\n",
    "    paddle.dataset.cifar.test100(),\n",
    "    batch_size=128) # build the data reader\n",
    "valid_data = next(valid_reader()) # read one validation batch\n",
    "\n",
    "valid_image = np.array([x[0] for x in valid_data]).reshape((-1, 3, 32, 32)).astype(np.float32) # validation images\n",
    "valid_image = valid_augment(valid_image)                                                       # validation preprocessing\n",
    "valid_label = np.array([x[1] for x in valid_data]).reshape((-1, 1)).astype(np.int64)           # validation labels\n",
    "print('valid_data: image shape {}, label shape:{}'.format(valid_image.shape, valid_label.shape))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "### 模型设计"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "import paddle.fluid as fluid\n",
    "from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear, BatchNorm\n",
    "import math\n",
    "\n",
    "# Group architecture: (input dim, output dim, stride, number of basic blocks)\n",
    "group_arch = [(16, 16, 1, 18), (16, 32, 2, 18), (32, 64, 2, 18)]\n",
    "group_dim  = 64  # channel count of the last group's output\n",
    "class_dim  = 100 # number of classes (CIFAR-100)\n",
    "\n",
    "# Convolution unit\n",
    "class ConvUnit(fluid.dygraph.Layer):\n",
    "    def __init__(self, in_dim, out_dim, filter_size=3, stride=1, act=None):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the convolution unit, H/W=(H/W+2*P-F)/S+1\n",
    "        Inputs:\n",
    "            in_dim      - input channel count\n",
    "            out_dim     - output channel count\n",
    "            filter_size - convolution kernel size\n",
    "            stride      - convolution stride\n",
    "            act         - activation function (applied after batch norm)\n",
    "        Outputs:\n",
    "        \"\"\"\n",
    "        super(ConvUnit, self).__init__()\n",
    "        \n",
    "        # convolution layer\n",
    "        self.conv = Conv2D(\n",
    "            num_channels=in_dim,\n",
    "            num_filters=out_dim,\n",
    "            filter_size=filter_size,\n",
    "            stride=stride,\n",
    "            padding=(filter_size-1)//2,                       # 'same' padding: feature map size unchanged for stride 1\n",
    "            param_attr=fluid.initializer.MSRA(uniform=False), # MSRA (He) weight initialization (original comment had typo 'MARA')\n",
    "            bias_attr=False,                                  # no bias: batch norm supplies the shift\n",
    "            act=None)\n",
    "        \n",
    "        # batch normalization layer\n",
    "        self.norm = BatchNorm(\n",
    "            num_channels=out_dim,\n",
    "            param_attr=fluid.initializer.Constant(1.0), # constant scale initialization\n",
    "            bias_attr=fluid.initializer.Constant(0.0),  # constant bias initialization\n",
    "            act=act)\n",
    "    \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Apply convolution then batch normalization to the input\n",
    "        Inputs:\n",
    "            x - input features\n",
    "        Outputs:\n",
    "            x - output features\n",
    "        \"\"\"\n",
    "        # convolve\n",
    "        x = self.conv(x)\n",
    "        \n",
    "        # normalize (activation applied here if configured)\n",
    "        x = self.norm(x)\n",
    "        \n",
    "        return x\n",
    "\n",
    "# Queue structure\n",
    "class SSRQueue(fluid.dygraph.Layer):\n",
    "    def __init__(self, in_dim, out_dim, stride=1, queues=2, act=None):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the queue structure, H/W=(H/W+2*P-F)/S+1\n",
    "        Inputs:\n",
    "            in_dim  - input channel count\n",
    "            out_dim - output channel count\n",
    "            stride  - stride of the first queue item (1 keeps size, 2 downsamples)\n",
    "            queues  - queue length; split scale is 2^(n-1)\n",
    "            act     - activation function\n",
    "        Outputs:\n",
    "        \"\"\"\n",
    "        super(SSRQueue, self).__init__()\n",
    "        \n",
    "        # queue bookkeeping\n",
    "        self.queues = queues # queue length\n",
    "        self.split_list = [] # channel counts carried forward at each split\n",
    "        \n",
    "        # build the queue items\n",
    "        self.queue_list = [] # queue item list\n",
    "        for i in range(queues):\n",
    "            # add one queue item; NOTE: out_dim is halved below each iteration,\n",
    "            # so successive items operate on progressively fewer channels\n",
    "            queue_item = self.add_sublayer( # construct the queue item\n",
    "                'queue_' + str(i),\n",
    "                ConvUnit(\n",
    "                    in_dim=(in_dim if i==0 else out_dim), # every item after the first has in_dim=out_dim (already halved)\n",
    "                    out_dim=out_dim,\n",
    "                    filter_size=3,\n",
    "                    stride=(stride if i==0 else 1), # every item after the first has stride=1\n",
    "                    act=act))\n",
    "            self.queue_list.append(queue_item) # store the queue item\n",
    "            \n",
    "            # compute the next item's channel count\n",
    "            if i < (queues-1): # not the last item\n",
    "                out_dim = out_dim//2 # halve the channel count\n",
    "                self.split_list.append(out_dim) # record the carried-forward channel count\n",
    "            \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Extract features from the input feature map. Each non-final item's\n",
    "            output is split along channels: the first part is kept for the final\n",
    "            concat, the remaining split_list[i] channels feed the next item.\n",
    "        Inputs:\n",
    "            x - input features\n",
    "        Outputs:\n",
    "            x - output features (channel concat of all kept parts; totals out_dim)\n",
    "        \"\"\"\n",
    "        # extract features\n",
    "        x_list = [] # outputs collected for the final concat\n",
    "        for i, queue_item in enumerate(self.queue_list):\n",
    "            if i < (self.queues-1): # not the last item\n",
    "                x = queue_item(x) # extract queue features\n",
    "                x_item, x = fluid.layers.split(input=x, num_or_sections=[-1, self.split_list[i]], dim=1)\n",
    "                x_list.append(x_item) # keep the first part; the rest continues down the queue\n",
    "            else: # last item: no split\n",
    "                x = queue_item(x) # extract queue features\n",
    "                x_list.append(x) # keep the whole output\n",
    "        \n",
    "        # concatenate features\n",
    "        x = fluid.layers.concat(input=x_list, axis=1) # concat kept parts along the channel dim\n",
    "        \n",
    "        return x\n",
    "\n",
    "# Basic residual block\n",
    "class ResBasic(fluid.dygraph.Layer):\n",
    "    def __init__(self, in_dim, out_dim, stride=1, is_pass=True):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the basic residual block, H/W=(H/W+2*P-F)/S+1\n",
    "        Inputs:\n",
    "            in_dim  - input channel count\n",
    "            out_dim - output channel count\n",
    "            stride  - stride of the first convolution\n",
    "            is_pass - True uses an identity shortcut, False a 1x1 projection\n",
    "        Outputs:\n",
    "        \"\"\"\n",
    "        super(ResBasic, self).__init__()\n",
    "        \n",
    "        # shortcut type flag\n",
    "        self.is_pass = is_pass\n",
    "        \n",
    "        # projection path (used when channel count or stride changes)\n",
    "        self.proj = ConvUnit(in_dim=in_dim, out_dim=out_dim, filter_size=1, stride=stride, act=None)\n",
    "        \n",
    "        # convolution path: 3x3 conv followed by an SSRQueue\n",
    "        self.con1 = ConvUnit(in_dim=in_dim, out_dim=out_dim, filter_size=3, stride=stride, act='relu')\n",
    "        self.con2 = SSRQueue(in_dim=out_dim, out_dim=out_dim, stride=1, queues=3, act='relu')\n",
    "        \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Extract features from the input feature map\n",
    "        Inputs:\n",
    "            x - input features\n",
    "        Outputs:\n",
    "            x - output features\n",
    "        \"\"\"\n",
    "        # shortcut path\n",
    "        if self.is_pass: # identity shortcut\n",
    "            x_pass = x\n",
    "        else:            # projection shortcut\n",
    "            x_pass = self.proj(x)\n",
    "        \n",
    "        # convolution path\n",
    "        x_con1 = self.con1(x)\n",
    "        x_con2 = self.con2(x_con1)\n",
    "        \n",
    "        # BUGFIX: add the SSRQueue output (x_con2) to the shortcut; the original\n",
    "        # added x_con1, silently discarding the SSRQueue branch. SSRQueue's concat\n",
    "        # output has out_dim channels, so the shapes match.\n",
    "        x = fluid.layers.elementwise_add(x=x_pass, y=x_con2, act='relu')\n",
    "        \n",
    "        return x\n",
    "    \n",
    "# Block structure\n",
    "class ResBlock(fluid.dygraph.Layer):\n",
    "    def __init__(self, in_dim, out_dim, stride=1, basics=1):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the block structure, H/W=(H/W+2*P-F)/S+1\n",
    "        Inputs:\n",
    "            in_dim  - input channel count\n",
    "            out_dim - output channel count\n",
    "            stride  - stride of the first basic block\n",
    "            basics  - number of basic blocks\n",
    "        Outputs:\n",
    "        \"\"\"\n",
    "        super(ResBlock, self).__init__()\n",
    "        \n",
    "        # build the list of basic blocks\n",
    "        self.block_list = [] # basic block list\n",
    "        for i in range(basics):\n",
    "            block_item = self.add_sublayer( # construct a basic block\n",
    "                'block_' + str(i),\n",
    "                ResBasic(\n",
    "                    in_dim=(in_dim if i==0 else out_dim), # every block after the first has in_dim=out_dim\n",
    "                    out_dim=out_dim,\n",
    "                    stride=(stride if i==0 else 1), # every block after the first has stride=1\n",
    "                    is_pass=(False if i==0 else True))) # every block after the first uses the identity shortcut\n",
    "            self.block_list.append(block_item) # store the basic block\n",
    "    \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Extract features from the input feature map\n",
    "        Inputs:\n",
    "            x      - input features\n",
    "        Outputs:\n",
    "            x      - output features\n",
    "        \"\"\"\n",
    "        for block_item in self.block_list:\n",
    "            x = block_item(x) # run each basic block in sequence\n",
    "            \n",
    "        return x\n",
    "\n",
    "# Group structure\n",
    "class ResGroup(fluid.dygraph.Layer):\n",
    "    def __init__(self):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the group structure from the module-level group_arch,\n",
    "            H/W=(H/W+2*P-F)/S+1\n",
    "        Inputs:\n",
    "        Outputs:\n",
    "        \"\"\"\n",
    "        super(ResGroup, self).__init__()\n",
    "        \n",
    "        # build the list of blocks, one per group_arch entry\n",
    "        self.group_list = [] # group list\n",
    "        for i, block_arch in enumerate(group_arch):\n",
    "            group_item = self.add_sublayer( # construct a group item\n",
    "                'group_' + str(i),\n",
    "                ResBlock(\n",
    "                    in_dim=block_arch[0],\n",
    "                    out_dim=block_arch[1],\n",
    "                    stride=block_arch[2],\n",
    "                    basics=block_arch[3]))\n",
    "            self.group_list.append(group_item) # store the group item\n",
    "    \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Extract features from the input feature map\n",
    "        Inputs:\n",
    "            x      - input features\n",
    "        Outputs:\n",
    "            x      - output features\n",
    "        \"\"\"\n",
    "        for group_item in self.group_list:\n",
    "            x = group_item(x) # run each group in sequence\n",
    "            \n",
    "        return x\n",
    "        \n",
    "# Residual network\n",
    "class ResNet(fluid.dygraph.Layer):\n",
    "    def __init__(self):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the residual network, H/W=(H/W+2*P-F)/S+1\n",
    "        Inputs:\n",
    "        Outputs:\n",
    "        \"\"\"\n",
    "        super(ResNet, self).__init__()\n",
    "        \n",
    "        # stem convolution layer\n",
    "        self.conv = ConvUnit(in_dim=3, out_dim=16, filter_size=3, stride=1, act='relu')\n",
    "        \n",
    "        # backbone of residual groups\n",
    "        self.backbone = ResGroup() # output: N*C*H*W\n",
    "        \n",
    "        # global average pooling\n",
    "        self.pool = Pool2D(global_pooling=True, pool_type='avg') # output: N*C*1*1\n",
    "        \n",
    "        stdv = 1.0/(math.sqrt(group_dim)*1.0)                    # uniform-init bound from fan-in\n",
    "        self.fc = Linear(                                        # output: N*class_dim (original comment said N*10; class_dim=100)\n",
    "            input_dim=group_dim,\n",
    "            output_dim=class_dim,\n",
    "            param_attr=fluid.initializer.Uniform(-stdv, stdv),   # uniform-distribution weight initialization\n",
    "            bias_attr=fluid.initializer.Constant(0.0),           # constant bias initialization\n",
    "            act='softmax')\n",
    "    \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Classify the input images\n",
    "        Inputs:\n",
    "            x - input images\n",
    "        Outputs:\n",
    "            x - prediction results (per-class probabilities)\n",
    "        \"\"\"\n",
    "        # extract features\n",
    "        x = self.conv(x)\n",
    "        x = self.backbone(x)\n",
    "        \n",
    "        # predict\n",
    "        x = self.pool(x)\n",
    "        x = fluid.layers.reshape(x, [x.shape[0], -1])\n",
    "        x = self.fc(x)\n",
    "        \n",
    "        return x"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tatol param: 2122388\n",
      "infer shape: [1, 100]\n"
     ]
    }
   ],
   "source": [
    "import paddle.fluid as fluid\n",
    "from paddle.fluid.dygraph.base import to_variable\n",
    "import numpy as np\n",
    "\n",
    "with fluid.dygraph.guard():\n",
    "    # build a random input batch\n",
    "    x = np.random.randn(1, 3, 32, 32).astype(np.float32)\n",
    "    x = to_variable(x)\n",
    "    \n",
    "    # run a forward pass\n",
    "    backbone = ResNet() # build the network\n",
    "    \n",
    "    infer = backbone(x) # forward inference\n",
    "    \n",
    "    # report results\n",
    "    parameters = 0\n",
    "    for p in backbone.parameters():\n",
    "        parameters += np.prod(p.shape) # accumulate parameter count\n",
    "    \n",
    "    print('total param:', parameters) # BUGFIX: typo 'tatol' -> 'total'\n",
    "    print('infer shape:', infer.shape)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "### 训练模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD8CAYAAABn919SAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJztnXd4VUX6xz9z00N6CCW00CHSCUgviooN7MJi17X7s9fVXevKuurae1nXAiK2FUQsNF2poUvvhJZCSEhP7p3fH3NuSXJTSbkJ7+d5eO65c+acM+ee8J33vPPOO0prjSAIgtD0sTV2AwRBEIS6QQRdEAShmSCCLgiC0EwQQRcEQWgmiKALgiA0E0TQBUEQmgki6IIgCM0EEXRBEIRmggi6IAhCM8G/Pk7asmVLnZCQUB+nFgRBaJYkJyena63jTuQc9SLoCQkJrFq1qj5OLQiC0CxRSu090XOIy0UQBKGZIIIuCILQTBBBFwRBaCbUiw/dG8XFxaSkpFBQUNBQl2y2BAcH0759ewICAhq7KYIg+BANJugpKSmEh4eTkJCAUqqhLtvs0FqTkZFBSkoKnTt3buzmCILgQzSYy6WgoIDY2FgR8xNEKUVsbKy86QiCUI4G9aGLmNcN8jsKguANnxoUtTs0R3MLkWXxBEEQao5PCfqxvCJSMvNJzyms0/NmZGQwYMAABgwYQJs2bWjXrp3re1FRUbXOce2117J169ZqX/O9997jrrvuqm2TBUEQakyDDYpWB6cnoaDYUafnjY2NZe3atQA8/vjjhIWFcd9995Wqo7VGa43N5r2P+/DDD+u0TYIgCHVNtSx0pdQepdQGpdRapVS9zel3eloayuGyY8cOEhMTmTZtGqeccgqHDh3ixhtvJCkpiVNOOYUnn3zSVXfUqFGsXbuWkpISoqKieOihh+jfvz/Dhw8nNTW10uvs3r2b8ePH069fP8444wxSUlIAmDlzJn369KF///6MHz8egA0bNjBkyBAGDBhAv3792LVrV/39AIIgNCtqYqGP11qn18VFn/juDzYdzC5XXmx3UFTiwN9PEeTvV6NzJsZH8LfzT6lxW7Zs2cJ//vMfkpKSAJg+fToxMTGUlJQwfvx4LrnkEhITE0sdk5WVxdixY5k+fTr33HMPH3zwAQ899FCF17j11lu54YYbmDZtGu+88w533XUXs2fP5oknnmDRokW0bt2aY8eOAfDGG29w3333cfnll1NYKOMJgiBUH5/yoTcGXbt2dYk5wIwZMxg0aBCDBg1i8+bNbNq0qdwxISEhnH322QAMHjyYPXv2VHqN5cuXM2XKFACuuuoqfv31VwBGjhzJVVddxXvvvYfDYdxMI0aM4Omnn+a5555j//79BAcH18VtCoJwElBdC10DPyqlNPC21vqdE7loRZb04awCUo8XEBEcQELLFidyiWrTooX7Otu3b+fll19mxYoVREVFccUVV3iN9w4MDHRt+/n5UVJSUqtrv/vuuyxfvpw5c+YwaNAg1qxZw5VXXsnw4cOZO3cuEydO5IMPPmDMmDG1Or8gCCcX1bXQR2mtBwFnA7cppcopjFLqRqXUKqXUqrS0tFo1Rlvec0cjuRmys7MJDw8nIiKCQ4cOMX/+/Do577Bhw5g1axYAn3zyiUugd+3axbBhw3jqqaeIjo7mwIED7Nq1i27dunHnnXdy3nnnsX79+jppgyAIzZ9qWeha6wPWZ6pS6mtgKLCkTJ13gHcAkpKSaqXIDl36s6EZNGgQiYmJ9OrVi06dOjFy5Mg6Oe/rr7/Oddddx7PPPkvr1q1dETN33303u3fvRmvNmWeeSZ8+fXj66aeZMWMGAQEBxMfH8/jjj9dJGwRBaP6oqgbdlFItAJvW+ri1/RPwpNb6h4qOSUpK0mUXuNi8eTO9e/eu9FopR/M4mldESIAf3VuHV/ceTkqq83sKgtB0UEola62Tqq5Z
MdWx0FsDX1vTzf2BzyoT8xOhbqPPBUEQTi6qFHSt9S6gfwO0BYfla5FAPUEQhJrjU2GLjTUYKgiC0BzwKUG3W4Iuui4IglBzfErQHeJEFwRBqDW+Jegu01xMdEEQhJriU4Jur8dB0fHjx5ebKPTSSy9xyy23VHpcWFgYAAcPHuSSSy7xWmfcuHGUDdOsrFwQBKE+8ClBbxUeRICfrV4UferUqcycObNU2cyZM5k6dWq1jo+Pj2f27Nl13zBBEIQ6wrcEPSKYsCD/erHQL7nkEubOneta0GLPnj0cPHiQ0aNHk5OTw+mnn86gQYPo27cv3377bbnj9+zZQ58+fQDIz89nypQp9O7dmwsvvJD8/Pwqrz9jxgz69u1Lnz59ePDBBwGw2+1cc8019OnTh759+/Kvf/0LgFdeeYXExET69evnSuolCIJQFY2zwMW8h+DwBq+7WpXYiXFoCKxh09r0hbOnV7g7JiaGoUOHMm/ePCZPnszMmTO57LLLUEoRHBzM119/TUREBOnp6QwbNoxJkyZVuHbnm2++SWhoKJs3b2b9+vUMGjSo0qYdPHiQBx98kOTkZKKjoznzzDP55ptv6NChAwcOHGDjxo0ArhS606dPZ/fu3QQFBbnKBEEQqsKnLPT6xtPt4ulu0VrzyCOP0K9fPyZMmMCBAwc4cuRIhedZsmQJV1xxBQD9+vWjX79+lV535cqVjBs3jri4OPz9/Zk2bRpLliyhS5cu7Nq1izvuuIMffviBiIgI1zmnTZvGJ598gr+/Ty0qJQiCD9M4alGJJZ2emUdWfgmJ8RF1ftnJkydz9913s3r1avLy8hg8eDAAn376KWlpaSQnJxMQEEBCQoLXtLl1TXR0NOvWrWP+/Pm89dZbzJo1iw8++IC5c+eyZMkSvvvuO5555hk2bNggwi4IQpX4loWuHYTaswmkbheJdhIWFsb48eO57rrrSg2GZmVl0apVKwICAli4cCF79+6t9Dxjxozhs88+A2Djxo1VprgdOnQoixcvJj09HbvdzowZMxg7dizp6ek4HA4uvvhinn76aVavXo3D4WD//v2MHz+ef/zjH2RlZZGTk3PiNy8IQrPHt8w+rYkuOkQhMfV2ialTp3LhhReWiniZNm0a559/Pn379iUpKYlevXpVeo5bbrmFa6+9lt69e9O7d2+XpV8Rbdu2Zfr06YwfPx6tNeeeey6TJ09m3bp1XHvtta7Vip599lnsdjtXXHEFWVlZaK35v//7P6Kiok78xgVBaPZUmT63NtQ2fS5aow+tJV1HEdeuc523qzkh6XMFoXlRF+lzfcvlohQO/LBJIl1BEIQa41uCDmhlww+7rHYvCIJQQxpU0Ksj0k4L/Uh2/UeZNFWksxMEwRsNJujBwcFkZGRUKUZ2bPjh4HhBSQO1rGmhtSYjI4Pg4ODGboogCD5Gg0W5tG/fnpSUFNLS0iqtV5iVik2XkOlXSMlRES1vBAcH0759+8ZuhiAIPkaDCXpAQACdO1cdubL4+SfodXwpj8V9yre3D2yAlgmCIDQPfG5QNEeFE0FeYzdDEAShyeFzgp5na0GIKsIf8aELgiDUBJ8T9FzVAoAwndvILREEQWha+J6g24ygt3BI/hJBEISa4HOCnq0tCx0RdEEQhJrgc4J+HEvQHeJyEQRBqAk+J+jZlqC3QARdEAShJvicoBf4hQFioQuCINQUnxP0Jy4bDkCnsOJGbokgCELTwucEvV1cLEX4E1xyvLGbIgiC0KTwOUFHKXJoQVCJRLkIgiDUBN8TdCBHtSDYLha6IAhCTfBRQQ8TQRcEQaghPinox1U4oSXZjd0MQRCEJoVPCnqOLYxQuwi6IAhCTai2oCul/JRSa5RSc+qzQQAHC0MILM4iee/R+r6UIAhCs6EmFvqdwOb6aognRx2hhJPPb9uONMTlBEEQmgXVEnSlVHvgXOC9+m2OwREUhU1p2odITnRBEITqUl0L/SXgAcBRj21xcf2Zg8xGvrhc
BEEQqkuVgq6UOg9I1VonV1HvRqXUKqXUqqoWgq6KwLBYc86CYyd0HkEQhJOJ6ljoI4FJSqk9wEzgNKXUJ2Uraa3f0Vonaa2T4uLiTqhR/mExAPiJoAuCIFSbKgVda/2w1rq91joBmAIs0FpfUZ+N8g81gp6fnVGflxEEQWhW+GQcOiHRAPyxay+/bD7CzBX7GrlBgiAIvo9/TSprrRcBi+qlJZ6ERAEQRQ7Xf7QKgClDO9b7ZQVBEJoyvmmh+wVwXIcQpWSRC0EQhOrim4IOZNGCKCUpdAVBEKqLzwq6CokmQtYVFQRBqDY+K+gtIluKhS4IglADfFbQdUg00YigC4IgVBffFfSYriSow8SR2dhNEQRBaBL4rKDb+03FXzm41G8xAA6HbuQWCYIg+DY+K+jBbXrwuz2RqX4LseGgyN4gecEEQRCaLD4r6C0C/fm3/Sw62NK4xe+/FIugC4IgVIrPCrrNpvjRkcR39mHc5f8l9tStjd0kQRAEn8ZnBd2geLXkQgKUHQ5vgPxjUJDV2I0SBEHwSXxa0K8f1ZlsHQqAo+A4fH0zfHtbI7dKEATBN6lRcq6GxqE1uYQAcOBIGjFZKRAQ3MitEgRB8E182kJ3ODS5GAH/Zd0OHAXZUJzfyK0SBEHwTXzcQgcHNvJ0EC0oQBdmg59PN1kQBKHR8GkL/ey+bQDIIYQW5GMrPA4lBY3cKkEQBN/EpwV9RNeWbHryLHJ0MC1VNkqXQHFeYzdLEATBJ/FpQQcIDfSnZWwsrZWV06VYLHRBEARv+LygAwSERNBWHTVfSvJBS14XQRCEsjQJQbcFh9MSjwlF4kcXBEEoR5MQdL/gCGzKwyqX0EVBEIRyNA1BDwkvXSAWuiAIQjmahKCroLDSBWKhC4IglKNJCDpBEaW/i6ALgiCUo2kIemAZC11cLoIgCOVoGoJezuUik4sEQRDK0jQEvayFLpOLBEEQytE0BD2obJSL+NAFQRDK0qQEPc/KjS6DooIgCOVpGoJuuVwOOyLNdxF0QRCEcjQNQbcGRdOIMt8lykUQBKEcTUPQA43LJV2beHRdJFEugiAIZWkagm5Z6Ed1BA6tsIugC4IglKNpCLp/EDkBMRzQLSkgkJLCfBO6+NEkOLi2sVsnCILgEzQNQQd+GP0lH9jPpoAA7EW5cGwv7F4MKSsbu2mCIAg+QZWCrpQKVkqtUEqtU0r9oZR6oiEaVpaBvXtQRAAFBDI3eTfHMg6bHUW5jdEcQRAEn6M6FnohcJrWuj8wAJiolBpWv80qT2RIAAD5OohgVcTWXXvMDkkDIAiCAIB/VRW01hrIsb4GWP8afA24iGAj6IUEEkIRQcXWCkYi6IIgCEA1fehKKT+l1FogFfhJa73cS50blVKrlFKr0tLS6rqdBPqbphYQQBBFBDsFXSJeBEEQgGoKutbarrUeALQHhiql+nip847WOklrnRQXF1fX7XThdLkEFmWaArHQBUEQgBpGuWitjwELgYn105zKeWLSKRQQSAiF4nIRBEEoQ3WiXOKUUlHWdghwBrClvhvmjXE94ygggGCKCSi0LHRxuQiCIADVGBQF2gIfKaX8MB3ALK31nPptlneCA/woIJBgitibsp9WNnAU5TadYHpBEIR6pDpRLuuBgQ3QlioJDvCjQAcRbCskWpvAG0ehCLogCAJUz0L3GYIDbJYPvQilrEhKSaUrCIIANDFBD/SzsVe3IkwVEIaVQrdYZooKgiBAE8rlAqCUYqWjV+kyiXIRBEEAmpigA9x3xQVka7MUXaqOQsn6ooIgCEATFPRTu7Ui2dEDgIM6FltJATgcjdwqQRCExqfJCXqAn9vtckDHmkJxuwiCIDRBQbfZ+MY+km/tI1jr6GYKRdAFQRCanqDbbIrLJ4zgzuLbOWqtMeoolEgXQRCEJifoAMcLigHIIwiA/LzjjdkcQRAEn6BJCvr+TONiyScQgIJcEXRBEIQmKej3ndmTnq3DydfBABTmi6ALgiA0
SUHv3jqcd69KcrlcCvPFhy4IgtAkBR3MCkZOQc/Ozmrk1giCIDQ+TVbQA/wUBdr40P/Yc4gXftyK3dHgS50KgiD4DE1W0D0t9C37j/Dqgh0s2prayK0SBEFoPJqsoAf42cjDDIqGUghAYYk7BUBhiZ2BT/7IvA2HGqV9giAIDU2TFfRAPxuFBODQihBVSCQ5tDy4wLU/7XghmXnFPDVnUyO2UhAEoeFosoJusylAkU8goRTyiP9nDFl6GxRkAybVLoB41QVBOFloUgtceCOPYHqpfQyxbUWhITcNgiNQ1n4tii4IwklCk7XQnXxpH8Nov40EK5MOgBwZGBUE4eSkyVvo/yi5nBAKSLTtZYhtm7HQwRXCqMXpIgjCSUKTF3SNjb+VXEscmawMvg1yjYXusHwtEpouCMLJQpN3uTjJJByAjNQDADLJSBCEk44mLejdW4UxeUA8ACX4c1SH8f3S9azZl+l2uYiuC4JwktCkBf2ne8by8pSBru8ZOpJYlc2Fb/zOku3pVqkouiAIJwdNWtDLkq4jaalMoq7V+zIByC20szMth/HPL+JQVn5jNk8QBKFeaVaCnkEEsZiJRWGBZrw3v9jO6S8sZnd6LrNXpTRm8wRBEOqVZiXoaTqSOMtCbxHkGcCjuci2BP+CjMZpmCAIQgPQLAT93H5tAcjQEUSoPAIpRin3/gFqJy8GvsWU9dfDsf2N1EpBEIT6pVkI+ut/GkT/DlGkEwlALNm8/9tu1/4u6iAA0YUpsPS1RmmjIAhCfdMsBB3gzWmD6NW1K4BrYNRJF9shirUfh0O6QcaOxmieIAhCvdNsBD0+KoRrzp8AQH/bzlL7OqtD7NOtSAvqBBk7vR0uCILQ5Gk2gg5AXA+KY3pwnt+yUsVd1GF26zakBbaDY/vAXtxIDRQEQag/mpegA0U9JzFUbSGOYwAoHCSow+zWbZmbEgrabkS9OWEvgVUfmE9BEE5aqhR0pVQHpdRCpdQmpdQfSqk7G6JhtaWk12RsSnOW30oA2nKUEFXEbt2WPbq1qXR0VyO2sB7Y8yvMuRv2/d7YLREEoRGpjoVeAtyrtU4EhgG3KaUS67dZJ0Cr3mx3tHO5XTrbzJqiu3Ub9uo2pk5ZQd8wG/avbMhW1i35Zlasc7UmQRBOTqoUdK31Ia31amv7OLAZaFffDast/jbF945TXW6Xi/1+pUTb2OZoTzoRlPi3gKO72JGaw8It1mIYPzwEv79Suws6HLD4OcjcU2f3UGMKjHuJotzGa4MgCI1OjXzoSqkEYCCw3Mu+G5VSq5RSq9LS0uqmdbXAz6aYaz8Vm9K8GPAGF/n9xuv2C8ggElBsL2kFKSs568UFXPvvleCwQ14GZB+o3QUztsPCZ2DTt3V6HzWiwArTLDreeG0QBKHRqbagK6XCgC+Bu7TW5d7ttdbvaK2TtNZJcXFxddnGGuFvU2zT7dnk6MRov40ssffltZILXPs/LR4HB5J5PuAtQDPokVmgHZBVS0E/stF8Ot0ejUG+ZaEX5jReGwRBaHSqtWKRUioAI+afaq2/qt8mnRh+NgUoLi96jABKOEpEqf2f2M8gTh3jTv+v+cI+llQdZXbkHIGSIvAPrNkFj/xhPvOOnnjjq0Jr087wNqXLXRa6CLognMxUJ8pFAe8Dm7XWL9Z/k04MZSVxOU4o4wcl8tF1Q8vVeaNkMgd1DPf4z3Yl8wINxw+VP6HWla+ScbgCC93hqEXrq2DXInixN6SXme1acAIW+s9PwMxpJ9w0QRAan+q4XEYCVwKnKaXWWv/Oqed21QnxUcEM7xJbrryQQN4smUSSbRsjbRvdO7z50X99Ad4aXfFFnBa6p6Bn7oFn2sDBtbVreEUc3WXcQwdWlS4/EQt93zLYv+LE2yYIQqNTnSiX37TWSmvdT2s9wPr3fUM07kTxt9kI8HOnXezfPtK1vdLRC4Ahtq3uAzz86HvSc3E4NBxIhiMboCiv/AXyMyE7xb3t5PBGsBfCwTV1
cyOu61luncMbypQ7o1xqIejZKZCXXj9vFIIgNCjNbqaoJ/5+yuWCAZjQu7Vre59uBUA/5Y5Jd2QZcd5+5Djjnl/EG4t2uNPtegtLPLLJfIa1KS3o1nlqNSN1039h0T+878uzrnFkY+lyp4VeU5eLwwHZh4zVn98AYwCCINQrzVLQI4LNWK+ndQ5w2/huru08gknTkYSoItJ1BNk6lJJMI977M401vmpvpluUM3dTDue+doNLD4pm7S+9vyYsfwv+95J3i9lloW8s7dcvqKWFnpcODiuvTW7jhZoKglA3NEtBv2hQe8C4XDyx2RTxkcGu73utVAAZOoKDOhZH5l4oyGbtfmPxhjlyodCyfr1Z6E6fe+tToCQfivNLl9dU0O0lcGA1FOd5H6DNs1Zcyks30S5ghL22FnqWx5J8Oak1O1YQBJ+jWQp6sd1Yt04LffbNw1ly/3gAggL8XPX2Wm4Xp6AH7/4Zxws9mbtgEQBxDg+RO+rFQs8+CCExEGFWTHL5ssu6XFa8C+9NKG91b5sPH19oJjcBpP5hOgbwnrc97ygEhJptZ3RNUS44rKRcNbXQsw+6t2tioWsNKdbA7O+vwdYfanZdQRDqhWYp6GHWeqLhwQEAJCXE0DHWCGGQv/uW9zksC50I3io5n7dKzqdAB/BCwJu0V2ns320NmNoCKrDQD0JEOwiJNt+dLhGnoOcchpJC2PxfSFkJ+60JtnPvgz++gY1fwc4FkL7dlKd4RK8c9ZK3Pf8odBxmtlOt6BqndW7zbzhBX/MJvHe6GfRd8hys/k/NrtsYHN0Nn1zs7nQFoRnSLAX9rgk9eOScXpzfP77cvgPH8l3bni6XFbo300umcm/eNQyw7eK3oDu5y/9LU7HDUO8+9OMHjXUeEmO+52eaXOvHD0NUJ1N2bB8csKJd/vgKjh+Ble/CyvfgkBXW6IyGSVkFoS3BP9j7Qhx5RyGmC4S1hjSrs3H6z8Pjq+dyOX4E/vt/pm52iumsbP7VF3StYcXbZnv/CtOheHMPVcbuJaZT0xr2/t4wOWh2LYQdP5fuNAWhmdEsBT0k0I8bx3S1Zo2WpsTuHkzc5+FycTLPcSoTCp9jjaMbfWx7KNABpEX2hcy9xjXisJvY7fQdloUe77bQM3bA3v8BGjqNNGU7fjE5VgJamHwvuxeb8pSVblE+uNpEzGybBx1OhZiu5V0uDrsRz5AYiOsFaVtMudNCj2xn3DVV5UTf+j2s/siIm7P9oS2r70NPWekOm9y50HweP1y9Y8F0KF9cYzq1fcvgw3Pg91crrp+xs+Z5crSG7T+X/i2cb1jp22p2rsrI3FP3oamCcAI0S0GvDI8oRnboeHJ1EDt06eSRO3R73i45D4ADuiUvJJeYaJCMnfD+GfDBWfDZZcaq9XS5fHcn/Gey2e403Hz+YWVKGH23Gchc8LT5XlIAaPALNDNAPzof/ILgjCcgtkt5Cz3/mKkf6hT0rUa4nC6ESDMQXKXbxdmJ7PnN7TIKi4PcdFO+93dY97npwLyxfpbx47doZc4B5r6c4wBV8csT7oigZa+be9rxi5mgtf3n0nXtxfD5FaYDOLwB3hwFuxaXrlN4HP59njmHk4Nr4NOLYcscd5nzfjK2V6+d1eHHR+Hzq+rufIJwgpx0gm6zFP3lKQOwhUZzauHrzHOUTw/wk2MwB3UMu3UbfrX3NYVz7zETjdoOcPu4I+KNyJahsM1gsnSo8ZsHhsPIu4wQH9sL7Ye4K/aeZKzGgiy48mto2R1iuxkXj6eF6fTPh8RAXE8j3Fkpbgs9wuqUyrovtIYv/2x89uC27HctMr77yHbQIg5yU40b5t/nwdc3wuzrvP+AOxdAwmirDVZ2R213dwhVsX8F9DjLuJW2zDVlB5Jh1lVGuD3vecU7kLrJxMnPmGomeC18pvT5fn3RLPCx0SPFkDNO3zPvvctCr6WgH90NPz9uUiU7lzBM2wZZ+8xbx7wHGyafjyBUwkkn6E4DfVDHaNb+
9UyWPXEhz13Sv1w9O35MLXqUx4qv4wBxFHcYaYQjOBLO+ae7YkS8O/IEYMz90Gkkx0I6cEPRfWTTAjqNAL8AGPugqZN4gRH3sNbQ82xTNuIOaG2tGxLXy0SupHvMYnWKRWgstOptttO2uic0eVro6z43UTXf3g7bf4QNs2D+I2aANn0bKJuxVHNToe9llqCnWakF7KbDObCqvPhl7jUdWdfTIKZz6X3JH8LLA4y4lUVrSEk2YZ1Hd0GbvtC2vxHqoEhzzYwdpoM45JEuYcU7pvPoMMzE9gdFmg5yn7VmbFYKLH3dbKd4pC9ItTotz7BRT0FPWWUmjB0/Yn4rrc0SfpXltF/2Bvz2L9Oh7F9u3kic4yrL3zL/Nsyu+HhBaABOPkG3FD000IQvhgX5M7FPG6919+o2HMLkgnlk1ymmsO+l/JafQGGglaUxPL60H2f8X+Da7ynGn5W6F5cGvgEXWYOIiRfARe/B4Kth/CNw2mPQ6zw453kY+4D7HB2sNwancK373D0QGRptBB+MayH5Q4js6LbQC48b0cncC2s+hi+uNW6d7ANGdLIPQA+rE+kyDrqfYQQ9J83ttx/3kBH9NR+b8zns8ONjplMA6Doeoi1B97OyU67+jxG4X54wVvb+Fe5UCoufg/dOg5/+ZsQ7rpeZjAUw6EozvtDCjGe4xhiO7jYC23uSqQNw6QcQHAUr3zffdy4wKRb6XmY6Kmenl2rN4HUKev4xM3gc2tJEHn14jntRk69vhI1fmiX8fn3B25+B4cgm8zuBaVdWCtiLzPcNX5jPXQtLH+Oc/LV7iXvGcWXYS+CNEbD8narrrZ8la8gK5TjpBP2+s3oC7pBGgGB/v4qqu5hjH8bnJeNg+O1c8cFKfs7vAUBxWBuyC4ph3CNw4TsucS+2Bl8LAiLdPnabDfpdCkHhkDjZErNgGPpnCAhxXyy6s7He9y0zYjr/YSM6YFwuoTGmzsKnjZCd/xIEWwO72380bp2znoGk66A4F4beCO2HwgLLXdF/Coy+F877l2lvWGszoHog2ezvOAK6jIf/vQz/SIBPLjLit2WO6Tha9nBb6G0HmE/nZKq1n5rEZO+fAV/daER30d/NvjWfmM+4nm5BTxgN570IF70DrRJh969AxzLwAAAgAElEQVSm3CnsXcbBgGlw63LoNgF6TDQDug67mYQVFAkDrzB1ne1PK2OhH7P8590mmE97oRFZp9/9+/vM57b5Zq7A/hXwznjTyYER5tRN0P1MUH5G0D3DSp0zg3f/6nbH7FwIz3Ywg80zpsKi6VTJ/uUmHHXh05WHV26ZA1/9GdZ/XvU5hZOKauVDb05cNTyBq4YnlCormyLAG/kE82DJjVwSlQBs4oOSsxk5sB/PzN3LF8kpPDHpcsiFq636hSVmkNDfS6RNlSgFHU5F71/Gql9/YIhzhii4/fVXfWNCH0NbQrfT3Zkdl79tfPa9zjPi5x8Mw2+HxP1GZMHMbE2c5D5nvCXKG74wbxyBoXD2cybqZu9S2DoXBl5prhMcadrntNA7DDWRL2gYfI3prGz+xh20/UdYN9O0p8dZsHG2sfxju0HLnjDpNfOGYLM61M5jIPkjk5d+1yLTlpbdzfVaWW8l3c+A9TONeB9cDe0Gms5B2UwH2D7JhFH6Bxuh1drtSnEeG9HOdEBp1jot+Zmmfs4ROLQG1s0w5/7fS6ZjzEk1Yxht+pmB4Mw9phME06EUZkF4W3PdlFVmQHzrPONC2rnQuMGc8wbAiHVIVPnnvm2e6TAKsoyLZ/wj3v8+nPMZ1nwMAyX1seDmpLPQveGZwKsqznjRWI7Juid7hzzGF8lmEtHf/vsHf/vvH+xMM1EmBcXO2aq1/Ik7Dkcd28ehn1/Frjz63SDLEo9OgDOfhlF3WeXh5rPgmHkLCAw1VvvEZ02sfIeh0G+KEWRnjLyTdoONCOccgdiupqxlN+PXn/Ip3PALnPcSnHKh8Z+DEdroBOg81u2K6DTStGnC4+at
w1FiOokuY93WcXSCeRvxDzRvKDaPt6OE0eZNIWWFiWbpMq60OwtMp6L8zGStI39A/CAICoOOw81bxA8Pm3qdx5hIog2zTccHRtDPed4MPjtHUwZZUSqj7zOdwtZ5bst95fuw+Tu3X79Vb9P+zD1mLCAg1IyPAJx6s4np/+Ehs3/fUlPudMOkbjHW/96l8FwX0wGv/ti8bRw/AsveMoPEnceY33TLXBPpNGOqWfw7/5jb+ne64vYtNcncigsQBDgJLfQTZVe6O4pk8uv/K7f/WF4xc9YfJP14IVB7Qc+OH0EYikl+S9kdMYzOU54zlnBFnU9gC/f2yLu815n0qhFtvzKPPbCFGaQ8kGwmLnmilLF6vV3vznVmO7yNGWBt08+9v8Mw41+3FxkR7mzllHf6/72RMBJQxpedfxS6TyhfJyTazJZd+b7pMJyum8s/MYPA62YYYe55jnlD+PpG046up5nObOifTf34AZCxC876O0R1hFNvMQK57E1jUQ+/HdZ+ZsImA63OslWiEfSt3xvXV0wXiOthLOsu48ybx7e3wqeXucMjdy4wnyX5Zoxh5y9mHGHdDFj1oel04weatoLpGHJSzQDsmk/MtbbOg1+eNO2/9N9weD0MuMK88cy60nSEV39X8d+GcNIgFnoZ3rvKi3jVgPScQm7/bA2Pf2cG5rxNbqqKtxfvpN+bB/hT0V+YYz+VZW2nGsEdckPFBzkt99huEN3Jex3/QIjq4H1fx+Hu42tKeFvwDzFWu5PAUOO3B+h6uonC6XuZsfIrIiQa2vYzImgLcFv1ZZn4LPgHme12g8xnaAxM/Qzu2QI3/epOkaAdMOUzyyr34Ky/w+RXzZvNmPuNlX/mU+4Ea0Ouh3u3mIHrouPmLSQszgh6bpqx2mO6mLGQUy4ybqze1gB3+lZzXZQ7iRpA6ma3db3yPePLz00zYj7kBjjtURjwJ9PZaLt7zGHRs2ZWb9oWeGes6ch6nw93rIbxj5roq7Wf1iytgdbG56+1eYvxjOMXmiwi6Ba3juvKmB5xTEhszZ7p57LovnGM6tayxufZevh4qe+BloWutWbJtjQmv/Ybf/7PKuwOM2i66WA213y4goJi98ScZ+eZQb1ljkRuL76TXeHl4+TLERgK1/0INy2pcZsBtwDWRtD7T4HR95R2n4Cxhgde6e5gLn4X+l1W+bkSLEs+YZSxSL3Rtj9c/xNc/L4JG/Ukoi206QORVscV290M8Jal0wgjxp60PsWIe8JoI9b+QWbwOPECMwYARtDBCHXiZPOGcOmHJiwVoM/F5prKz9wDuFNDHF5v3oICWhhRDm9rxjoiO8IZT1odS7i5PzBvPWAs+4BQ+PMvpmNRfsaFFtnO/O6t+8C3t8ELPWH/ysp/XyfbfoCPzjPRMt/dCT/9tXrHCT6NuFwsHphY2hWQ0LIFHWJCKqhdMS/+VHpqeYqVW/2L5BQemL3eKs3i581HuOnjZMKD/DleWELy3kxGdmvJWf8qL8iLtqZx89hCYsOCKr94x1Nr3F4XPc52D1LWlD4XeS8/5QLzryZ0HgtLXzMuk8po2b30G0FZgsJM9EvPc0x0UXUZ/3Dp70rBZR+5vzsFPaqjEfqy2Pzggjfh8DrjA9/zq/G9Zx8w4wnFecZf/+vzpkM46+/mrcDTZRbZwXQC+UfN77F7selQ4gfCTYtNSKpzcNzmZ8Y5di0yrqovrjEusiE3GDfX0d0mQmrEHfDNrabjHvpn96SuHx81LqYjG00Kh7ILkFdGcX7p6Cyh0RELvRICPfzfo7vX3FoHOJhVwKGsfBZsLp0r5evVJszveKGJJT6QmU9hiZ2tR46XO8f21Bymvbe8VtevNn7+ZpDSaWk2Ft1Oh3NfdIcingiTX4de5574eTyJ7WbcW2PuLz8W4aTDECOozvEI5+Cxc+Zq0nWmbaOst5qgsNLHK+W20k//qxnEHmTFT4VEu6OSnEQnmAijSz6Ewmwj1kv+CYfWmayYC56ChX83
PvffXjLx69vmA8q8BSjrzcqZmydjZ+VLEhYeNwuLP99T8uj7GCLolTC6e5xr+5LB7XnrikGV1m8V7t2C/njpXn74o3QCq4KS0rlPftp8hOMFFU8U2XK4vNCXJSuvmGN5Ra7vaccLeXvxTrTn6kYWu9JyvJZXlxK7gx2pVbepxtj8jP86MLTquo1BcATcv9MdHVMZzlj96AQ49wUzqeysvxtXycArILx1xcf2mGhEvd1guGu9mcxVFe2T4OH9MPJO82bw9c1mQDiyownBBOOLX/aGEXLnAHGfi4wrZ+cvJrro1UEw587Sq2KBicvX2px36/cmXLNsLHzeUTPrtrq5fYQ6RQS9EiYktmZ4FzNT1M+m6BTrfi2ODClvyY7oGuv1PKv2ZpYrW7S1dLranzYdYdaqaswm9KCg2M4Ds9dxKMsM5PV/8kcGPPmTa//9s9fx7LwtrE/JKnXc6n2ZnPbCYv6ztHwCroJiO9+uPVCl2P9z/lYmvLiEfRleFs9u7vgHVq9eq0QjqG0HmDeffpfC8Nuqd+ywm814SG0iV/pcZAZlUzeZdBODLeu+wzDjv//prybufuxDMO5hM07Q9XQTTfPjoyaEdfV/jItmxbuw+J9mpuyLvU1O+S1zzLHth5qBW8+/lZ8fN7Nut8miJ42BCHoVjOlhrPSOMaH0bhvBgnvHsuOZswkOKP/ThQZ5fwXPLazeFO3nfthadSUPVu/LZNaqFO7/Yr3X/XlFxkrKLSp9/Z2pJlZ+3f7yUREv/LiVO2euZcn2ypNtLdttptmn5xbWqM0nFRHxcN+22o1LnAitEk14aGQHM8t2wJ+Mm2jYLTDkOpNL5+rvoEWsSfPQqjec9hcj8ofXw+l/MyK/YZaZRbvwaZhxuck4uvMXk35h2C3mLSNtC6yyUjGkb3dH5jhj/x0O4/55/0wTg++kKNeEnn53l7HqM3bCofUm35BQa2RQtApuGtOF8b3i6NXGhAV2iTP+zsGdovl+g3GjTOjdmm6twrhuVAKfLTfTzYd3iWXpLjPD84+D2XXerrX7j7HCEtVdaaVT5q7cc5RVezJdqzMVljgosTuwKcX/dqbzi+XP9xZSmZ5jXDap2ZVPVnFa8DaJfa4cZ9qHhkQpE6oJ5m0iIh4e3GPcWRUNUkd1hKlWbHzSdcavP+hqM2A65x7Yv8wMrDocJlQ0OMJELG36Bubea3LVpKw0ncLAaSaxWvoO87nibdOhfDjRuKr8g010TcExQJkU086soa37wLXzzHbaFjNxzM/fRAetmwmj7nZHNv34qAkF/dMXNRv4bsaIoFeBzaZcYu7J85f2JzwogM9X7ScyJICHzjZRMg+f3Ytn522hT7sIl6DXlHP7tWXueu+rAL36y3YS4yO4/iP3yjvFDs0RDwG++oMV5BXZ6dvOhP0VlTjo9pd5nJnYmh83ueOi/b2kPHC+eRSUVDIoBjhOwP8uNADOGb9OyoaUeqPDUHdiOHCHm17whklTPPJuY9U7CQiBabNNWmmnj/6id80A8JpP4L+3mzQFSdfBGU+ZePplbxg3VPczLfeTgp8eM99DY+D7++GVAVaiNQ3DbjMzj7+60SSPW/uZmZ8QHGkWagHY9LUJFwUoslyAnmMwOWkmiV1ItHvcoJkigl5LQgP9Gdwpms/L+L2dFqvjBPQuroLwxKSnfyY9p/wrqdaav3670fXd6Wo5aC2350xH4CnmAP42G3aH5uGv1nPNiM4kxkcQbC2iXVhs5+Nle5m1cj/f3TGK/yzdw4TerYmPCrGuiaue0MyJ7QoXvO59n80Pzv2XEdjQlu55BqPuNlk/A0JN4rqgMJMXZ/S9EBhWehzi+h/d26Gxxnpv088knlv2usmDk7HDDCwfWGPSKWQfgJ7nmsihHx4xOYyK8tyJ2TqNMG8DaVtNGGxxHqBMYrhdi80Er/A2kJdhkrfFdDH5i7IPmvkBEW1NWGaLSqLbHPbqdZQNiAj6CTCmRxw2BVePcM/MdHogqmvB
vnhZf56dt4W0426hjqsgWsabmEP5YAQnGbnGfVKRb/7jZXs5vXcrZq1KYdXeTBbcO45Ay02TnV/MK3M3A3Aku4C/fvsHM1fs5/s7zcQfZ4dVWIUlL5wE2GxmYpQnI+4wM2ATJ5sZtk68LAZTisTJ7glfRXlGYA9vNCkRhtwAzrVhSgrNxK/tP5s3BL9AiIgy0UBKmdmvX15v6vY6z3Qwn11mVgZTfiaCaN9S07m07W/i+F9Lsmb4YmYqa4dxH0W0NxFJLeJMR7D9J+OKyj5gkrS1G+wOUR11d9X3WI+IoJ8AbSKD2fWs9zjn6nokLhrUnsT4CL5Zc5C3FpuUrC3DqhlFYVFsdxASUDtL4du1BwH3wh+FVlIxZ2cAuGaxZuUXu8qcPnQRdMErASFww89V16uMwFC4bp73fc7UD90nmLDOsox/1MThR3V0h4ee/ZxxHZ3/UmnXEphB2YXPmGifnMMm1j7vqBXF4/EWqmxm5nGLliZrZ1aKycy5/Uezb8j1IujNiUn94/lo6R6uHpHAv3/fU27/V7eO4N5Z69jtkeSrV5sIHjo7wiXooYE1eyzZBSWkVWC9V4Uzva+/NaiUZ0XEHLDcNQA/bDSDv97SDBeW1N7lorVmZ1ou3VqFVV1ZEGqCf6CZ4OVJ30vMP2/EdoVLPihfft6/jHWWfcCkWQ5rVT7dhA8hgl7HtIoI5tcHTvO6b8PjZxIeHMDC+8ZRbHdQVMa6ndC7FT9vTnX5sWvC/3bUbgDW2RFsPXKcHzYedk1uckbQgDu3DEDnh+cy8ZQ2LpeS06L3xrJdGRzLK2JAh2hOf2ERX9w8gsR49wCzMx3CpzecykiPvDk703J46eft/N9p3ejWKqxG6Y0FoU5x+sijO1Wc9M6HkFifBsRzlaQAPxstysStv3NlUoUx7h9cc2JZICsiI8ftWrn5k2TmWda4c2DVkz0ZeWgN8zYedvnQ7/1iHX8czCI9p5B9GXl8sWo/CQ/NZeOBLKa8s4ybP1nNj5sOk1tk55xXfnXlulm4NdWV26Zs2OWDs9fz3bqDnPGvJbzyy476uG1BaJaIhd4ALH/kdPZ4uFgqwmZT2FCuHDJDEqLN+sV7Mwny9+Mv5/Tmme8312nbMj1SBdQEu0cYz/R5W/htRzpaQ5uIYADOe/U3137PtAWv/LKdYV1iuPZDd1ZATws8p7AEz+GHeRsPceeE0km4/rN0D33bRTKwYyPEeAuCDyOCXo9cN7IzuYUltI4IprUldNXBU+BCrMWsSxy63Czw8/q1JSUzn7VeZnxWxITerfl5szt8MSUzv5LaFeM5BmB3aNcgcJvIYA6XmZTknGzlxDOiB0xQwv/NWEPPNuH8c/7WSusC/PVbs5zbnunlB6S/TE4h0N/GqG4tycgtEv+8cFIhgl6P/PX8xFod5xRurXHP9iy2l5qVGR0awGt/MsnCSuxm4lB16NUmvJSge+O+M3vw/I/bKq3jSYmHtV6dcM0Se+k6R7IL+e+6g7CufN2M3CLScwppWUFs/v6jeazel8nkAe0A4wICE/qZdrywlOjvSsvhcHYBI7rWLnOmIPg64kP3QZzCHRzgR5C/sdALShw4Z+p3ig11xYMD+PvZSH50Ai9PGVDuXGXp0678rFcn95zRg0sHt2din7Y1am+J3T0wWjYRmDfsZWZdHcmqPM1A0tM/o7UmK6+41LEHj+Vz+dtLuXPmWortpQdnvVn2p72wmD+9u5yVe46W2ycIzYEqBV0p9YFSKlUptbGqukLdMLBDFLeO68rzl/bnquFmZD2pUzSdWppsjzeM6kzbyNILC8SGBXFGYmv6tIvg5SkDWP7I6Sy+fxy/PlA67erwri1Zcr/3VKxXDe/EPy/tX2M3xep9NVj6jNIWPcDKvVUL7PBnF9D/yR9ds18BRkxfwEGrM9h8yHu+nGK7g7nrD7lmywJc+pY7SdTa/ccY9NRPHM2t3ViCIPgS1bHQ/w1MrOd2CB7YbIoHJvaiTWQwp3aJ
Zc/0c4mPCmF8z1Z8fesIrhjmPXwqNNCfOXeMZvKAdrSOCKZTbAs6xISy6cmz6BpnOoPIkAA6xoYy/64xpfK7f3vbSKJC3ROa/nlJv3LnB7h5bFev5TWhrIW8K63qAWOnX74in/+k1/5HhpdY/Gfmbua2z1Zz+guLvR736i/bOZpbxIwV+6o1cC0IvkyVgq61XgLIO6qPMLBjdI3jskMD/fnq1pGlrPWebcJLuVb6d4gqdcylSR347IbyS9qddUprLh7UHoCYFjWb0erk6zUHKtw38ZTKl0DbUSbE0RNvfv/fd3pPA6y15vsNh8ixUhv/c/5Wxj2/iOnztvDzpsrHGLzx1JxNjPvnwhofJwh1SZ0NiiqlbgRuBOjYsWNdnVaoIyJDArwuyrHur2dSXMFyY8787kMSolm5xyzS0TYyhBcu68/YnnEkdYrm4LF8XvxpG7/vNBObEttGsKkC90dZzjqlNfP/KC2ebaMqjwZ67JuKPX8/1UCIH/l6AzNWlF9QxDlb9+0rB3P/F+v4+0V9OeuUNgT42diRmkNBsZ0+7covXv3+b7urfe3qcvnbS4lpEcibVwyu83MLzZM6GxTVWr+jtU7SWifFxcVVfYDgE0SGBlQYQdK3XSSvTB3IS1MGcsng9oQH+RPdwnQKk/rHEx8VQlJCDJ/9eRgPWotsP+fhqln+yOmVXvvPo7vw4MRe3D2hB+2sLI6nxJcXy+pSUfIyb3gTc0/unbWO7IISbv9sDa8u2EGJ3cFNH6/ivFd/I7uguFTd/UfdqzY5c9w8M3cTXyaneD23w6HJrIbPfvnuo66JXoJQHSTKRagQP5tiUv942kWF8Pyl/Vn3tzNdUTdluWlMF+bfNcZlvbYKD6J1RDDf3DaS/94+slz9t64YRFJCDLeM68qdE7pz/1k9iQoNYEAH74J+05gutbqHbUcqdtFURo7HKlOv/LKde79Yx07L1z/+n4tcwp1fZGf0c25XizNZ2bu/7naFUHqSlV/Mg1+uZ+BTP5Va/3XzoWy2VrBu7HSP1AuCUBkShy5UG5uXFY489/VsEw7AikdOd02IGlDGN98uKoQDx/JpF1V6EegLBrbjgoHtXOujAoQE+JFfbGdS/3juPbMnGw5kuVw7tSW2RWCpTJLVxZmVEkxs/NUfriR5z1Fem1Z64fA/DmZRVOKO4ikotqO1mSCmtWb0PxaQbeXL2ZGaQ4824UQEB3D2y78C0LN1OHP/bxT+fm5b663FO7n9tG6EBPh5XWVKEJxUJ2xxBrAU6KmUSlFKXV//zRKaMq0igkvlrfHk4XOMa6ZTy1Cv+4M93gBKLN/+vWf2INDf5krgdWrnGObcMYr3ry6d3+b/Ti+dIsAblc3YLTswXBlLtqWRW2QvlcIA4OI3lzL13WWu7yOmL6D3X39g0mu/0f0v81xiDnDJW0s548XFpRYH33rkOMt3l49B6PO3+Tzy1YZqt084OalOlMtUrXVbrXWA1rq91vr9hmiY0Dw5r188e6afS0QFgu+ZadIZr+609v80tCMPTuzFg2f3ok+7yHJRNvec0YM9089lxSOnMyShfJ6X8T3jOKdvxVE03eshTYAzvn19Sla5+Hsws2SdScqcTHtvOQu3pJar+/mq/RQU27nu3yt5bcH2cvsPZxXw6DcbymXxFE4exIcuNAin9WpVrXrOVAdTh3Zw5YdxLt4R3SKQW8Z1ZZCVlMtz1uht493x8a0igjm1s8fal0Dnli147+oh5d4chndx1wsL8u6BrEroe7Su+47g2n+v9Fo++KmfWLAltVyI5vR5Wxj27C98smwfj3/3B6v3Zdboer9sPuLKe79kWxrvLNlZu4YLjYoIutAgvHPlYDY/WfX8NJtN8ccTZ/H0BX1dZRXlh3eK859O7cj9Z/Uqta/AY63Tvu0iWXjfOPxsijE94vC3Ke44rRtbnprIx9cPdQl5iyBznbKrP/10z9hK29w1LowHJ/bixcv6lyqPj6x+QrbqkuuR1vi7
dQfRWuNwaFe4JZhkaBe98TsHj+UzOzmFrLzSUTkLt6SWm4R1/UeruPmTZABu/XQ1f/9+C/sy8hCaFjIoKjQI/n42KgiQKYczT/z1ozrz/m+7CfDzbnf0bBPOJ9efSpIX98q1ozqzam8m71+dRKxHWGbnli3Y/szZpSZnLX34NBwa5v9hLNR7z+zB09Z6quN7mhDcaad25NPl+/jmtpFc8Pr/Sl0rPiqEW8aZN4SLBrWnz9/mk5QQzaKtadW74Vry3PwtPDB7PaO7e082NmL6AgCGJsQw6+bh7D+ax5uLd/LZ8n0M7hTNq1MHMvn1/3HNiATXMVl5xa4In5TMPIIDbWw/klNqAZLKePnn7WTmFfH4pFNO7OaEWqF0dRe/rAFJSUl61apVdX5eQahPzOzRw0zs04Yvk1PQaC4d3KFcdM/Hy/aS2Dac33dk8MJP23jsvESuH9XZtb/E7sDPpuj88Pc1bsPgTtEk762Zu6Q6KFW9dW6/vW0kk60O65WpA3nhx63szcjjtT8NZMaKfUzqH0/PNhHlopecJDw0F/Ce2lioHKVUstb6hFayEQtdECyUUpzbz6RDuGxIhwrrXWnl0mkTGYJDG+vdE2fI4bw7R7PlcDa92kQQ5G/jtBcWEx7kz3GPGPdhXWKY1L8d3VuHcUp8BHPWHSJ5byaPnNOL7Udy+CI5hVenDuSU+AhO88hH06VlC3bVIPdMde22UuGZOYXstdwud85ci92hXUsdfnbDqXy95gCPnZ9IRHAA244cZ9Jrv3k9Z3VZtDWVIQkx5VbyEqqPWOiC0EAcPJZPeLA/L/+8nZ82H2FvRh5XDOtYarxAa826lCwGdIji4LF8vt9wiKtHJHA0t4hT//4L7aJC6NkmnKcu6MNIy6XiSUJsKIM7xfDcJf3IyC1k6DO/VLt9ZTub6nLp4Pb4+9mYscK9kMm2p88m0N/GwWP53DNrLVcOS+C2z1Zz5+ndCQ/2Jyu/mNtP60aQvx/7j+Zx4Fg+U95ZxqT+8bwydWCN29AcqAsLXQRdEBqBnMISHpi9jkfPTSQ+KqTK+lpr3vt1NxP7tKFDjInhd7o3PHn2or5MHep+Y0jPKWTqO8vYnlp6xuxpvVqRnlPI4E7RLNmWxj8u7sf1H60iK7+47CmrRXxksCuVMZg5Afec0YObP07mhz+8py+YflFfBnSMYuJLv7rKerUJ51+XDyAsyJ/wYP9SGUCdFNsdZOUX0zIsiBK7g2K75uGv1vPAxF7lfsusvGLyiksosWvaRgaXmrDla4igC8JJzPcbDnE0t4jR3VvSJjKYL1alMHVoR6+zSVMy80jem8mdM9cC3n3cv+9M58nvNvH+NUO8Wv9lRbsq5twxqtTasrXhw2uH8GVyCnPWH+LDa4cwvmcr7pq5hm/WHqRPuwg2HsjmkXN68ffvtzChdytuGtuV7q3CXB1B0tM/u3L83DSmCw+f07vCayXvPUpi20jXvIeGRgRdEIQasSEli9TjBZzeu3Wl9bxZ/3+/sC8TElsx9rlFfHLDqWw9fJxHvi4/e7V9dEila9W+dcUgbv5kdY3bHhUawN/OT+Tuz72sVejBsC4xRIUE8uDZvRj//CJXea824Xx5ywg+XraXCb1bsfnQcfq2i6RTbCinv7CYXem5nNuvLa//qXQ6h2N5RQT5+1Uo9Iu2ptK7bUSN1g32hgi6IAj1wpp9meQW2ukYE8oYK8/7y1MGuNZuBcguKKbf4z+WO/Zfl/cvJ7rOcM9R3VryyQ2ncs+stXy1uuK8+FXRrVUYO1Jrl3jNk06xodw0pqurY4ptEUjyY2eQmVtERm4Ri7am8vTczbSPDuG3B09j5Z6jfL5yP51btuDWcV15YPZ6vkhOIcjfxtanzz6htoigC4JQ7+QWljBn/UEuGdyhnDtnT3ouxXYHZ/xrCX8e3ZmFW9OYddNwwoL86fGoWbh89s3DSUqIweHQaEwWz6U7M5j67jKiQgP49IZT6RoXxof/28M/fqhe
Zsl/XzuEzi1bMPafi+r4bmFo5xhWeMmnU3bQuG1kMIc8XFARwf6sfHRChRlJq0IEXRAEn6WwxE6AzZCyuZYAAAamSURBVOY1S+fu9FzGP7+I+8/qyW3ju7nKn5qzifd/282cO0bx2LcbWbPvGPee0YPgAD/e/203h7ML6N4qjJ/uGUtBsZ1ej/1Q4fX/dn4i/dpHcvGbSyusU9ckdYpm9i0janWsCLogCE2W/UfzaB8dUmrWrtba9T0zt4hfd6QzqX88AAeO5TNy+gJuHNOFR6zBzR2px5nw4hJuGtOFt5fs4tmL+hLoZ+PCge1cHUlRiYMb/rOKpTvTKbZrLhncntnW4iNThnRg8bY0CksclS4U3r9DFOv2e18MffpFfbFrzV++Nqtp1XZSlQi6IAgnFTvTcugQHUqgf/nww9TsAlqdwMDkP+dv4fWFO3nvqiR6x0eQkVNIv/buGbHv/bqLNpHB5BXamTQgnqz8YuwO7QqVvPnjZDYfzmbx/eMrukSlyExRQRBOKrrGVZzZ8kTEHODeM3py45iurrV325WJab9hdOlVs8omjRvbM46oUO9poRsKEXRBEARMpk9vC6lXl6lDO5aa1NUY+O60KUEQBKFGiKALgiA0E0TQBUEQmgki6IIgCM0EEXRBEIRmggi6IAhCM0EEXRAEoZkggi4IgtBMqJep/0qpNGBvLQ5tCaTXcXOaCifrvct9n1zIfVdMJ6113IlcpF4EvbYopVadaC6DpsrJeu9y3ycXct/1i7hcBEEQmgki6IIgCM0EXxP0dxq7AY3IyXrvct8nF3Lf9YhP+dAFQRCE2uNrFrogCIJQS3xG0JVSE5VSW5VSO5RSDzV2e2qDUqqDUmqhUmqTUuoPpdSdVnmMUuonpdR26zPaKldKqVese16vlBrkca6rrfrblVJXe5QPVkptsI55RXmu39XIKKX8lFJrlFJzrO+dlVLLrbZ+rpQKtMqDrO87rP0JHud42CrfqpQ6y6PcJ/8+lFJRSqnZSqktSqnNSqnhJ8PzVkrdbf2Nb1RKzVBKBTfX562U+kAplaqU2uhRVu/PuKJrVIrWutH/AX7ATqALEAisAxIbu121uI+2wCBrOxzYBiQCzwEPWeUPAf+wts8B5gEKGAYst8pjgF3WZ7S1HW3tW2HVVdaxZzf2fXvc/z3AZ8Ac6/ssYIq1/RZwi7V9K/CWtT0F+NzaTrSefRDQ2fqb8PPlvw/gI+AGazsQiGruzxtoB+wGQjye8zXN9XkDY4BBwEaPsnp/xhVdo9K2NvYfh9XY4cB8j+8PAw83drvq4L6+Bc4AtgJtrbK2wFZr+21gqkf9rdb+qcDbHuVvW2VtgS0e5aXqNfK9tgd+AU4D5lh/nOmAf9lnDMwHhlvb/lY9Vfa5O+v56t8HEGkJmypT3qyfN0bQ91vi5G8977Oa8/MGEigt6PX+jCu6RmX/fMXl4vwDcZJilTVZrNfKgcByoLXW+pC16zDQ2tqu6L4rK0/xUu4LvAQ8ADis77HAMa11ifXds62u+7P2Z1n1a/p7NDadgTTgQ8vV9J5SqgXN/HlrrQ8AzwP7gEOY55dM83/enjTEM67oGhXiK4LerFBKhQFfAndprbM992nT3Tar0CKl1HlAqtY6ubHb0sD4Y17F39RaDwRyMa/GLprp844GJmM6tHigBTCxURvViDTEM67uNXxF0A8AHTy+t7fKmhxKqQCMmH+qtf7KKj6ilGpr7W8LpFrlFd13ZeXtvZQ3NiOBSUqpPcBMjNvlZSBKKeVciNyzra77s/ZHAhnU/PdobFKAFK31cuv7bIzAN/fnPQHYrbVO01oXA19h/gaa+/P2pCGecUXXqBBfEfSVQHdrlDwQM3Dy30ZuU42xRqffBzZrrV/02PVfwDmqfTXGt+4sv8oaGR8GZFmvWPOBM5VS0ZY1dCbGp3gIyFZKDbOudZXHuRoNrfXDWuv2WusEzLNboLWeBiwELrGqlb1v5+9xiVVfW+VTrKiI
zkB3zICRT/59aK0PA/uVUj2totOBTTTz541xtQxTSoVa7XLed7N+3mVoiGdc0TUqpjEHGsoMOpyDiQrZCfylsdtTy3sYhXktWg+stf6dg/EX/gJsB34GYqz6CnjduucNQJLHua4Ddlj/rvUoTwI2Wse8RpkBucb+B4zDHeXSBfMfdAfwBRBklQdb33dY+7t4HP8X69624hHR4at/H8AAYJX1zL/BRDA0++cNPAFssdr2MSZSpVk+b2AGZqygGPNWdn1DPOOKrlHZP5kpKgiC0EzwFZeLIAiCcIKIoAuCIDQTRNAFQRCaCSLogiAIzQQRdEEQhGaCCLogCEIzQQRdEAShmSCCLgiC0Ez4fzJRvCdeu5tlAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "iteration:    200, epoch:   1, train loss: 4.914431, valid loss: 0.000000, valid accuracy: 0.00%\n",
      "iteration:    300, epoch:   1, train loss: 5.134794, valid loss: 0.000000, valid accuracy: 0.00%\n",
      "iteration:    400, epoch:   2, train loss: 5.119534, valid loss: 4.621157, valid accuracy: 2.90%\n",
      "iteration:    500, epoch:   2, train loss: 4.966705, valid loss: 4.621157, valid accuracy: 2.90%\n",
      "iteration:    600, epoch:   2, train loss: 4.260160, valid loss: 4.621157, valid accuracy: 2.90%\n",
      "iteration:    700, epoch:   2, train loss: 4.525101, valid loss: 4.621157, valid accuracy: 2.90%\n",
      "iteration:    800, epoch:   3, train loss: 4.041388, valid loss: 4.173452, valid accuracy: 6.48%\n",
      "iteration:    900, epoch:   3, train loss: 4.056973, valid loss: 4.173452, valid accuracy: 6.48%\n",
      "iteration:   1000, epoch:   3, train loss: 4.079355, valid loss: 4.173452, valid accuracy: 6.48%\n",
      "iteration:   1100, epoch:   3, train loss: 3.845069, valid loss: 4.173452, valid accuracy: 6.48%\n",
      "iteration:   1200, epoch:   4, train loss: 3.921679, valid loss: 3.768086, valid accuracy: 11.14%\n",
      "iteration:   1300, epoch:   4, train loss: 3.783946, valid loss: 3.768086, valid accuracy: 11.14%\n",
      "iteration:   1400, epoch:   4, train loss: 3.656111, valid loss: 3.768086, valid accuracy: 11.14%\n",
      "iteration:   1500, epoch:   4, train loss: 3.864791, valid loss: 3.768086, valid accuracy: 11.14%\n",
      "iteration:   1600, epoch:   5, train loss: 3.532369, valid loss: 3.697321, valid accuracy: 12.56%\n",
      "iteration:   1700, epoch:   5, train loss: 3.764268, valid loss: 3.697321, valid accuracy: 12.56%\n",
      "iteration:   1800, epoch:   5, train loss: 3.909844, valid loss: 3.697321, valid accuracy: 12.56%\n",
      "iteration:   1900, epoch:   5, train loss: 3.611753, valid loss: 3.697321, valid accuracy: 12.56%\n",
      "iteration:   2000, epoch:   6, train loss: 3.505497, valid loss: 3.657469, valid accuracy: 14.61%\n",
      "iteration:   2100, epoch:   6, train loss: 3.561800, valid loss: 3.657469, valid accuracy: 14.61%\n",
      "iteration:   2200, epoch:   6, train loss: 3.575421, valid loss: 3.657469, valid accuracy: 14.61%\n",
      "iteration:   2300, epoch:   6, train loss: 3.434395, valid loss: 3.657469, valid accuracy: 14.61%\n",
      "iteration:   2400, epoch:   7, train loss: 3.309324, valid loss: 3.110811, valid accuracy: 21.75%\n",
      "iteration:   2500, epoch:   7, train loss: 3.094333, valid loss: 3.110811, valid accuracy: 21.75%\n",
      "iteration:   2600, epoch:   7, train loss: 3.077375, valid loss: 3.110811, valid accuracy: 21.75%\n",
      "iteration:   2700, epoch:   7, train loss: 2.965303, valid loss: 3.110811, valid accuracy: 21.75%\n",
      "iteration:   2800, epoch:   8, train loss: 3.194351, valid loss: 3.620363, valid accuracy: 15.20%\n",
      "iteration:   2900, epoch:   8, train loss: 2.964146, valid loss: 3.620363, valid accuracy: 15.20%\n",
      "iteration:   3000, epoch:   8, train loss: 2.804533, valid loss: 3.620363, valid accuracy: 15.20%\n",
      "iteration:   3100, epoch:   8, train loss: 2.838348, valid loss: 3.620363, valid accuracy: 15.20%\n",
      "iteration:   3200, epoch:   9, train loss: 3.055895, valid loss: 2.845621, valid accuracy: 28.03%\n",
      "iteration:   3300, epoch:   9, train loss: 2.720335, valid loss: 2.845621, valid accuracy: 28.03%\n",
      "iteration:   3400, epoch:   9, train loss: 2.890392, valid loss: 2.845621, valid accuracy: 28.03%\n",
      "iteration:   3500, epoch:   9, train loss: 2.633185, valid loss: 2.845621, valid accuracy: 28.03%\n",
      "iteration:   3600, epoch:  10, train loss: 2.672249, valid loss: 2.726970, valid accuracy: 30.31%\n",
      "iteration:   3700, epoch:  10, train loss: 2.814064, valid loss: 2.726970, valid accuracy: 30.31%\n",
      "iteration:   3800, epoch:  10, train loss: 2.898016, valid loss: 2.726970, valid accuracy: 30.31%\n",
      "iteration:   3900, epoch:  10, train loss: 2.645656, valid loss: 2.726970, valid accuracy: 30.31%\n",
      "iteration:   4000, epoch:  11, train loss: 2.327080, valid loss: 3.101898, valid accuracy: 26.98%\n",
      "iteration:   4100, epoch:  11, train loss: 2.705041, valid loss: 3.101898, valid accuracy: 26.98%\n",
      "iteration:   4200, epoch:  11, train loss: 2.559648, valid loss: 3.101898, valid accuracy: 26.98%\n",
      "iteration:   4300, epoch:  11, train loss: 2.554917, valid loss: 3.101898, valid accuracy: 26.98%\n",
      "iteration:   4400, epoch:  12, train loss: 2.352185, valid loss: 2.429240, valid accuracy: 38.14%\n",
      "iteration:   4500, epoch:  12, train loss: 2.517346, valid loss: 2.429240, valid accuracy: 38.14%\n",
      "iteration:   4600, epoch:  12, train loss: 2.350521, valid loss: 2.429240, valid accuracy: 38.14%\n",
      "iteration:   4700, epoch:  13, train loss: 2.210640, valid loss: 2.398415, valid accuracy: 37.07%\n",
      "iteration:   4800, epoch:  13, train loss: 2.539449, valid loss: 2.398415, valid accuracy: 37.07%\n",
      "iteration:   4900, epoch:  13, train loss: 2.322527, valid loss: 2.398415, valid accuracy: 37.07%\n",
      "iteration:   5000, epoch:  13, train loss: 2.260151, valid loss: 2.398415, valid accuracy: 37.07%\n",
      "iteration:   5100, epoch:  14, train loss: 2.173131, valid loss: 2.465228, valid accuracy: 37.10%\n",
      "iteration:   5200, epoch:  14, train loss: 2.333862, valid loss: 2.465228, valid accuracy: 37.10%\n",
      "iteration:   5300, epoch:  14, train loss: 2.191108, valid loss: 2.465228, valid accuracy: 37.10%\n",
      "iteration:   5400, epoch:  14, train loss: 2.178248, valid loss: 2.465228, valid accuracy: 37.10%\n",
      "iteration:   5500, epoch:  15, train loss: 2.167165, valid loss: 2.144328, valid accuracy: 43.14%\n",
      "iteration:   5600, epoch:  15, train loss: 2.304601, valid loss: 2.144328, valid accuracy: 43.14%\n",
      "iteration:   5700, epoch:  15, train loss: 2.204846, valid loss: 2.144328, valid accuracy: 43.14%\n",
      "iteration:   5800, epoch:  15, train loss: 2.295964, valid loss: 2.144328, valid accuracy: 43.14%\n",
      "iteration:   5900, epoch:  16, train loss: 1.848298, valid loss: 2.057396, valid accuracy: 45.03%\n",
      "iteration:   6000, epoch:  16, train loss: 2.174170, valid loss: 2.057396, valid accuracy: 45.03%\n",
      "iteration:   6100, epoch:  16, train loss: 1.756868, valid loss: 2.057396, valid accuracy: 45.03%\n",
      "iteration:   6200, epoch:  16, train loss: 1.984643, valid loss: 2.057396, valid accuracy: 45.03%\n",
      "iteration:   6300, epoch:  17, train loss: 2.154216, valid loss: 1.974870, valid accuracy: 47.28%\n",
      "iteration:   6400, epoch:  17, train loss: 1.950922, valid loss: 1.974870, valid accuracy: 47.28%\n",
      "iteration:   6500, epoch:  17, train loss: 2.499594, valid loss: 1.974870, valid accuracy: 47.28%\n",
      "iteration:   6600, epoch:  17, train loss: 2.070819, valid loss: 1.974870, valid accuracy: 47.28%\n",
      "iteration:   6700, epoch:  18, train loss: 1.985544, valid loss: 1.981200, valid accuracy: 47.82%\n",
      "iteration:   6800, epoch:  18, train loss: 2.179338, valid loss: 1.981200, valid accuracy: 47.82%\n",
      "iteration:   6900, epoch:  18, train loss: 2.250292, valid loss: 1.981200, valid accuracy: 47.82%\n",
      "iteration:   7000, epoch:  18, train loss: 1.937995, valid loss: 1.981200, valid accuracy: 47.82%\n",
      "iteration:   7100, epoch:  19, train loss: 2.029739, valid loss: 2.022861, valid accuracy: 45.94%\n",
      "iteration:   7200, epoch:  19, train loss: 1.805877, valid loss: 2.022861, valid accuracy: 45.94%\n",
      "iteration:   7300, epoch:  19, train loss: 1.866828, valid loss: 2.022861, valid accuracy: 45.94%\n",
      "iteration:   7400, epoch:  19, train loss: 2.266205, valid loss: 2.022861, valid accuracy: 45.94%\n",
      "iteration:   7500, epoch:  20, train loss: 2.071356, valid loss: 2.211602, valid accuracy: 42.44%\n",
      "iteration:   7600, epoch:  20, train loss: 2.046517, valid loss: 2.211602, valid accuracy: 42.44%\n",
      "iteration:   7700, epoch:  20, train loss: 2.085156, valid loss: 2.211602, valid accuracy: 42.44%\n",
      "iteration:   7800, epoch:  20, train loss: 1.912697, valid loss: 2.211602, valid accuracy: 42.44%\n",
      "iteration:   7900, epoch:  21, train loss: 1.988521, valid loss: 2.185776, valid accuracy: 42.98%\n",
      "iteration:   8000, epoch:  21, train loss: 1.858246, valid loss: 2.185776, valid accuracy: 42.98%\n",
      "iteration:   8100, epoch:  21, train loss: 1.965643, valid loss: 2.185776, valid accuracy: 42.98%\n",
      "iteration:   8200, epoch:  21, train loss: 1.732399, valid loss: 2.185776, valid accuracy: 42.98%\n",
      "iteration:   8300, epoch:  22, train loss: 1.607481, valid loss: 1.987131, valid accuracy: 47.77%\n",
      "iteration:   8400, epoch:  22, train loss: 1.743632, valid loss: 1.987131, valid accuracy: 47.77%\n",
      "iteration:   8500, epoch:  22, train loss: 1.722031, valid loss: 1.987131, valid accuracy: 47.77%\n",
      "iteration:   8600, epoch:  22, train loss: 1.782212, valid loss: 1.987131, valid accuracy: 47.77%\n",
      "iteration:   8700, epoch:  23, train loss: 1.963027, valid loss: 1.958812, valid accuracy: 49.18%\n",
      "iteration:   8800, epoch:  23, train loss: 1.703694, valid loss: 1.958812, valid accuracy: 49.18%\n",
      "iteration:   8900, epoch:  23, train loss: 2.003238, valid loss: 1.958812, valid accuracy: 49.18%\n",
      "iteration:   9000, epoch:  24, train loss: 1.679553, valid loss: 1.903454, valid accuracy: 48.98%\n",
      "iteration:   9100, epoch:  24, train loss: 1.899920, valid loss: 1.903454, valid accuracy: 48.98%\n",
      "iteration:   9200, epoch:  24, train loss: 1.731886, valid loss: 1.903454, valid accuracy: 48.98%\n",
      "iteration:   9300, epoch:  24, train loss: 1.967304, valid loss: 1.903454, valid accuracy: 48.98%\n",
      "iteration:   9400, epoch:  25, train loss: 1.684683, valid loss: 1.696996, valid accuracy: 53.91%\n",
      "iteration:   9500, epoch:  25, train loss: 1.707567, valid loss: 1.696996, valid accuracy: 53.91%\n",
      "iteration:   9600, epoch:  25, train loss: 1.667309, valid loss: 1.696996, valid accuracy: 53.91%\n",
      "iteration:   9700, epoch:  25, train loss: 1.828990, valid loss: 1.696996, valid accuracy: 53.91%\n",
      "iteration:   9800, epoch:  26, train loss: 1.814562, valid loss: 2.028291, valid accuracy: 46.30%\n",
      "iteration:   9900, epoch:  26, train loss: 1.855785, valid loss: 2.028291, valid accuracy: 46.30%\n",
      "iteration:  10000, epoch:  26, train loss: 1.711442, valid loss: 2.028291, valid accuracy: 46.30%\n",
      "iteration:  10100, epoch:  26, train loss: 1.500126, valid loss: 2.028291, valid accuracy: 46.30%\n",
      "iteration:  10200, epoch:  27, train loss: 1.493236, valid loss: 1.779501, valid accuracy: 52.10%\n",
      "iteration:  10300, epoch:  27, train loss: 1.778385, valid loss: 1.779501, valid accuracy: 52.10%\n",
      "iteration:  10400, epoch:  27, train loss: 1.631359, valid loss: 1.779501, valid accuracy: 52.10%\n",
      "iteration:  10500, epoch:  27, train loss: 1.717516, valid loss: 1.779501, valid accuracy: 52.10%\n",
      "iteration:  10600, epoch:  28, train loss: 1.676777, valid loss: 1.784669, valid accuracy: 52.97%\n",
      "iteration:  10700, epoch:  28, train loss: 1.547030, valid loss: 1.784669, valid accuracy: 52.97%\n",
      "iteration:  10800, epoch:  28, train loss: 1.734049, valid loss: 1.784669, valid accuracy: 52.97%\n",
      "iteration:  10900, epoch:  28, train loss: 1.515456, valid loss: 1.784669, valid accuracy: 52.97%\n",
      "iteration:  11000, epoch:  29, train loss: 1.731950, valid loss: 1.743144, valid accuracy: 53.34%\n",
      "iteration:  11100, epoch:  29, train loss: 1.793301, valid loss: 1.743144, valid accuracy: 53.34%\n",
      "iteration:  11200, epoch:  29, train loss: 1.640149, valid loss: 1.743144, valid accuracy: 53.34%\n",
      "iteration:  11300, epoch:  29, train loss: 1.857835, valid loss: 1.743144, valid accuracy: 53.34%\n",
      "iteration:  11400, epoch:  30, train loss: 1.506733, valid loss: 1.736685, valid accuracy: 52.79%\n",
      "iteration:  11500, epoch:  30, train loss: 1.678686, valid loss: 1.736685, valid accuracy: 52.79%\n",
      "iteration:  11600, epoch:  30, train loss: 1.591444, valid loss: 1.736685, valid accuracy: 52.79%\n",
      "iteration:  11700, epoch:  30, train loss: 1.759281, valid loss: 1.736685, valid accuracy: 52.79%\n",
      "iteration:  11800, epoch:  31, train loss: 1.531055, valid loss: 1.669329, valid accuracy: 54.82%\n",
      "iteration:  11900, epoch:  31, train loss: 1.418084, valid loss: 1.669329, valid accuracy: 54.82%\n",
      "iteration:  12000, epoch:  31, train loss: 1.612926, valid loss: 1.669329, valid accuracy: 54.82%\n",
      "iteration:  12100, epoch:  31, train loss: 1.699055, valid loss: 1.669329, valid accuracy: 54.82%\n",
      "iteration:  12200, epoch:  32, train loss: 1.368850, valid loss: 1.899997, valid accuracy: 51.30%\n",
      "iteration:  12300, epoch:  32, train loss: 1.574144, valid loss: 1.899997, valid accuracy: 51.30%\n",
      "iteration:  12400, epoch:  32, train loss: 1.624956, valid loss: 1.899997, valid accuracy: 51.30%\n",
      "iteration:  12500, epoch:  32, train loss: 1.527028, valid loss: 1.899997, valid accuracy: 51.30%\n",
      "iteration:  12600, epoch:  33, train loss: 1.462879, valid loss: 1.691010, valid accuracy: 54.97%\n",
      "iteration:  12700, epoch:  33, train loss: 1.622376, valid loss: 1.691010, valid accuracy: 54.97%\n",
      "iteration:  12800, epoch:  33, train loss: 1.455039, valid loss: 1.691010, valid accuracy: 54.97%\n",
      "iteration:  12900, epoch:  33, train loss: 1.442635, valid loss: 1.691010, valid accuracy: 54.97%\n",
      "iteration:  13000, epoch:  34, train loss: 1.522211, valid loss: 1.614262, valid accuracy: 56.69%\n",
      "iteration:  13100, epoch:  34, train loss: 1.547585, valid loss: 1.614262, valid accuracy: 56.69%\n",
      "iteration:  13200, epoch:  34, train loss: 1.393507, valid loss: 1.614262, valid accuracy: 56.69%\n",
      "iteration:  13300, epoch:  35, train loss: 1.395866, valid loss: 1.688455, valid accuracy: 55.44%\n",
      "iteration:  13400, epoch:  35, train loss: 1.537238, valid loss: 1.688455, valid accuracy: 55.44%\n",
      "iteration:  13500, epoch:  35, train loss: 1.409664, valid loss: 1.688455, valid accuracy: 55.44%\n",
      "iteration:  13600, epoch:  35, train loss: 1.589795, valid loss: 1.688455, valid accuracy: 55.44%\n",
      "iteration:  13700, epoch:  36, train loss: 1.610559, valid loss: 1.697662, valid accuracy: 54.45%\n",
      "iteration:  13800, epoch:  36, train loss: 1.592933, valid loss: 1.697662, valid accuracy: 54.45%\n",
      "iteration:  13900, epoch:  36, train loss: 1.427621, valid loss: 1.697662, valid accuracy: 54.45%\n",
      "iteration:  14000, epoch:  36, train loss: 1.452137, valid loss: 1.697662, valid accuracy: 54.45%\n",
      "iteration:  14100, epoch:  37, train loss: 1.343718, valid loss: 1.590400, valid accuracy: 57.09%\n",
      "iteration:  14200, epoch:  37, train loss: 1.514690, valid loss: 1.590400, valid accuracy: 57.09%\n",
      "iteration:  14300, epoch:  37, train loss: 1.506646, valid loss: 1.590400, valid accuracy: 57.09%\n",
      "iteration:  14400, epoch:  37, train loss: 1.529921, valid loss: 1.590400, valid accuracy: 57.09%\n",
      "iteration:  14500, epoch:  38, train loss: 1.284353, valid loss: 1.849831, valid accuracy: 51.71%\n",
      "iteration:  14600, epoch:  38, train loss: 1.528670, valid loss: 1.849831, valid accuracy: 51.71%\n",
      "iteration:  14700, epoch:  38, train loss: 1.577222, valid loss: 1.849831, valid accuracy: 51.71%\n",
      "iteration:  14800, epoch:  38, train loss: 1.555314, valid loss: 1.849831, valid accuracy: 51.71%\n",
      "iteration:  14900, epoch:  39, train loss: 1.451868, valid loss: 2.005914, valid accuracy: 49.80%\n",
      "iteration:  15000, epoch:  39, train loss: 1.493927, valid loss: 2.005914, valid accuracy: 49.80%\n",
      "iteration:  15100, epoch:  39, train loss: 1.360559, valid loss: 2.005914, valid accuracy: 49.80%\n",
      "iteration:  15200, epoch:  39, train loss: 1.562711, valid loss: 2.005914, valid accuracy: 49.80%\n",
      "iteration:  15300, epoch:  40, train loss: 1.510782, valid loss: 1.612773, valid accuracy: 56.81%\n",
      "iteration:  15400, epoch:  40, train loss: 1.560401, valid loss: 1.612773, valid accuracy: 56.81%\n",
      "iteration:  15500, epoch:  40, train loss: 1.481917, valid loss: 1.612773, valid accuracy: 56.81%\n",
      "iteration:  15600, epoch:  40, train loss: 1.432166, valid loss: 1.612773, valid accuracy: 56.81%\n",
      "iteration:  15700, epoch:  41, train loss: 1.429708, valid loss: 1.696144, valid accuracy: 56.06%\n",
      "iteration:  15800, epoch:  41, train loss: 1.353302, valid loss: 1.696144, valid accuracy: 56.06%\n",
      "iteration:  15900, epoch:  41, train loss: 1.400031, valid loss: 1.696144, valid accuracy: 56.06%\n",
      "iteration:  16000, epoch:  41, train loss: 1.466848, valid loss: 1.696144, valid accuracy: 56.06%\n",
      "iteration:  16100, epoch:  42, train loss: 1.239429, valid loss: 1.656896, valid accuracy: 56.03%\n",
      "iteration:  16200, epoch:  42, train loss: 1.517195, valid loss: 1.656896, valid accuracy: 56.03%\n",
      "iteration:  16300, epoch:  42, train loss: 1.376237, valid loss: 1.656896, valid accuracy: 56.03%\n",
      "iteration:  16400, epoch:  42, train loss: 1.369038, valid loss: 1.656896, valid accuracy: 56.03%\n",
      "iteration:  16500, epoch:  43, train loss: 1.416797, valid loss: 1.900078, valid accuracy: 52.48%\n",
      "iteration:  16600, epoch:  43, train loss: 1.393805, valid loss: 1.900078, valid accuracy: 52.48%\n",
      "iteration:  16700, epoch:  43, train loss: 1.459295, valid loss: 1.900078, valid accuracy: 52.48%\n",
      "iteration:  16800, epoch:  43, train loss: 1.667630, valid loss: 1.900078, valid accuracy: 52.48%\n",
      "iteration:  16900, epoch:  44, train loss: 1.146221, valid loss: 1.673449, valid accuracy: 56.82%\n",
      "iteration:  17000, epoch:  44, train loss: 1.440612, valid loss: 1.673449, valid accuracy: 56.82%\n",
      "iteration:  17100, epoch:  44, train loss: 1.347952, valid loss: 1.673449, valid accuracy: 56.82%\n",
      "iteration:  17200, epoch:  44, train loss: 1.341193, valid loss: 1.673449, valid accuracy: 56.82%\n",
      "iteration:  17300, epoch:  45, train loss: 1.199269, valid loss: 1.648314, valid accuracy: 56.47%\n",
      "iteration:  17400, epoch:  45, train loss: 1.269491, valid loss: 1.648314, valid accuracy: 56.47%\n",
      "iteration:  17500, epoch:  45, train loss: 1.301273, valid loss: 1.648314, valid accuracy: 56.47%\n",
      "iteration:  17600, epoch:  46, train loss: 1.179821, valid loss: 1.781785, valid accuracy: 54.29%\n",
      "iteration:  17700, epoch:  46, train loss: 1.417252, valid loss: 1.781785, valid accuracy: 54.29%\n",
      "iteration:  17800, epoch:  46, train loss: 1.229450, valid loss: 1.781785, valid accuracy: 54.29%\n",
      "iteration:  17900, epoch:  46, train loss: 1.272111, valid loss: 1.781785, valid accuracy: 54.29%\n",
      "iteration:  18000, epoch:  47, train loss: 1.463715, valid loss: 1.699215, valid accuracy: 55.53%\n",
      "iteration:  18100, epoch:  47, train loss: 1.412918, valid loss: 1.699215, valid accuracy: 55.53%\n",
      "iteration:  18200, epoch:  47, train loss: 1.248575, valid loss: 1.699215, valid accuracy: 55.53%\n",
      "iteration:  18300, epoch:  47, train loss: 1.473457, valid loss: 1.699215, valid accuracy: 55.53%\n",
      "iteration:  18400, epoch:  48, train loss: 1.318561, valid loss: 1.643414, valid accuracy: 56.80%\n",
      "iteration:  18500, epoch:  48, train loss: 1.444734, valid loss: 1.643414, valid accuracy: 56.80%\n",
      "iteration:  18600, epoch:  48, train loss: 1.476561, valid loss: 1.643414, valid accuracy: 56.80%\n",
      "iteration:  18700, epoch:  48, train loss: 1.349341, valid loss: 1.643414, valid accuracy: 56.80%\n",
      "iteration:  18800, epoch:  49, train loss: 1.235844, valid loss: 1.754591, valid accuracy: 54.07%\n",
      "iteration:  18900, epoch:  49, train loss: 1.343853, valid loss: 1.754591, valid accuracy: 54.07%\n",
      "iteration:  19000, epoch:  49, train loss: 1.253780, valid loss: 1.754591, valid accuracy: 54.07%\n",
      "iteration:  19100, epoch:  49, train loss: 1.385187, valid loss: 1.754591, valid accuracy: 54.07%\n",
      "iteration:  19200, epoch:  50, train loss: 1.232572, valid loss: 1.638230, valid accuracy: 56.71%\n",
      "iteration:  19300, epoch:  50, train loss: 1.458081, valid loss: 1.638230, valid accuracy: 56.71%\n",
      "iteration:  19400, epoch:  50, train loss: 1.327692, valid loss: 1.638230, valid accuracy: 56.71%\n",
      "iteration:  19500, epoch:  50, train loss: 1.291683, valid loss: 1.638230, valid accuracy: 56.71%\n",
      "iteration:  19600, epoch:  51, train loss: 1.325415, valid loss: 1.779692, valid accuracy: 54.87%\n",
      "iteration:  19700, epoch:  51, train loss: 1.377706, valid loss: 1.779692, valid accuracy: 54.87%\n",
      "iteration:  19800, epoch:  51, train loss: 1.306878, valid loss: 1.779692, valid accuracy: 54.87%\n",
      "iteration:  19900, epoch:  51, train loss: 1.476566, valid loss: 1.779692, valid accuracy: 54.87%\n",
      "iteration:  20000, epoch:  52, train loss: 1.424859, valid loss: 1.642283, valid accuracy: 57.54%\n",
      "iteration:  20100, epoch:  52, train loss: 1.416561, valid loss: 1.642283, valid accuracy: 57.54%\n",
      "iteration:  20200, epoch:  52, train loss: 1.266299, valid loss: 1.642283, valid accuracy: 57.54%\n",
      "iteration:  20300, epoch:  52, train loss: 1.389279, valid loss: 1.642283, valid accuracy: 57.54%\n",
      "iteration:  20400, epoch:  53, train loss: 1.319257, valid loss: 1.688514, valid accuracy: 55.52%\n",
      "iteration:  20500, epoch:  53, train loss: 1.353459, valid loss: 1.688514, valid accuracy: 55.52%\n",
      "iteration:  20600, epoch:  53, train loss: 1.316191, valid loss: 1.688514, valid accuracy: 55.52%\n",
      "iteration:  20700, epoch:  53, train loss: 1.331541, valid loss: 1.688514, valid accuracy: 55.52%\n",
      "iteration:  20800, epoch:  54, train loss: 1.210662, valid loss: 1.716247, valid accuracy: 56.62%\n",
      "iteration:  20900, epoch:  54, train loss: 1.321020, valid loss: 1.716247, valid accuracy: 56.62%\n",
      "iteration:  21000, epoch:  54, train loss: 1.455404, valid loss: 1.716247, valid accuracy: 56.62%\n",
      "iteration:  21100, epoch:  54, train loss: 1.354992, valid loss: 1.716247, valid accuracy: 56.62%\n",
      "iteration:  21200, epoch:  55, train loss: 1.360037, valid loss: 1.846187, valid accuracy: 53.37%\n",
      "iteration:  21300, epoch:  55, train loss: 1.391478, valid loss: 1.846187, valid accuracy: 53.37%\n",
      "iteration:  21400, epoch:  55, train loss: 1.499476, valid loss: 1.846187, valid accuracy: 53.37%\n",
      "iteration:  21500, epoch:  55, train loss: 1.287212, valid loss: 1.846187, valid accuracy: 53.37%\n",
      "iteration:  21600, epoch:  56, train loss: 1.081722, valid loss: 1.654542, valid accuracy: 56.64%\n",
      "iteration:  21700, epoch:  56, train loss: 1.272387, valid loss: 1.654542, valid accuracy: 56.64%\n",
      "iteration:  21800, epoch:  56, train loss: 1.513377, valid loss: 1.654542, valid accuracy: 56.64%\n",
      "iteration:  21900, epoch:  57, train loss: 1.343637, valid loss: 1.533587, valid accuracy: 59.38%\n",
      "iteration:  22000, epoch:  57, train loss: 1.188152, valid loss: 1.533587, valid accuracy: 59.38%\n",
      "iteration:  22100, epoch:  57, train loss: 1.279658, valid loss: 1.533587, valid accuracy: 59.38%\n",
      "iteration:  22200, epoch:  57, train loss: 1.559710, valid loss: 1.533587, valid accuracy: 59.38%\n",
      "iteration:  22300, epoch:  58, train loss: 1.359815, valid loss: 1.520148, valid accuracy: 59.76%\n",
      "iteration:  22400, epoch:  58, train loss: 1.212873, valid loss: 1.520148, valid accuracy: 59.76%\n",
      "iteration:  22500, epoch:  58, train loss: 1.074427, valid loss: 1.520148, valid accuracy: 59.76%\n",
      "iteration:  22600, epoch:  58, train loss: 1.444488, valid loss: 1.520148, valid accuracy: 59.76%\n",
      "iteration:  22700, epoch:  59, train loss: 1.235497, valid loss: 1.612079, valid accuracy: 57.25%\n",
      "iteration:  22800, epoch:  59, train loss: 1.279040, valid loss: 1.612079, valid accuracy: 57.25%\n",
      "iteration:  22900, epoch:  59, train loss: 1.218140, valid loss: 1.612079, valid accuracy: 57.25%\n",
      "iteration:  23000, epoch:  59, train loss: 1.278769, valid loss: 1.612079, valid accuracy: 57.25%\n",
      "iteration:  23100, epoch:  60, train loss: 1.259564, valid loss: 1.547171, valid accuracy: 59.19%\n",
      "iteration:  23200, epoch:  60, train loss: 1.197741, valid loss: 1.547171, valid accuracy: 59.19%\n",
      "iteration:  23300, epoch:  60, train loss: 1.295761, valid loss: 1.547171, valid accuracy: 59.19%\n",
      "iteration:  23400, epoch:  60, train loss: 1.266263, valid loss: 1.547171, valid accuracy: 59.19%\n",
      "iteration:  23500, epoch:  61, train loss: 1.066353, valid loss: 1.926041, valid accuracy: 51.20%\n",
      "iteration:  23600, epoch:  61, train loss: 1.312160, valid loss: 1.926041, valid accuracy: 51.20%\n",
      "iteration:  23700, epoch:  61, train loss: 1.288848, valid loss: 1.926041, valid accuracy: 51.20%\n",
      "iteration:  23800, epoch:  61, train loss: 1.263332, valid loss: 1.926041, valid accuracy: 51.20%\n",
      "iteration:  23900, epoch:  62, train loss: 1.109298, valid loss: 1.772703, valid accuracy: 54.97%\n",
      "iteration:  24000, epoch:  62, train loss: 1.173793, valid loss: 1.772703, valid accuracy: 54.97%\n",
      "iteration:  24100, epoch:  62, train loss: 1.268205, valid loss: 1.772703, valid accuracy: 54.97%\n",
      "iteration:  24200, epoch:  62, train loss: 1.276909, valid loss: 1.772703, valid accuracy: 54.97%\n",
      "iteration:  24300, epoch:  63, train loss: 1.330146, valid loss: 1.602351, valid accuracy: 58.21%\n",
      "iteration:  24400, epoch:  63, train loss: 1.201461, valid loss: 1.602351, valid accuracy: 58.21%\n",
      "iteration:  24500, epoch:  63, train loss: 1.312712, valid loss: 1.602351, valid accuracy: 58.21%\n",
      "iteration:  24600, epoch:  63, train loss: 1.145387, valid loss: 1.602351, valid accuracy: 58.21%\n",
      "iteration:  24700, epoch:  64, train loss: 1.285913, valid loss: 1.591326, valid accuracy: 59.65%\n",
      "iteration:  24800, epoch:  64, train loss: 1.381398, valid loss: 1.591326, valid accuracy: 59.65%\n",
      "iteration:  24900, epoch:  64, train loss: 1.347162, valid loss: 1.591326, valid accuracy: 59.65%\n",
      "iteration:  25000, epoch:  64, train loss: 1.239651, valid loss: 1.591326, valid accuracy: 59.65%\n",
      "iteration:  25100, epoch:  65, train loss: 1.141438, valid loss: 1.634241, valid accuracy: 57.47%\n",
      "iteration:  25200, epoch:  65, train loss: 1.191291, valid loss: 1.634241, valid accuracy: 57.47%\n",
      "iteration:  25300, epoch:  65, train loss: 1.207838, valid loss: 1.634241, valid accuracy: 57.47%\n",
      "iteration:  25400, epoch:  65, train loss: 1.176625, valid loss: 1.634241, valid accuracy: 57.47%\n",
      "iteration:  25500, epoch:  66, train loss: 1.188698, valid loss: 2.072495, valid accuracy: 51.44%\n",
      "iteration:  25600, epoch:  66, train loss: 1.113374, valid loss: 2.072495, valid accuracy: 51.44%\n",
      "iteration:  25700, epoch:  66, train loss: 1.310580, valid loss: 2.072495, valid accuracy: 51.44%\n",
      "iteration:  25800, epoch:  66, train loss: 1.222150, valid loss: 2.072495, valid accuracy: 51.44%\n",
      "iteration:  25900, epoch:  67, train loss: 1.393378, valid loss: 1.506512, valid accuracy: 60.73%\n",
      "iteration:  26000, epoch:  67, train loss: 1.239290, valid loss: 1.506512, valid accuracy: 60.73%\n",
      "iteration:  26100, epoch:  67, train loss: 1.077234, valid loss: 1.506512, valid accuracy: 60.73%\n",
      "iteration:  26200, epoch:  68, train loss: 1.174270, valid loss: 1.563411, valid accuracy: 59.23%\n",
      "iteration:  26300, epoch:  68, train loss: 1.191478, valid loss: 1.563411, valid accuracy: 59.23%\n",
      "iteration:  26400, epoch:  68, train loss: 1.091337, valid loss: 1.563411, valid accuracy: 59.23%\n",
      "iteration:  26500, epoch:  68, train loss: 1.277928, valid loss: 1.563411, valid accuracy: 59.23%\n",
      "iteration:  26600, epoch:  69, train loss: 1.333915, valid loss: 1.638330, valid accuracy: 57.86%\n",
      "iteration:  26700, epoch:  69, train loss: 1.310728, valid loss: 1.638330, valid accuracy: 57.86%\n",
      "iteration:  26800, epoch:  69, train loss: 1.091490, valid loss: 1.638330, valid accuracy: 57.86%\n",
      "iteration:  26900, epoch:  69, train loss: 1.224410, valid loss: 1.638330, valid accuracy: 57.86%\n",
      "iteration:  27000, epoch:  70, train loss: 1.253328, valid loss: 1.635779, valid accuracy: 58.09%\n",
      "iteration:  27100, epoch:  70, train loss: 1.255216, valid loss: 1.635779, valid accuracy: 58.09%\n",
      "iteration:  27200, epoch:  70, train loss: 1.089449, valid loss: 1.635779, valid accuracy: 58.09%\n",
      "iteration:  27300, epoch:  70, train loss: 1.401766, valid loss: 1.635779, valid accuracy: 58.09%\n",
      "iteration:  27400, epoch:  71, train loss: 1.267692, valid loss: 1.592641, valid accuracy: 59.02%\n",
      "iteration:  27500, epoch:  71, train loss: 1.279844, valid loss: 1.592641, valid accuracy: 59.02%\n",
      "iteration:  27600, epoch:  71, train loss: 1.173079, valid loss: 1.592641, valid accuracy: 59.02%\n",
      "iteration:  27700, epoch:  71, train loss: 1.239400, valid loss: 1.592641, valid accuracy: 59.02%\n",
      "iteration:  27800, epoch:  72, train loss: 1.070040, valid loss: 1.661877, valid accuracy: 58.11%\n",
      "iteration:  27900, epoch:  72, train loss: 1.245769, valid loss: 1.661877, valid accuracy: 58.11%\n",
      "iteration:  28000, epoch:  72, train loss: 1.173126, valid loss: 1.661877, valid accuracy: 58.11%\n",
      "iteration:  28100, epoch:  72, train loss: 1.320743, valid loss: 1.661877, valid accuracy: 58.11%\n",
      "iteration:  28200, epoch:  73, train loss: 1.044004, valid loss: 1.604578, valid accuracy: 58.67%\n",
      "iteration:  28300, epoch:  73, train loss: 1.094411, valid loss: 1.604578, valid accuracy: 58.67%\n",
      "iteration:  28400, epoch:  73, train loss: 1.192378, valid loss: 1.604578, valid accuracy: 58.67%\n",
      "iteration:  28500, epoch:  73, train loss: 1.167338, valid loss: 1.604578, valid accuracy: 58.67%\n",
      "iteration:  28600, epoch:  74, train loss: 1.218321, valid loss: 1.555952, valid accuracy: 60.05%\n",
      "iteration:  28700, epoch:  74, train loss: 1.273193, valid loss: 1.555952, valid accuracy: 60.05%\n",
      "iteration:  28800, epoch:  74, train loss: 1.332139, valid loss: 1.555952, valid accuracy: 60.05%\n",
      "iteration:  28900, epoch:  74, train loss: 1.157209, valid loss: 1.555952, valid accuracy: 60.05%\n",
      "iteration:  29000, epoch:  75, train loss: 1.091583, valid loss: 1.729066, valid accuracy: 56.26%\n",
      "iteration:  29100, epoch:  75, train loss: 1.149822, valid loss: 1.729066, valid accuracy: 56.26%\n",
      "iteration:  29200, epoch:  75, train loss: 1.200695, valid loss: 1.729066, valid accuracy: 56.26%\n",
      "iteration:  29300, epoch:  75, train loss: 1.393856, valid loss: 1.729066, valid accuracy: 56.26%\n",
      "iteration:  29400, epoch:  76, train loss: 1.065166, valid loss: 1.616143, valid accuracy: 58.06%\n",
      "iteration:  29500, epoch:  76, train loss: 1.223055, valid loss: 1.616143, valid accuracy: 58.06%\n",
      "iteration:  29600, epoch:  76, train loss: 1.269934, valid loss: 1.616143, valid accuracy: 58.06%\n",
      "iteration:  29700, epoch:  76, train loss: 1.064861, valid loss: 1.616143, valid accuracy: 58.06%\n",
      "iteration:  29800, epoch:  77, train loss: 1.119592, valid loss: 1.667037, valid accuracy: 58.13%\n",
      "iteration:  29900, epoch:  77, train loss: 1.129055, valid loss: 1.667037, valid accuracy: 58.13%\n",
      "iteration:  30000, epoch:  77, train loss: 1.045406, valid loss: 1.667037, valid accuracy: 58.13%\n",
      "iteration:  30100, epoch:  77, train loss: 1.021749, valid loss: 1.667037, valid accuracy: 58.13%\n",
      "iteration:  30200, epoch:  78, train loss: 1.200664, valid loss: 1.922086, valid accuracy: 52.23%\n",
      "iteration:  30300, epoch:  78, train loss: 1.289638, valid loss: 1.922086, valid accuracy: 52.23%\n",
      "iteration:  30400, epoch:  78, train loss: 1.177770, valid loss: 1.922086, valid accuracy: 52.23%\n",
      "iteration:  30500, epoch:  79, train loss: 1.065736, valid loss: 1.600701, valid accuracy: 58.47%\n",
      "iteration:  30600, epoch:  79, train loss: 1.199084, valid loss: 1.600701, valid accuracy: 58.47%\n",
      "iteration:  30700, epoch:  79, train loss: 1.190709, valid loss: 1.600701, valid accuracy: 58.47%\n",
      "iteration:  30800, epoch:  79, train loss: 1.141619, valid loss: 1.600701, valid accuracy: 58.47%\n",
      "iteration:  30900, epoch:  80, train loss: 1.181584, valid loss: 1.480414, valid accuracy: 61.16%\n",
      "iteration:  31000, epoch:  80, train loss: 1.127348, valid loss: 1.480414, valid accuracy: 61.16%\n",
      "iteration:  31100, epoch:  80, train loss: 1.100419, valid loss: 1.480414, valid accuracy: 61.16%\n",
      "iteration:  31200, epoch:  80, train loss: 1.173613, valid loss: 1.480414, valid accuracy: 61.16%\n",
      "iteration:  31300, epoch:  81, train loss: 1.126756, valid loss: 1.674327, valid accuracy: 57.73%\n",
      "iteration:  31400, epoch:  81, train loss: 1.212310, valid loss: 1.674327, valid accuracy: 57.73%\n",
      "iteration:  31500, epoch:  81, train loss: 1.375569, valid loss: 1.674327, valid accuracy: 57.73%\n",
      "iteration:  31600, epoch:  81, train loss: 1.289512, valid loss: 1.674327, valid accuracy: 57.73%\n",
      "iteration:  31700, epoch:  82, train loss: 1.196879, valid loss: 1.674081, valid accuracy: 57.51%\n",
      "iteration:  31800, epoch:  82, train loss: 1.152486, valid loss: 1.674081, valid accuracy: 57.51%\n",
      "iteration:  31900, epoch:  82, train loss: 1.086443, valid loss: 1.674081, valid accuracy: 57.51%\n",
      "iteration:  32000, epoch:  82, train loss: 1.166466, valid loss: 1.674081, valid accuracy: 57.51%\n",
      "iteration:  32100, epoch:  83, train loss: 1.051987, valid loss: 1.596838, valid accuracy: 58.90%\n",
      "iteration:  32200, epoch:  83, train loss: 1.221043, valid loss: 1.596838, valid accuracy: 58.90%\n",
      "iteration:  32300, epoch:  83, train loss: 1.203641, valid loss: 1.596838, valid accuracy: 58.90%\n",
      "iteration:  32400, epoch:  83, train loss: 1.292899, valid loss: 1.596838, valid accuracy: 58.90%\n",
      "iteration:  32500, epoch:  84, train loss: 0.995888, valid loss: 1.598750, valid accuracy: 59.12%\n",
      "iteration:  32600, epoch:  84, train loss: 0.994274, valid loss: 1.598750, valid accuracy: 59.12%\n",
      "iteration:  32700, epoch:  84, train loss: 1.071490, valid loss: 1.598750, valid accuracy: 59.12%\n",
      "iteration:  32800, epoch:  84, train loss: 1.109241, valid loss: 1.598750, valid accuracy: 59.12%\n",
      "iteration:  32900, epoch:  85, train loss: 1.213023, valid loss: 1.577416, valid accuracy: 60.09%\n",
      "iteration:  33000, epoch:  85, train loss: 1.021892, valid loss: 1.577416, valid accuracy: 60.09%\n",
      "iteration:  33100, epoch:  85, train loss: 1.113278, valid loss: 1.577416, valid accuracy: 60.09%\n",
      "iteration:  33200, epoch:  85, train loss: 1.285480, valid loss: 1.577416, valid accuracy: 60.09%\n",
      "iteration:  33300, epoch:  86, train loss: 0.980051, valid loss: 1.612884, valid accuracy: 59.27%\n",
      "iteration:  33400, epoch:  86, train loss: 0.982216, valid loss: 1.612884, valid accuracy: 59.27%\n",
      "iteration:  33500, epoch:  86, train loss: 1.133077, valid loss: 1.612884, valid accuracy: 59.27%\n",
      "iteration:  33600, epoch:  86, train loss: 1.181762, valid loss: 1.612884, valid accuracy: 59.27%\n",
      "iteration:  33700, epoch:  87, train loss: 1.130169, valid loss: 1.594773, valid accuracy: 59.41%\n",
      "iteration:  33800, epoch:  87, train loss: 1.075879, valid loss: 1.594773, valid accuracy: 59.41%\n",
      "iteration:  33900, epoch:  87, train loss: 1.203605, valid loss: 1.594773, valid accuracy: 59.41%\n",
      "iteration:  34000, epoch:  87, train loss: 1.129398, valid loss: 1.594773, valid accuracy: 59.41%\n",
      "iteration:  34100, epoch:  88, train loss: 1.034713, valid loss: 1.698700, valid accuracy: 57.56%\n",
      "iteration:  34200, epoch:  88, train loss: 1.049380, valid loss: 1.698700, valid accuracy: 57.56%\n",
      "iteration:  34300, epoch:  88, train loss: 1.056955, valid loss: 1.698700, valid accuracy: 57.56%\n",
      "iteration:  34400, epoch:  88, train loss: 1.135461, valid loss: 1.698700, valid accuracy: 57.56%\n",
      "iteration:  34500, epoch:  89, train loss: 1.015246, valid loss: 1.766227, valid accuracy: 55.36%\n",
      "iteration:  34600, epoch:  89, train loss: 1.224311, valid loss: 1.766227, valid accuracy: 55.36%\n",
      "iteration:  34700, epoch:  89, train loss: 1.394404, valid loss: 1.766227, valid accuracy: 55.36%\n",
      "iteration:  34800, epoch:  90, train loss: 1.167845, valid loss: 1.561014, valid accuracy: 60.42%\n",
      "iteration:  34900, epoch:  90, train loss: 1.090371, valid loss: 1.561014, valid accuracy: 60.42%\n",
      "iteration:  35000, epoch:  90, train loss: 1.039774, valid loss: 1.561014, valid accuracy: 60.42%\n",
      "iteration:  35100, epoch:  90, train loss: 1.175563, valid loss: 1.561014, valid accuracy: 60.42%\n",
      "iteration:  35200, epoch:  91, train loss: 1.072880, valid loss: 1.775720, valid accuracy: 56.03%\n",
      "iteration:  35300, epoch:  91, train loss: 1.010758, valid loss: 1.775720, valid accuracy: 56.03%\n",
      "iteration:  35400, epoch:  91, train loss: 1.043945, valid loss: 1.775720, valid accuracy: 56.03%\n",
      "iteration:  35500, epoch:  91, train loss: 1.050051, valid loss: 1.775720, valid accuracy: 56.03%\n",
      "iteration:  35600, epoch:  92, train loss: 1.013333, valid loss: 1.496968, valid accuracy: 61.20%\n",
      "iteration:  35700, epoch:  92, train loss: 1.016976, valid loss: 1.496968, valid accuracy: 61.20%\n",
      "iteration:  35800, epoch:  92, train loss: 1.105517, valid loss: 1.496968, valid accuracy: 61.20%\n",
      "iteration:  35900, epoch:  92, train loss: 1.163302, valid loss: 1.496968, valid accuracy: 61.20%\n",
      "iteration:  36000, epoch:  93, train loss: 0.974546, valid loss: 1.642557, valid accuracy: 58.69%\n",
      "iteration:  36100, epoch:  93, train loss: 1.072306, valid loss: 1.642557, valid accuracy: 58.69%\n",
      "iteration:  36200, epoch:  93, train loss: 1.030121, valid loss: 1.642557, valid accuracy: 58.69%\n",
      "iteration:  36300, epoch:  93, train loss: 1.080623, valid loss: 1.642557, valid accuracy: 58.69%\n",
      "iteration:  36400, epoch:  94, train loss: 1.013646, valid loss: 1.594048, valid accuracy: 59.92%\n",
      "iteration:  36500, epoch:  94, train loss: 1.065194, valid loss: 1.594048, valid accuracy: 59.92%\n",
      "iteration:  36600, epoch:  94, train loss: 1.178510, valid loss: 1.594048, valid accuracy: 59.92%\n",
      "iteration:  36700, epoch:  94, train loss: 1.140624, valid loss: 1.594048, valid accuracy: 59.92%\n",
      "iteration:  36800, epoch:  95, train loss: 0.951916, valid loss: 1.596152, valid accuracy: 59.02%\n",
      "iteration:  36900, epoch:  95, train loss: 1.164011, valid loss: 1.596152, valid accuracy: 59.02%\n",
      "iteration:  37000, epoch:  95, train loss: 1.039240, valid loss: 1.596152, valid accuracy: 59.02%\n",
      "iteration:  37100, epoch:  95, train loss: 1.110304, valid loss: 1.596152, valid accuracy: 59.02%\n",
      "iteration:  37200, epoch:  96, train loss: 1.001497, valid loss: 1.604010, valid accuracy: 59.92%\n",
      "iteration:  37300, epoch:  96, train loss: 1.001368, valid loss: 1.604010, valid accuracy: 59.92%\n",
      "iteration:  37400, epoch:  96, train loss: 1.070846, valid loss: 1.604010, valid accuracy: 59.92%\n",
      "iteration:  37500, epoch:  96, train loss: 1.192812, valid loss: 1.604010, valid accuracy: 59.92%\n",
      "iteration:  37600, epoch:  97, train loss: 1.164421, valid loss: 1.495670, valid accuracy: 61.42%\n",
      "iteration:  37700, epoch:  97, train loss: 0.985334, valid loss: 1.495670, valid accuracy: 61.42%\n",
      "iteration:  37800, epoch:  97, train loss: 0.970175, valid loss: 1.495670, valid accuracy: 61.42%\n",
      "iteration:  37900, epoch:  97, train loss: 1.135743, valid loss: 1.495670, valid accuracy: 61.42%\n",
      "iteration:  38000, epoch:  98, train loss: 1.007530, valid loss: 1.596481, valid accuracy: 59.04%\n",
      "iteration:  38100, epoch:  98, train loss: 0.996927, valid loss: 1.596481, valid accuracy: 59.04%\n",
      "iteration:  38200, epoch:  98, train loss: 1.034668, valid loss: 1.596481, valid accuracy: 59.04%\n",
      "iteration:  38300, epoch:  98, train loss: 1.200170, valid loss: 1.596481, valid accuracy: 59.04%\n",
      "iteration:  38400, epoch:  99, train loss: 0.931446, valid loss: 1.627641, valid accuracy: 59.53%\n",
      "iteration:  38500, epoch:  99, train loss: 0.861766, valid loss: 1.627641, valid accuracy: 59.53%\n",
      "iteration:  38600, epoch:  99, train loss: 1.000135, valid loss: 1.627641, valid accuracy: 59.53%\n",
      "iteration:  38700, epoch:  99, train loss: 1.172297, valid loss: 1.627641, valid accuracy: 59.53%\n",
      "iteration:  38800, epoch: 100, train loss: 0.955800, valid loss: 1.517820, valid accuracy: 61.64%\n",
      "iteration:  38900, epoch: 100, train loss: 1.165089, valid loss: 1.517820, valid accuracy: 61.64%\n",
      "iteration:  39000, epoch: 100, train loss: 1.018434, valid loss: 1.517820, valid accuracy: 61.64%\n",
      "iteration:  39100, epoch: 100, train loss: 1.009384, valid loss: 1.517820, valid accuracy: 61.64%\n",
      "iteration:  39200, epoch: 101, train loss: 0.912030, valid loss: 1.689508, valid accuracy: 58.66%\n",
      "iteration:  39300, epoch: 101, train loss: 1.024524, valid loss: 1.689508, valid accuracy: 58.66%\n",
      "iteration:  39400, epoch: 101, train loss: 0.973825, valid loss: 1.689508, valid accuracy: 58.66%\n",
      "iteration:  39500, epoch: 102, train loss: 0.991656, valid loss: 1.729511, valid accuracy: 57.33%\n",
      "iteration:  39600, epoch: 102, train loss: 0.981038, valid loss: 1.729511, valid accuracy: 57.33%\n",
      "iteration:  39700, epoch: 102, train loss: 1.039602, valid loss: 1.729511, valid accuracy: 57.33%\n",
      "iteration:  39800, epoch: 102, train loss: 1.174456, valid loss: 1.729511, valid accuracy: 57.33%\n",
      "iteration:  39900, epoch: 103, train loss: 1.023110, valid loss: 1.596393, valid accuracy: 60.08%\n",
      "iteration:  40000, epoch: 103, train loss: 1.020869, valid loss: 1.596393, valid accuracy: 60.08%\n",
      "iteration:  40100, epoch: 103, train loss: 1.022898, valid loss: 1.596393, valid accuracy: 60.08%\n",
      "iteration:  40200, epoch: 103, train loss: 1.208365, valid loss: 1.596393, valid accuracy: 60.08%\n",
      "iteration:  40300, epoch: 104, train loss: 0.989736, valid loss: 1.721177, valid accuracy: 57.51%\n",
      "iteration:  40400, epoch: 104, train loss: 1.014630, valid loss: 1.721177, valid accuracy: 57.51%\n",
      "iteration:  40500, epoch: 104, train loss: 0.994658, valid loss: 1.721177, valid accuracy: 57.51%\n",
      "iteration:  40600, epoch: 104, train loss: 1.045221, valid loss: 1.721177, valid accuracy: 57.51%\n",
      "iteration:  40700, epoch: 105, train loss: 1.009809, valid loss: 1.616617, valid accuracy: 58.80%\n",
      "iteration:  40800, epoch: 105, train loss: 1.041145, valid loss: 1.616617, valid accuracy: 58.80%\n",
      "iteration:  40900, epoch: 105, train loss: 1.116097, valid loss: 1.616617, valid accuracy: 58.80%\n",
      "iteration:  41000, epoch: 105, train loss: 1.130271, valid loss: 1.616617, valid accuracy: 58.80%\n",
      "iteration:  41100, epoch: 106, train loss: 0.958918, valid loss: 1.677713, valid accuracy: 59.07%\n",
      "iteration:  41200, epoch: 106, train loss: 0.990622, valid loss: 1.677713, valid accuracy: 59.07%\n",
      "iteration:  41300, epoch: 106, train loss: 1.077639, valid loss: 1.677713, valid accuracy: 59.07%\n",
      "iteration:  41400, epoch: 106, train loss: 1.064982, valid loss: 1.677713, valid accuracy: 59.07%\n",
      "iteration:  41500, epoch: 107, train loss: 1.106037, valid loss: 1.564756, valid accuracy: 60.89%\n",
      "iteration:  41600, epoch: 107, train loss: 1.136137, valid loss: 1.564756, valid accuracy: 60.89%\n",
      "iteration:  41700, epoch: 107, train loss: 1.057173, valid loss: 1.564756, valid accuracy: 60.89%\n",
      "iteration:  41800, epoch: 107, train loss: 1.070461, valid loss: 1.564756, valid accuracy: 60.89%\n",
      "iteration:  41900, epoch: 108, train loss: 0.861538, valid loss: 1.456700, valid accuracy: 62.43%\n",
      "iteration:  42000, epoch: 108, train loss: 0.991564, valid loss: 1.456700, valid accuracy: 62.43%\n",
      "iteration:  42100, epoch: 108, train loss: 1.197016, valid loss: 1.456700, valid accuracy: 62.43%\n",
      "iteration:  42200, epoch: 108, train loss: 1.048187, valid loss: 1.456700, valid accuracy: 62.43%\n",
      "iteration:  42300, epoch: 109, train loss: 0.974467, valid loss: 1.548533, valid accuracy: 61.81%\n",
      "iteration:  42400, epoch: 109, train loss: 0.983189, valid loss: 1.548533, valid accuracy: 61.81%\n",
      "iteration:  42500, epoch: 109, train loss: 1.184768, valid loss: 1.548533, valid accuracy: 61.81%\n",
      "iteration:  42600, epoch: 109, train loss: 1.128985, valid loss: 1.548533, valid accuracy: 61.81%\n",
      "iteration:  42700, epoch: 110, train loss: 0.817038, valid loss: 1.654546, valid accuracy: 58.18%\n",
      "iteration:  42800, epoch: 110, train loss: 0.998859, valid loss: 1.654546, valid accuracy: 58.18%\n",
      "iteration:  42900, epoch: 110, train loss: 1.094097, valid loss: 1.654546, valid accuracy: 58.18%\n",
      "iteration:  43000, epoch: 110, train loss: 1.082156, valid loss: 1.654546, valid accuracy: 58.18%\n",
      "iteration:  43100, epoch: 111, train loss: 0.944507, valid loss: 1.700791, valid accuracy: 58.53%\n",
      "iteration:  43200, epoch: 111, train loss: 1.003987, valid loss: 1.700791, valid accuracy: 58.53%\n",
      "iteration:  43300, epoch: 111, train loss: 0.995338, valid loss: 1.700791, valid accuracy: 58.53%\n",
      "iteration:  43400, epoch: 111, train loss: 1.051876, valid loss: 1.700791, valid accuracy: 58.53%\n",
      "iteration:  43500, epoch: 112, train loss: 0.999150, valid loss: 1.624947, valid accuracy: 59.29%\n",
      "iteration:  43600, epoch: 112, train loss: 0.982077, valid loss: 1.624947, valid accuracy: 59.29%\n",
      "iteration:  43700, epoch: 112, train loss: 1.066910, valid loss: 1.624947, valid accuracy: 59.29%\n",
      "iteration:  43800, epoch: 113, train loss: 0.951186, valid loss: 1.687411, valid accuracy: 58.45%\n",
      "iteration:  43900, epoch: 113, train loss: 0.999913, valid loss: 1.687411, valid accuracy: 58.45%\n",
      "iteration:  44000, epoch: 113, train loss: 0.902136, valid loss: 1.687411, valid accuracy: 58.45%\n",
      "iteration:  44100, epoch: 113, train loss: 0.902274, valid loss: 1.687411, valid accuracy: 58.45%\n",
      "iteration:  44200, epoch: 114, train loss: 0.897882, valid loss: 1.626414, valid accuracy: 59.33%\n",
      "iteration:  44300, epoch: 114, train loss: 0.951932, valid loss: 1.626414, valid accuracy: 59.33%\n",
      "iteration:  44400, epoch: 114, train loss: 0.946016, valid loss: 1.626414, valid accuracy: 59.33%\n",
      "iteration:  44500, epoch: 114, train loss: 1.147947, valid loss: 1.626414, valid accuracy: 59.33%\n",
      "iteration:  44600, epoch: 115, train loss: 0.946144, valid loss: 1.529373, valid accuracy: 61.24%\n",
      "iteration:  44700, epoch: 115, train loss: 1.084807, valid loss: 1.529373, valid accuracy: 61.24%\n",
      "iteration:  44800, epoch: 115, train loss: 0.926304, valid loss: 1.529373, valid accuracy: 61.24%\n",
      "iteration:  44900, epoch: 115, train loss: 1.050377, valid loss: 1.529373, valid accuracy: 61.24%\n",
      "iteration:  45000, epoch: 116, train loss: 1.060016, valid loss: 1.502415, valid accuracy: 61.36%\n",
      "iteration:  45100, epoch: 116, train loss: 0.987963, valid loss: 1.502415, valid accuracy: 61.36%\n",
      "iteration:  45200, epoch: 116, train loss: 1.019759, valid loss: 1.502415, valid accuracy: 61.36%\n",
      "iteration:  45300, epoch: 116, train loss: 1.036760, valid loss: 1.502415, valid accuracy: 61.36%\n",
      "iteration:  45400, epoch: 117, train loss: 0.832695, valid loss: 1.637326, valid accuracy: 58.93%\n",
      "iteration:  45600, epoch: 117, train loss: 1.098856, valid loss: 1.637326, valid accuracy: 58.93%\n",
      "iteration:  45700, epoch: 117, train loss: 0.975285, valid loss: 1.637326, valid accuracy: 58.93%\n",
      "iteration:  45800, epoch: 118, train loss: 1.050233, valid loss: 1.686473, valid accuracy: 59.11%\n",
      "iteration:  45900, epoch: 118, train loss: 0.870559, valid loss: 1.686473, valid accuracy: 59.11%\n",
      "iteration:  46000, epoch: 118, train loss: 1.034540, valid loss: 1.686473, valid accuracy: 59.11%\n",
      "iteration:  46100, epoch: 118, train loss: 1.002766, valid loss: 1.686473, valid accuracy: 59.11%\n",
      "iteration:  46200, epoch: 119, train loss: 0.920196, valid loss: 1.559549, valid accuracy: 61.31%\n",
      "iteration:  46300, epoch: 119, train loss: 0.951425, valid loss: 1.559549, valid accuracy: 61.31%\n",
      "iteration:  46400, epoch: 119, train loss: 0.931112, valid loss: 1.559549, valid accuracy: 61.31%\n",
      "iteration:  46500, epoch: 119, train loss: 0.944714, valid loss: 1.559549, valid accuracy: 61.31%\n",
      "iteration:  46600, epoch: 120, train loss: 0.988364, valid loss: 1.538444, valid accuracy: 60.69%\n",
      "iteration:  46700, epoch: 120, train loss: 0.911629, valid loss: 1.538444, valid accuracy: 60.69%\n",
      "iteration:  46800, epoch: 120, train loss: 1.125909, valid loss: 1.538444, valid accuracy: 60.69%\n",
      "iteration:  46900, epoch: 120, train loss: 0.950804, valid loss: 1.538444, valid accuracy: 60.69%\n",
      "iteration:  47000, epoch: 121, train loss: 0.908009, valid loss: 1.531683, valid accuracy: 61.24%\n",
      "iteration:  47100, epoch: 121, train loss: 0.908554, valid loss: 1.531683, valid accuracy: 61.24%\n",
      "iteration:  47200, epoch: 121, train loss: 0.958306, valid loss: 1.531683, valid accuracy: 61.24%\n",
      "iteration:  47300, epoch: 121, train loss: 0.903278, valid loss: 1.531683, valid accuracy: 61.24%\n",
      "iteration:  47400, epoch: 122, train loss: 0.915070, valid loss: 1.698458, valid accuracy: 58.24%\n",
      "iteration:  47500, epoch: 122, train loss: 1.117239, valid loss: 1.698458, valid accuracy: 58.24%\n",
      "iteration:  47600, epoch: 122, train loss: 0.822686, valid loss: 1.698458, valid accuracy: 58.24%\n",
      "iteration:  47700, epoch: 122, train loss: 0.943784, valid loss: 1.698458, valid accuracy: 58.24%\n",
      "iteration:  47800, epoch: 123, train loss: 0.847619, valid loss: 1.472810, valid accuracy: 63.06%\n",
      "iteration:  47900, epoch: 123, train loss: 0.907099, valid loss: 1.472810, valid accuracy: 63.06%\n",
      "iteration:  48000, epoch: 123, train loss: 0.928673, valid loss: 1.472810, valid accuracy: 63.06%\n",
      "iteration:  48100, epoch: 124, train loss: 0.841220, valid loss: 1.538787, valid accuracy: 61.10%\n",
      "iteration:  48200, epoch: 124, train loss: 1.046222, valid loss: 1.538787, valid accuracy: 61.10%\n",
      "iteration:  48300, epoch: 124, train loss: 0.923551, valid loss: 1.538787, valid accuracy: 61.10%\n",
      "iteration:  48400, epoch: 124, train loss: 1.049787, valid loss: 1.538787, valid accuracy: 61.10%\n",
      "iteration:  48500, epoch: 125, train loss: 0.867160, valid loss: 1.540782, valid accuracy: 61.25%\n",
      "iteration:  48600, epoch: 125, train loss: 0.923911, valid loss: 1.540782, valid accuracy: 61.25%\n",
      "iteration:  48700, epoch: 125, train loss: 0.906222, valid loss: 1.540782, valid accuracy: 61.25%\n",
      "iteration:  48800, epoch: 125, train loss: 0.911654, valid loss: 1.540782, valid accuracy: 61.25%\n",
      "iteration:  48900, epoch: 126, train loss: 0.896482, valid loss: 1.754977, valid accuracy: 56.95%\n",
      "iteration:  49000, epoch: 126, train loss: 0.958084, valid loss: 1.754977, valid accuracy: 56.95%\n",
      "iteration:  49100, epoch: 126, train loss: 1.097497, valid loss: 1.754977, valid accuracy: 56.95%\n",
      "iteration:  49200, epoch: 126, train loss: 0.958574, valid loss: 1.754977, valid accuracy: 56.95%\n",
      "iteration:  49300, epoch: 127, train loss: 1.053671, valid loss: 1.572005, valid accuracy: 60.68%\n",
      "iteration:  49400, epoch: 127, train loss: 1.074216, valid loss: 1.572005, valid accuracy: 60.68%\n",
      "iteration:  49500, epoch: 127, train loss: 0.849846, valid loss: 1.572005, valid accuracy: 60.68%\n",
      "iteration:  49600, epoch: 127, train loss: 0.842031, valid loss: 1.572005, valid accuracy: 60.68%\n",
      "iteration:  49700, epoch: 128, train loss: 0.983817, valid loss: 1.629066, valid accuracy: 60.26%\n",
      "iteration:  49800, epoch: 128, train loss: 0.856843, valid loss: 1.629066, valid accuracy: 60.26%\n",
      "iteration:  49900, epoch: 128, train loss: 0.910071, valid loss: 1.629066, valid accuracy: 60.26%\n",
      "iteration:  50000, epoch: 128, train loss: 0.881069, valid loss: 1.629066, valid accuracy: 60.26%\n",
      "iteration:  50100, epoch: 129, train loss: 0.935642, valid loss: 1.524532, valid accuracy: 61.49%\n",
      "iteration:  50200, epoch: 129, train loss: 1.003484, valid loss: 1.524532, valid accuracy: 61.49%\n",
      "iteration:  50300, epoch: 129, train loss: 0.861882, valid loss: 1.524532, valid accuracy: 61.49%\n",
      "iteration:  50400, epoch: 129, train loss: 0.923135, valid loss: 1.524532, valid accuracy: 61.49%\n",
      "iteration:  50500, epoch: 130, train loss: 0.895571, valid loss: 1.631375, valid accuracy: 60.69%\n",
      "iteration:  50600, epoch: 130, train loss: 0.957018, valid loss: 1.631375, valid accuracy: 60.69%\n",
      "iteration:  50700, epoch: 130, train loss: 0.974591, valid loss: 1.631375, valid accuracy: 60.69%\n",
      "iteration:  50800, epoch: 130, train loss: 1.060980, valid loss: 1.631375, valid accuracy: 60.69%\n",
      "iteration:  50900, epoch: 131, train loss: 0.920817, valid loss: 1.547987, valid accuracy: 61.85%\n",
      "iteration:  51000, epoch: 131, train loss: 0.836901, valid loss: 1.547987, valid accuracy: 61.85%\n",
      "iteration:  51100, epoch: 131, train loss: 0.931568, valid loss: 1.547987, valid accuracy: 61.85%\n",
      "iteration:  51200, epoch: 131, train loss: 1.001562, valid loss: 1.547987, valid accuracy: 61.85%\n",
      "iteration:  51300, epoch: 132, train loss: 1.025635, valid loss: 1.632586, valid accuracy: 60.08%\n",
      "iteration:  51400, epoch: 132, train loss: 0.930767, valid loss: 1.632586, valid accuracy: 60.08%\n",
      "iteration:  51500, epoch: 132, train loss: 0.971170, valid loss: 1.632586, valid accuracy: 60.08%\n",
      "iteration:  51600, epoch: 132, train loss: 0.977373, valid loss: 1.632586, valid accuracy: 60.08%\n",
      "iteration:  51700, epoch: 133, train loss: 0.892659, valid loss: 1.539160, valid accuracy: 61.70%\n",
      "iteration:  51800, epoch: 133, train loss: 0.808494, valid loss: 1.539160, valid accuracy: 61.70%\n",
      "iteration:  51900, epoch: 133, train loss: 1.011487, valid loss: 1.539160, valid accuracy: 61.70%\n",
      "iteration:  52000, epoch: 133, train loss: 0.850660, valid loss: 1.539160, valid accuracy: 61.70%\n",
      "iteration:  52100, epoch: 134, train loss: 0.920678, valid loss: 1.492444, valid accuracy: 62.46%\n",
      "iteration:  52200, epoch: 134, train loss: 0.868267, valid loss: 1.492444, valid accuracy: 62.46%\n",
      "iteration:  52300, epoch: 134, train loss: 0.952246, valid loss: 1.492444, valid accuracy: 62.46%\n",
      "iteration:  52400, epoch: 135, train loss: 0.945366, valid loss: 1.594185, valid accuracy: 60.90%\n",
      "iteration:  52500, epoch: 135, train loss: 0.816223, valid loss: 1.594185, valid accuracy: 60.90%\n",
      "iteration:  52600, epoch: 135, train loss: 0.802508, valid loss: 1.594185, valid accuracy: 60.90%\n",
      "iteration:  52700, epoch: 135, train loss: 0.991726, valid loss: 1.594185, valid accuracy: 60.90%\n",
      "iteration:  52800, epoch: 136, train loss: 0.837376, valid loss: 1.538480, valid accuracy: 61.69%\n",
      "iteration:  52900, epoch: 136, train loss: 0.816304, valid loss: 1.538480, valid accuracy: 61.69%\n",
      "iteration:  53000, epoch: 136, train loss: 0.935417, valid loss: 1.538480, valid accuracy: 61.69%\n",
      "iteration:  53100, epoch: 136, train loss: 0.979807, valid loss: 1.538480, valid accuracy: 61.69%\n",
      "iteration:  53200, epoch: 137, train loss: 0.882742, valid loss: 1.531619, valid accuracy: 62.32%\n",
      "iteration:  53300, epoch: 137, train loss: 0.864832, valid loss: 1.531619, valid accuracy: 62.32%\n",
      "iteration:  53400, epoch: 137, train loss: 0.915310, valid loss: 1.531619, valid accuracy: 62.32%\n",
      "iteration:  53500, epoch: 137, train loss: 0.950489, valid loss: 1.531619, valid accuracy: 62.32%\n",
      "iteration:  53600, epoch: 138, train loss: 0.891963, valid loss: 1.492105, valid accuracy: 62.38%\n",
      "iteration:  53700, epoch: 138, train loss: 0.917447, valid loss: 1.492105, valid accuracy: 62.38%\n",
      "iteration:  53800, epoch: 138, train loss: 0.916551, valid loss: 1.492105, valid accuracy: 62.38%\n",
      "iteration:  53900, epoch: 138, train loss: 0.819871, valid loss: 1.492105, valid accuracy: 62.38%\n",
      "iteration:  54000, epoch: 139, train loss: 0.880781, valid loss: 1.654927, valid accuracy: 59.53%\n",
      "iteration:  54100, epoch: 139, train loss: 0.827615, valid loss: 1.654927, valid accuracy: 59.53%\n",
      "iteration:  54200, epoch: 139, train loss: 0.979480, valid loss: 1.654927, valid accuracy: 59.53%\n",
      "iteration:  54300, epoch: 139, train loss: 0.975660, valid loss: 1.654927, valid accuracy: 59.53%\n",
      "iteration:  54400, epoch: 140, train loss: 0.893460, valid loss: 1.655394, valid accuracy: 60.16%\n",
      "iteration:  54500, epoch: 140, train loss: 0.949869, valid loss: 1.655394, valid accuracy: 60.16%\n",
      "iteration:  54600, epoch: 140, train loss: 0.906108, valid loss: 1.655394, valid accuracy: 60.16%\n",
      "iteration:  54700, epoch: 140, train loss: 0.824099, valid loss: 1.655394, valid accuracy: 60.16%\n",
      "iteration:  54800, epoch: 141, train loss: 0.804015, valid loss: 1.443097, valid accuracy: 63.51%\n",
      "iteration:  54900, epoch: 141, train loss: 0.870327, valid loss: 1.443097, valid accuracy: 63.51%\n",
      "iteration:  55000, epoch: 141, train loss: 0.848815, valid loss: 1.443097, valid accuracy: 63.51%\n",
      "iteration:  55100, epoch: 141, train loss: 0.875900, valid loss: 1.443097, valid accuracy: 63.51%\n",
      "iteration:  55200, epoch: 142, train loss: 0.925260, valid loss: 1.504416, valid accuracy: 62.29%\n",
      "iteration:  55300, epoch: 142, train loss: 0.819753, valid loss: 1.504416, valid accuracy: 62.29%\n",
      "iteration:  55400, epoch: 142, train loss: 0.876904, valid loss: 1.504416, valid accuracy: 62.29%\n",
      "iteration:  55500, epoch: 142, train loss: 0.904033, valid loss: 1.504416, valid accuracy: 62.29%\n",
      "iteration:  55600, epoch: 143, train loss: 0.781670, valid loss: 1.699103, valid accuracy: 59.09%\n",
      "iteration:  55700, epoch: 143, train loss: 0.789079, valid loss: 1.699103, valid accuracy: 59.09%\n",
      "iteration:  55800, epoch: 143, train loss: 0.851607, valid loss: 1.699103, valid accuracy: 59.09%\n",
      "iteration:  55900, epoch: 143, train loss: 0.898905, valid loss: 1.699103, valid accuracy: 59.09%\n",
      "iteration:  56000, epoch: 144, train loss: 0.938283, valid loss: 1.519403, valid accuracy: 62.87%\n",
      "iteration:  56100, epoch: 144, train loss: 0.953822, valid loss: 1.519403, valid accuracy: 62.87%\n",
      "iteration:  56200, epoch: 144, train loss: 0.890359, valid loss: 1.519403, valid accuracy: 62.87%\n",
      "iteration:  56300, epoch: 144, train loss: 0.895634, valid loss: 1.519403, valid accuracy: 62.87%\n",
      "iteration:  56400, epoch: 145, train loss: 0.848872, valid loss: 1.658444, valid accuracy: 59.40%\n",
      "iteration:  56500, epoch: 145, train loss: 1.015965, valid loss: 1.658444, valid accuracy: 59.40%\n",
      "iteration:  56600, epoch: 145, train loss: 0.878644, valid loss: 1.658444, valid accuracy: 59.40%\n",
      "iteration:  56700, epoch: 146, train loss: 0.882079, valid loss: 1.553996, valid accuracy: 61.41%\n",
      "iteration:  56800, epoch: 146, train loss: 0.856276, valid loss: 1.553996, valid accuracy: 61.41%\n",
      "iteration:  56900, epoch: 146, train loss: 0.904878, valid loss: 1.553996, valid accuracy: 61.41%\n",
      "iteration:  57000, epoch: 146, train loss: 0.901281, valid loss: 1.553996, valid accuracy: 61.41%\n",
      "iteration:  57100, epoch: 147, train loss: 0.840598, valid loss: 1.533400, valid accuracy: 62.16%\n",
      "iteration:  57200, epoch: 147, train loss: 0.909578, valid loss: 1.533400, valid accuracy: 62.16%\n",
      "iteration:  57300, epoch: 147, train loss: 0.843776, valid loss: 1.533400, valid accuracy: 62.16%\n",
      "iteration:  57400, epoch: 147, train loss: 0.816544, valid loss: 1.533400, valid accuracy: 62.16%\n",
      "iteration:  57500, epoch: 148, train loss: 0.887176, valid loss: 1.477441, valid accuracy: 62.97%\n",
      "iteration:  57600, epoch: 148, train loss: 0.857168, valid loss: 1.477441, valid accuracy: 62.97%\n",
      "iteration:  57700, epoch: 148, train loss: 0.966094, valid loss: 1.477441, valid accuracy: 62.97%\n",
      "iteration:  57800, epoch: 148, train loss: 0.912473, valid loss: 1.477441, valid accuracy: 62.97%\n",
      "iteration:  57900, epoch: 149, train loss: 0.878101, valid loss: 1.597994, valid accuracy: 60.63%\n",
      "iteration:  58000, epoch: 149, train loss: 0.868684, valid loss: 1.597994, valid accuracy: 60.63%\n",
      "iteration:  58100, epoch: 149, train loss: 0.783360, valid loss: 1.597994, valid accuracy: 60.63%\n",
      "iteration:  58200, epoch: 149, train loss: 0.818678, valid loss: 1.597994, valid accuracy: 60.63%\n",
      "iteration:  58300, epoch: 150, train loss: 0.791098, valid loss: 1.395090, valid accuracy: 63.95%\n",
      "iteration:  58400, epoch: 150, train loss: 0.904357, valid loss: 1.395090, valid accuracy: 63.95%\n",
      "iteration:  58500, epoch: 150, train loss: 0.844763, valid loss: 1.395090, valid accuracy: 63.95%\n",
      "iteration:  58600, epoch: 150, train loss: 0.869245, valid loss: 1.395090, valid accuracy: 63.95%\n",
      "iteration:  58700, epoch: 151, train loss: 0.785978, valid loss: 1.607085, valid accuracy: 60.44%\n",
      "iteration:  58800, epoch: 151, train loss: 0.887488, valid loss: 1.607085, valid accuracy: 60.44%\n",
      "iteration:  58900, epoch: 151, train loss: 0.886395, valid loss: 1.607085, valid accuracy: 60.44%\n",
      "iteration:  59000, epoch: 151, train loss: 0.855274, valid loss: 1.607085, valid accuracy: 60.44%\n",
      "iteration:  59100, epoch: 152, train loss: 0.901069, valid loss: 1.443964, valid accuracy: 63.33%\n",
      "iteration:  59200, epoch: 152, train loss: 0.885395, valid loss: 1.443964, valid accuracy: 63.33%\n",
      "iteration:  59300, epoch: 152, train loss: 0.766158, valid loss: 1.443964, valid accuracy: 63.33%\n",
      "iteration:  59400, epoch: 152, train loss: 0.804136, valid loss: 1.443964, valid accuracy: 63.33%\n",
      "iteration:  59500, epoch: 153, train loss: 0.819327, valid loss: 1.431025, valid accuracy: 63.64%\n",
      "iteration:  59600, epoch: 153, train loss: 0.875737, valid loss: 1.431025, valid accuracy: 63.64%\n",
      "iteration:  59700, epoch: 153, train loss: 0.929848, valid loss: 1.431025, valid accuracy: 63.64%\n",
      "iteration:  59800, epoch: 153, train loss: 0.851832, valid loss: 1.431025, valid accuracy: 63.64%\n",
      "iteration:  59900, epoch: 154, train loss: 0.755639, valid loss: 1.553527, valid accuracy: 61.37%\n",
      "iteration:  60000, epoch: 154, train loss: 0.845941, valid loss: 1.553527, valid accuracy: 61.37%\n",
      "iteration:  60100, epoch: 154, train loss: 0.951187, valid loss: 1.553527, valid accuracy: 61.37%\n",
      "iteration:  60200, epoch: 154, train loss: 0.763087, valid loss: 1.553527, valid accuracy: 61.37%\n",
      "iteration:  60300, epoch: 155, train loss: 0.800603, valid loss: 1.456515, valid accuracy: 63.40%\n",
      "iteration:  60400, epoch: 155, train loss: 0.787010, valid loss: 1.456515, valid accuracy: 63.40%\n",
      "iteration:  60500, epoch: 155, train loss: 0.730955, valid loss: 1.456515, valid accuracy: 63.40%\n",
      "iteration:  60600, epoch: 155, train loss: 0.906719, valid loss: 1.456515, valid accuracy: 63.40%\n",
      "iteration:  60700, epoch: 156, train loss: 0.820411, valid loss: 1.541509, valid accuracy: 62.32%\n",
      "iteration:  60800, epoch: 156, train loss: 0.864837, valid loss: 1.541509, valid accuracy: 62.32%\n",
      "iteration:  60900, epoch: 156, train loss: 0.840311, valid loss: 1.541509, valid accuracy: 62.32%\n",
      "iteration:  61000, epoch: 157, train loss: 0.781036, valid loss: 1.487533, valid accuracy: 63.01%\n",
      "iteration:  61100, epoch: 157, train loss: 0.795575, valid loss: 1.487533, valid accuracy: 63.01%\n",
      "iteration:  61200, epoch: 157, train loss: 0.882028, valid loss: 1.487533, valid accuracy: 63.01%\n",
      "iteration:  61300, epoch: 157, train loss: 0.810313, valid loss: 1.487533, valid accuracy: 63.01%\n",
      "iteration:  61400, epoch: 158, train loss: 0.899980, valid loss: 1.538798, valid accuracy: 61.73%\n",
      "iteration:  61500, epoch: 158, train loss: 0.786667, valid loss: 1.538798, valid accuracy: 61.73%\n",
      "iteration:  61600, epoch: 158, train loss: 0.934061, valid loss: 1.538798, valid accuracy: 61.73%\n",
      "iteration:  61700, epoch: 158, train loss: 0.820955, valid loss: 1.538798, valid accuracy: 61.73%\n",
      "iteration:  61800, epoch: 159, train loss: 0.798781, valid loss: 1.525041, valid accuracy: 62.05%\n",
      "iteration:  61900, epoch: 159, train loss: 0.835711, valid loss: 1.525041, valid accuracy: 62.05%\n",
      "iteration:  62000, epoch: 159, train loss: 0.922217, valid loss: 1.525041, valid accuracy: 62.05%\n",
      "iteration:  62100, epoch: 159, train loss: 0.901692, valid loss: 1.525041, valid accuracy: 62.05%\n",
      "iteration:  62200, epoch: 160, train loss: 0.867682, valid loss: 1.619462, valid accuracy: 60.94%\n",
      "iteration:  62300, epoch: 160, train loss: 0.838590, valid loss: 1.619462, valid accuracy: 60.94%\n",
      "iteration:  62400, epoch: 160, train loss: 0.764926, valid loss: 1.619462, valid accuracy: 60.94%\n",
      "iteration:  62500, epoch: 160, train loss: 0.872908, valid loss: 1.619462, valid accuracy: 60.94%\n",
      "iteration:  62600, epoch: 161, train loss: 0.790677, valid loss: 1.564585, valid accuracy: 61.90%\n",
      "iteration:  62700, epoch: 161, train loss: 0.790794, valid loss: 1.564585, valid accuracy: 61.90%\n",
      "iteration:  62800, epoch: 161, train loss: 0.793324, valid loss: 1.564585, valid accuracy: 61.90%\n",
      "iteration:  62900, epoch: 161, train loss: 0.848674, valid loss: 1.564585, valid accuracy: 61.90%\n",
      "iteration:  63000, epoch: 162, train loss: 0.791937, valid loss: 1.421641, valid accuracy: 64.98%\n",
      "iteration:  63100, epoch: 162, train loss: 0.801335, valid loss: 1.421641, valid accuracy: 64.98%\n",
      "iteration:  63200, epoch: 162, train loss: 0.992799, valid loss: 1.421641, valid accuracy: 64.98%\n",
      "iteration:  63300, epoch: 162, train loss: 0.759119, valid loss: 1.421641, valid accuracy: 64.98%\n",
      "iteration:  63400, epoch: 163, train loss: 0.763944, valid loss: 1.545475, valid accuracy: 62.26%\n",
      "iteration:  63500, epoch: 163, train loss: 0.858780, valid loss: 1.545475, valid accuracy: 62.26%\n",
      "iteration:  63600, epoch: 163, train loss: 0.941318, valid loss: 1.545475, valid accuracy: 62.26%\n",
      "iteration:  63700, epoch: 163, train loss: 0.804585, valid loss: 1.545475, valid accuracy: 62.26%\n",
      "iteration:  63800, epoch: 164, train loss: 0.803371, valid loss: 1.445684, valid accuracy: 64.08%\n",
      "iteration:  63900, epoch: 164, train loss: 0.843307, valid loss: 1.445684, valid accuracy: 64.08%\n",
      "iteration:  64000, epoch: 164, train loss: 0.810886, valid loss: 1.445684, valid accuracy: 64.08%\n",
      "iteration:  64100, epoch: 164, train loss: 0.904185, valid loss: 1.445684, valid accuracy: 64.08%\n",
      "iteration:  64200, epoch: 165, train loss: 0.799614, valid loss: 1.531238, valid accuracy: 61.85%\n",
      "iteration:  64300, epoch: 165, train loss: 0.845061, valid loss: 1.531238, valid accuracy: 61.85%\n",
      "iteration:  64400, epoch: 165, train loss: 0.734805, valid loss: 1.531238, valid accuracy: 61.85%\n",
      "iteration:  64500, epoch: 165, train loss: 0.857537, valid loss: 1.531238, valid accuracy: 61.85%\n",
      "iteration:  64600, epoch: 166, train loss: 0.792753, valid loss: 1.506251, valid accuracy: 62.88%\n",
      "iteration:  64700, epoch: 166, train loss: 0.829132, valid loss: 1.506251, valid accuracy: 62.88%\n",
      "iteration:  64800, epoch: 166, train loss: 0.772430, valid loss: 1.506251, valid accuracy: 62.88%\n",
      "iteration:  64900, epoch: 166, train loss: 0.816096, valid loss: 1.506251, valid accuracy: 62.88%\n",
      "iteration:  65000, epoch: 167, train loss: 0.753604, valid loss: 1.483309, valid accuracy: 63.14%\n",
      "iteration:  65100, epoch: 167, train loss: 0.875304, valid loss: 1.483309, valid accuracy: 63.14%\n",
      "iteration:  65200, epoch: 167, train loss: 0.811784, valid loss: 1.483309, valid accuracy: 63.14%\n",
      "iteration:  65300, epoch: 168, train loss: 0.822922, valid loss: 1.607015, valid accuracy: 61.20%\n",
      "iteration:  65400, epoch: 168, train loss: 0.828559, valid loss: 1.607015, valid accuracy: 61.20%\n",
      "iteration:  65500, epoch: 168, train loss: 0.783595, valid loss: 1.607015, valid accuracy: 61.20%\n",
      "iteration:  65600, epoch: 168, train loss: 0.782294, valid loss: 1.607015, valid accuracy: 61.20%\n",
      "iteration:  65700, epoch: 169, train loss: 0.764848, valid loss: 1.534143, valid accuracy: 62.83%\n",
      "iteration:  65800, epoch: 169, train loss: 0.709591, valid loss: 1.534143, valid accuracy: 62.83%\n",
      "iteration:  65900, epoch: 169, train loss: 0.799226, valid loss: 1.534143, valid accuracy: 62.83%\n",
      "iteration:  66000, epoch: 169, train loss: 0.808409, valid loss: 1.534143, valid accuracy: 62.83%\n",
      "iteration:  66100, epoch: 170, train loss: 0.764474, valid loss: 1.531545, valid accuracy: 62.60%\n",
      "iteration:  66200, epoch: 170, train loss: 0.720437, valid loss: 1.531545, valid accuracy: 62.60%\n",
      "iteration:  66300, epoch: 170, train loss: 0.727839, valid loss: 1.531545, valid accuracy: 62.60%\n",
      "iteration:  66400, epoch: 170, train loss: 0.797168, valid loss: 1.531545, valid accuracy: 62.60%\n",
      "iteration:  66500, epoch: 171, train loss: 0.724884, valid loss: 1.482387, valid accuracy: 63.92%\n",
      "iteration:  66600, epoch: 171, train loss: 0.789799, valid loss: 1.482387, valid accuracy: 63.92%\n",
      "iteration:  66700, epoch: 171, train loss: 0.848832, valid loss: 1.482387, valid accuracy: 63.92%\n",
      "iteration:  66800, epoch: 171, train loss: 0.832514, valid loss: 1.482387, valid accuracy: 63.92%\n",
      "iteration:  66900, epoch: 172, train loss: 0.714060, valid loss: 1.483511, valid accuracy: 63.17%\n",
      "iteration:  67000, epoch: 172, train loss: 0.776320, valid loss: 1.483511, valid accuracy: 63.17%\n",
      "iteration:  67100, epoch: 172, train loss: 0.769310, valid loss: 1.483511, valid accuracy: 63.17%\n",
      "iteration:  67200, epoch: 172, train loss: 0.870585, valid loss: 1.483511, valid accuracy: 63.17%\n",
      "iteration:  67300, epoch: 173, train loss: 0.765701, valid loss: 1.445184, valid accuracy: 64.25%\n",
      "iteration:  67400, epoch: 173, train loss: 0.837997, valid loss: 1.445184, valid accuracy: 64.25%\n",
      "iteration:  67500, epoch: 173, train loss: 0.755112, valid loss: 1.445184, valid accuracy: 64.25%\n",
      "iteration:  67600, epoch: 173, train loss: 0.836995, valid loss: 1.445184, valid accuracy: 64.25%\n",
      "iteration:  67700, epoch: 174, train loss: 0.786118, valid loss: 1.450761, valid accuracy: 63.52%\n",
      "iteration:  67800, epoch: 174, train loss: 0.839340, valid loss: 1.450761, valid accuracy: 63.52%\n",
      "iteration:  67900, epoch: 174, train loss: 0.773102, valid loss: 1.450761, valid accuracy: 63.52%\n",
      "iteration:  68000, epoch: 174, train loss: 0.782106, valid loss: 1.450761, valid accuracy: 63.52%\n",
      "iteration:  68100, epoch: 175, train loss: 0.780025, valid loss: 1.367109, valid accuracy: 65.68%\n",
      "iteration:  68200, epoch: 175, train loss: 0.808009, valid loss: 1.367109, valid accuracy: 65.68%\n",
      "iteration:  68300, epoch: 175, train loss: 0.803545, valid loss: 1.367109, valid accuracy: 65.68%\n",
      "iteration:  68400, epoch: 175, train loss: 0.815147, valid loss: 1.367109, valid accuracy: 65.68%\n",
      "iteration:  68500, epoch: 176, train loss: 0.755240, valid loss: 1.423677, valid accuracy: 64.40%\n",
      "iteration:  68600, epoch: 176, train loss: 0.816578, valid loss: 1.423677, valid accuracy: 64.40%\n",
      "iteration:  68700, epoch: 176, train loss: 0.742112, valid loss: 1.423677, valid accuracy: 64.40%\n",
      "iteration:  68800, epoch: 176, train loss: 0.782632, valid loss: 1.423677, valid accuracy: 64.40%\n",
      "iteration:  68900, epoch: 177, train loss: 0.768120, valid loss: 1.447977, valid accuracy: 64.03%\n",
      "iteration:  69000, epoch: 177, train loss: 0.725850, valid loss: 1.447977, valid accuracy: 64.03%\n",
      "iteration:  69100, epoch: 177, train loss: 0.780832, valid loss: 1.447977, valid accuracy: 64.03%\n",
      "iteration:  69200, epoch: 177, train loss: 0.758314, valid loss: 1.447977, valid accuracy: 64.03%\n",
      "iteration:  69300, epoch: 178, train loss: 0.735175, valid loss: 1.374071, valid accuracy: 65.64%\n",
      "iteration:  69400, epoch: 178, train loss: 0.714003, valid loss: 1.374071, valid accuracy: 65.64%\n",
      "iteration:  69500, epoch: 178, train loss: 0.788357, valid loss: 1.374071, valid accuracy: 65.64%\n",
      "iteration:  69600, epoch: 179, train loss: 0.723613, valid loss: 1.454699, valid accuracy: 64.33%\n",
      "iteration:  69700, epoch: 179, train loss: 0.770292, valid loss: 1.454699, valid accuracy: 64.33%\n",
      "iteration:  69800, epoch: 179, train loss: 0.799492, valid loss: 1.454699, valid accuracy: 64.33%\n",
      "iteration:  69900, epoch: 179, train loss: 0.721081, valid loss: 1.454699, valid accuracy: 64.33%\n",
      "iteration:  70000, epoch: 180, train loss: 0.690070, valid loss: 1.375938, valid accuracy: 66.21%\n",
      "iteration:  70100, epoch: 180, train loss: 0.746207, valid loss: 1.375938, valid accuracy: 66.21%\n",
      "iteration:  70200, epoch: 180, train loss: 0.818304, valid loss: 1.375938, valid accuracy: 66.21%\n",
      "iteration:  70300, epoch: 180, train loss: 0.750984, valid loss: 1.375938, valid accuracy: 66.21%\n",
      "iteration:  70400, epoch: 181, train loss: 0.764076, valid loss: 1.394346, valid accuracy: 65.36%\n",
      "iteration:  70500, epoch: 181, train loss: 0.762663, valid loss: 1.394346, valid accuracy: 65.36%\n",
      "iteration:  70600, epoch: 181, train loss: 0.725852, valid loss: 1.394346, valid accuracy: 65.36%\n",
      "iteration:  70700, epoch: 181, train loss: 0.750340, valid loss: 1.394346, valid accuracy: 65.36%\n",
      "iteration:  70800, epoch: 182, train loss: 0.783953, valid loss: 1.488524, valid accuracy: 63.48%\n",
      "iteration:  70900, epoch: 182, train loss: 0.781747, valid loss: 1.488524, valid accuracy: 63.48%\n",
      "iteration:  71000, epoch: 182, train loss: 0.737270, valid loss: 1.488524, valid accuracy: 63.48%\n",
      "iteration:  71100, epoch: 182, train loss: 0.805465, valid loss: 1.488524, valid accuracy: 63.48%\n",
      "iteration:  71200, epoch: 183, train loss: 0.807258, valid loss: 1.407153, valid accuracy: 64.91%\n",
      "iteration:  71300, epoch: 183, train loss: 0.736724, valid loss: 1.407153, valid accuracy: 64.91%\n",
      "iteration:  71400, epoch: 183, train loss: 0.727761, valid loss: 1.407153, valid accuracy: 64.91%\n",
      "iteration:  71500, epoch: 183, train loss: 0.733034, valid loss: 1.407153, valid accuracy: 64.91%\n",
      "iteration:  71600, epoch: 184, train loss: 0.789385, valid loss: 1.494780, valid accuracy: 64.08%\n",
      "iteration:  71700, epoch: 184, train loss: 0.759029, valid loss: 1.494780, valid accuracy: 64.08%\n",
      "iteration:  71800, epoch: 184, train loss: 0.785557, valid loss: 1.494780, valid accuracy: 64.08%\n",
      "iteration:  71900, epoch: 184, train loss: 0.845525, valid loss: 1.494780, valid accuracy: 64.08%\n",
      "iteration:  72000, epoch: 185, train loss: 0.811649, valid loss: 1.392252, valid accuracy: 65.23%\n",
      "iteration:  72100, epoch: 185, train loss: 0.758820, valid loss: 1.392252, valid accuracy: 65.23%\n",
      "iteration:  72200, epoch: 185, train loss: 0.744987, valid loss: 1.392252, valid accuracy: 65.23%\n",
      "iteration:  72300, epoch: 185, train loss: 0.707425, valid loss: 1.392252, valid accuracy: 65.23%\n",
      "iteration:  72400, epoch: 186, train loss: 0.732619, valid loss: 1.357826, valid accuracy: 66.07%\n",
      "iteration:  72500, epoch: 186, train loss: 0.716820, valid loss: 1.357826, valid accuracy: 66.07%\n",
      "iteration:  72600, epoch: 186, train loss: 0.725447, valid loss: 1.357826, valid accuracy: 66.07%\n",
      "iteration:  72700, epoch: 186, train loss: 0.762246, valid loss: 1.357826, valid accuracy: 66.07%\n",
      "iteration:  72800, epoch: 187, train loss: 0.771816, valid loss: 1.417064, valid accuracy: 64.73%\n",
      "iteration:  72900, epoch: 187, train loss: 0.846725, valid loss: 1.417064, valid accuracy: 64.73%\n",
      "iteration:  73000, epoch: 187, train loss: 0.832935, valid loss: 1.417064, valid accuracy: 64.73%\n",
      "iteration:  73100, epoch: 187, train loss: 0.738946, valid loss: 1.417064, valid accuracy: 64.73%\n",
      "iteration:  73200, epoch: 188, train loss: 0.666894, valid loss: 1.385050, valid accuracy: 65.60%\n",
      "iteration:  73300, epoch: 188, train loss: 0.729997, valid loss: 1.385050, valid accuracy: 65.60%\n",
      "iteration:  73400, epoch: 188, train loss: 0.734037, valid loss: 1.385050, valid accuracy: 65.60%\n",
      "iteration:  73500, epoch: 188, train loss: 0.720424, valid loss: 1.385050, valid accuracy: 65.60%\n",
      "iteration:  73600, epoch: 189, train loss: 0.704827, valid loss: 1.368523, valid accuracy: 65.65%\n",
      "iteration:  73700, epoch: 189, train loss: 0.679851, valid loss: 1.368523, valid accuracy: 65.65%\n",
      "iteration:  73800, epoch: 189, train loss: 0.702185, valid loss: 1.368523, valid accuracy: 65.65%\n",
      "iteration:  73900, epoch: 190, train loss: 0.794930, valid loss: 1.379118, valid accuracy: 65.61%\n",
      "iteration:  74000, epoch: 190, train loss: 0.654164, valid loss: 1.379118, valid accuracy: 65.61%\n",
      "iteration:  74200, epoch: 190, train loss: 0.686780, valid loss: 1.379118, valid accuracy: 65.61%\n",
      "iteration:  74300, epoch: 191, train loss: 0.645011, valid loss: 1.334965, valid accuracy: 65.90%\n",
      "iteration:  74400, epoch: 191, train loss: 0.720831, valid loss: 1.334965, valid accuracy: 65.90%\n",
      "iteration:  74500, epoch: 191, train loss: 0.778523, valid loss: 1.334965, valid accuracy: 65.90%\n",
      "iteration:  74600, epoch: 191, train loss: 0.768928, valid loss: 1.334965, valid accuracy: 65.90%\n",
      "iteration:  74700, epoch: 192, train loss: 0.730648, valid loss: 1.392520, valid accuracy: 65.60%\n",
      "iteration:  74800, epoch: 192, train loss: 0.773523, valid loss: 1.392520, valid accuracy: 65.60%\n",
      "iteration:  74900, epoch: 192, train loss: 0.700754, valid loss: 1.392520, valid accuracy: 65.60%\n",
      "iteration:  75000, epoch: 192, train loss: 0.765877, valid loss: 1.392520, valid accuracy: 65.60%\n",
      "iteration:  75100, epoch: 193, train loss: 0.731417, valid loss: 1.357688, valid accuracy: 65.88%\n",
      "iteration:  75200, epoch: 193, train loss: 0.769566, valid loss: 1.357688, valid accuracy: 65.88%\n",
      "iteration:  75300, epoch: 193, train loss: 0.680907, valid loss: 1.357688, valid accuracy: 65.88%\n",
      "iteration:  75400, epoch: 193, train loss: 0.747392, valid loss: 1.357688, valid accuracy: 65.88%\n",
      "iteration:  75500, epoch: 194, train loss: 0.735642, valid loss: 1.413853, valid accuracy: 65.06%\n",
      "iteration:  75600, epoch: 194, train loss: 0.723853, valid loss: 1.413853, valid accuracy: 65.06%\n",
      "iteration:  75700, epoch: 194, train loss: 0.739215, valid loss: 1.413853, valid accuracy: 65.06%\n",
      "iteration:  75800, epoch: 194, train loss: 0.706040, valid loss: 1.413853, valid accuracy: 65.06%\n",
      "iteration:  75900, epoch: 195, train loss: 0.754852, valid loss: 1.436975, valid accuracy: 64.33%\n",
      "iteration:  76000, epoch: 195, train loss: 0.731073, valid loss: 1.436975, valid accuracy: 64.33%\n",
      "iteration:  76100, epoch: 195, train loss: 0.689145, valid loss: 1.436975, valid accuracy: 64.33%\n",
      "iteration:  76200, epoch: 195, train loss: 0.728868, valid loss: 1.436975, valid accuracy: 64.33%\n",
      "iteration:  76300, epoch: 196, train loss: 0.650195, valid loss: 1.363327, valid accuracy: 66.32%\n",
      "iteration:  76400, epoch: 196, train loss: 0.692103, valid loss: 1.363327, valid accuracy: 66.32%\n",
      "iteration:  76500, epoch: 196, train loss: 0.732455, valid loss: 1.363327, valid accuracy: 66.32%\n",
      "iteration:  76600, epoch: 196, train loss: 0.703529, valid loss: 1.363327, valid accuracy: 66.32%\n",
      "iteration:  76700, epoch: 197, train loss: 0.668718, valid loss: 1.340796, valid accuracy: 66.57%\n",
      "iteration:  76800, epoch: 197, train loss: 0.675081, valid loss: 1.340796, valid accuracy: 66.57%\n",
      "iteration:  76900, epoch: 197, train loss: 0.632715, valid loss: 1.340796, valid accuracy: 66.57%\n",
      "iteration:  77000, epoch: 197, train loss: 0.694008, valid loss: 1.340796, valid accuracy: 66.57%\n",
      "iteration:  77100, epoch: 198, train loss: 0.710375, valid loss: 1.311205, valid accuracy: 67.25%\n",
      "iteration:  77200, epoch: 198, train loss: 0.759964, valid loss: 1.311205, valid accuracy: 67.25%\n",
      "iteration:  77300, epoch: 198, train loss: 0.653204, valid loss: 1.311205, valid accuracy: 67.25%\n",
      "iteration:  77400, epoch: 198, train loss: 0.643440, valid loss: 1.311205, valid accuracy: 67.25%\n",
      "iteration:  77500, epoch: 199, train loss: 0.634752, valid loss: 1.346149, valid accuracy: 66.83%\n",
      "iteration:  77600, epoch: 199, train loss: 0.716207, valid loss: 1.346149, valid accuracy: 66.83%\n",
      "iteration:  77700, epoch: 199, train loss: 0.804743, valid loss: 1.346149, valid accuracy: 66.83%\n",
      "iteration:  77800, epoch: 199, train loss: 0.717428, valid loss: 1.346149, valid accuracy: 66.83%\n",
      "iteration:  77900, epoch: 200, train loss: 0.697469, valid loss: 1.399413, valid accuracy: 65.47%\n",
      "iteration:  78000, epoch: 200, train loss: 0.707964, valid loss: 1.399413, valid accuracy: 65.47%\n",
      "iteration:  78100, epoch: 200, train loss: 0.683387, valid loss: 1.399413, valid accuracy: 65.47%\n",
      "iteration:  78200, epoch: 200, train loss: 0.654608, valid loss: 1.399413, valid accuracy: 65.47%\n",
      "iteration:  78300, epoch: 201, train loss: 0.675377, valid loss: 1.331459, valid accuracy: 66.93%\n",
      "iteration:  78400, epoch: 201, train loss: 0.671734, valid loss: 1.331459, valid accuracy: 66.93%\n",
      "iteration:  78500, epoch: 201, train loss: 0.652505, valid loss: 1.331459, valid accuracy: 66.93%\n",
      "iteration:  78600, epoch: 202, train loss: 0.630687, valid loss: 1.283194, valid accuracy: 67.18%\n",
      "iteration:  78700, epoch: 202, train loss: 0.669418, valid loss: 1.283194, valid accuracy: 67.18%\n",
      "iteration:  78800, epoch: 202, train loss: 0.683756, valid loss: 1.283194, valid accuracy: 67.18%\n",
      "iteration:  78900, epoch: 202, train loss: 0.639840, valid loss: 1.283194, valid accuracy: 67.18%\n",
      "iteration:  79000, epoch: 203, train loss: 0.670460, valid loss: 1.338580, valid accuracy: 66.57%\n",
      "iteration:  79100, epoch: 203, train loss: 0.731488, valid loss: 1.338580, valid accuracy: 66.57%\n",
      "iteration:  79200, epoch: 203, train loss: 0.673708, valid loss: 1.338580, valid accuracy: 66.57%\n",
      "iteration:  79300, epoch: 203, train loss: 0.724448, valid loss: 1.338580, valid accuracy: 66.57%\n",
      "iteration:  79400, epoch: 204, train loss: 0.673721, valid loss: 1.361558, valid accuracy: 65.88%\n",
      "iteration:  79500, epoch: 204, train loss: 0.701460, valid loss: 1.361558, valid accuracy: 65.88%\n",
      "iteration:  79600, epoch: 204, train loss: 0.680079, valid loss: 1.361558, valid accuracy: 65.88%\n",
      "iteration:  79700, epoch: 204, train loss: 0.671756, valid loss: 1.361558, valid accuracy: 65.88%\n",
      "iteration:  79800, epoch: 205, train loss: 0.739607, valid loss: 1.343712, valid accuracy: 66.59%\n",
      "iteration:  79900, epoch: 205, train loss: 0.654284, valid loss: 1.343712, valid accuracy: 66.59%\n",
      "iteration:  80000, epoch: 205, train loss: 0.673996, valid loss: 1.343712, valid accuracy: 66.59%\n",
      "iteration:  80100, epoch: 205, train loss: 0.684787, valid loss: 1.343712, valid accuracy: 66.59%\n",
      "iteration:  80200, epoch: 206, train loss: 0.722342, valid loss: 1.325741, valid accuracy: 67.16%\n",
      "iteration:  80300, epoch: 206, train loss: 0.725976, valid loss: 1.325741, valid accuracy: 67.16%\n",
      "iteration:  80400, epoch: 206, train loss: 0.703685, valid loss: 1.325741, valid accuracy: 67.16%\n",
      "iteration:  80500, epoch: 206, train loss: 0.694907, valid loss: 1.325741, valid accuracy: 67.16%\n",
      "iteration:  80600, epoch: 207, train loss: 0.724594, valid loss: 1.332575, valid accuracy: 66.76%\n",
      "iteration:  80700, epoch: 207, train loss: 0.729259, valid loss: 1.332575, valid accuracy: 66.76%\n",
      "iteration:  80800, epoch: 207, train loss: 0.717753, valid loss: 1.332575, valid accuracy: 66.76%\n",
      "iteration:  80900, epoch: 207, train loss: 0.664785, valid loss: 1.332575, valid accuracy: 66.76%\n",
      "iteration:  81000, epoch: 208, train loss: 0.687293, valid loss: 1.345545, valid accuracy: 66.86%\n",
      "iteration:  81100, epoch: 208, train loss: 0.604484, valid loss: 1.345545, valid accuracy: 66.86%\n",
      "iteration:  81200, epoch: 208, train loss: 0.659542, valid loss: 1.345545, valid accuracy: 66.86%\n",
      "iteration:  81300, epoch: 208, train loss: 0.650337, valid loss: 1.345545, valid accuracy: 66.86%\n",
      "iteration:  81400, epoch: 209, train loss: 0.698074, valid loss: 1.357196, valid accuracy: 66.04%\n",
      "iteration:  81500, epoch: 209, train loss: 0.674674, valid loss: 1.357196, valid accuracy: 66.04%\n",
      "iteration:  81600, epoch: 209, train loss: 0.678087, valid loss: 1.357196, valid accuracy: 66.04%\n",
      "iteration:  81700, epoch: 209, train loss: 0.649635, valid loss: 1.357196, valid accuracy: 66.04%\n",
      "iteration:  81800, epoch: 210, train loss: 0.720314, valid loss: 1.333076, valid accuracy: 66.98%\n",
      "iteration:  81900, epoch: 210, train loss: 0.720609, valid loss: 1.333076, valid accuracy: 66.98%\n",
      "iteration:  82000, epoch: 210, train loss: 0.673523, valid loss: 1.333076, valid accuracy: 66.98%\n",
      "iteration:  82100, epoch: 210, train loss: 0.667456, valid loss: 1.333076, valid accuracy: 66.98%\n",
      "iteration:  82200, epoch: 211, train loss: 0.673820, valid loss: 1.346424, valid accuracy: 66.99%\n",
      "iteration:  82300, epoch: 211, train loss: 0.647639, valid loss: 1.346424, valid accuracy: 66.99%\n",
      "iteration:  82400, epoch: 211, train loss: 0.747987, valid loss: 1.346424, valid accuracy: 66.99%\n",
      "iteration:  82500, epoch: 211, train loss: 0.645485, valid loss: 1.346424, valid accuracy: 66.99%\n",
      "iteration:  82600, epoch: 212, train loss: 0.657919, valid loss: 1.344641, valid accuracy: 66.82%\n",
      "iteration:  82700, epoch: 212, train loss: 0.686475, valid loss: 1.344641, valid accuracy: 66.82%\n",
      "iteration:  82800, epoch: 212, train loss: 0.650258, valid loss: 1.344641, valid accuracy: 66.82%\n",
      "iteration:  82900, epoch: 213, train loss: 0.608497, valid loss: 1.311732, valid accuracy: 67.88%\n",
      "iteration:  83000, epoch: 213, train loss: 0.666896, valid loss: 1.311732, valid accuracy: 67.88%\n",
      "iteration:  83100, epoch: 213, train loss: 0.699988, valid loss: 1.311732, valid accuracy: 67.88%\n",
      "iteration:  83200, epoch: 213, train loss: 0.662510, valid loss: 1.311732, valid accuracy: 67.88%\n",
      "iteration:  83300, epoch: 214, train loss: 0.646132, valid loss: 1.318338, valid accuracy: 67.27%\n",
      "iteration:  83400, epoch: 214, train loss: 0.625126, valid loss: 1.318338, valid accuracy: 67.27%\n",
      "iteration:  83500, epoch: 214, train loss: 0.696669, valid loss: 1.318338, valid accuracy: 67.27%\n",
      "iteration:  83600, epoch: 214, train loss: 0.660055, valid loss: 1.318338, valid accuracy: 67.27%\n",
      "iteration:  83700, epoch: 215, train loss: 0.658881, valid loss: 1.324325, valid accuracy: 66.97%\n",
      "iteration:  83800, epoch: 215, train loss: 0.656362, valid loss: 1.324325, valid accuracy: 66.97%\n",
      "iteration:  83900, epoch: 215, train loss: 0.598437, valid loss: 1.324325, valid accuracy: 66.97%\n",
      "iteration:  84000, epoch: 215, train loss: 0.657452, valid loss: 1.324325, valid accuracy: 66.97%\n",
      "iteration:  84100, epoch: 216, train loss: 0.681173, valid loss: 1.283674, valid accuracy: 68.12%\n",
      "iteration:  84200, epoch: 216, train loss: 0.624002, valid loss: 1.283674, valid accuracy: 68.12%\n",
      "iteration:  84300, epoch: 216, train loss: 0.624411, valid loss: 1.283674, valid accuracy: 68.12%\n",
      "iteration:  84400, epoch: 216, train loss: 0.680814, valid loss: 1.283674, valid accuracy: 68.12%\n",
      "iteration:  84500, epoch: 217, train loss: 0.629429, valid loss: 1.286489, valid accuracy: 68.00%\n",
      "iteration:  84600, epoch: 217, train loss: 0.691426, valid loss: 1.286489, valid accuracy: 68.00%\n",
      "iteration:  84700, epoch: 217, train loss: 0.682926, valid loss: 1.286489, valid accuracy: 68.00%\n",
      "iteration:  84800, epoch: 217, train loss: 0.613585, valid loss: 1.286489, valid accuracy: 68.00%\n",
      "iteration:  84900, epoch: 218, train loss: 0.628546, valid loss: 1.333968, valid accuracy: 66.62%\n",
      "iteration:  85000, epoch: 218, train loss: 0.652019, valid loss: 1.333968, valid accuracy: 66.62%\n",
      "iteration:  85100, epoch: 218, train loss: 0.631885, valid loss: 1.333968, valid accuracy: 66.62%\n",
      "iteration:  85200, epoch: 218, train loss: 0.620349, valid loss: 1.333968, valid accuracy: 66.62%\n",
      "iteration:  85300, epoch: 219, train loss: 0.580331, valid loss: 1.315842, valid accuracy: 67.93%\n",
      "iteration:  85400, epoch: 219, train loss: 0.660297, valid loss: 1.315842, valid accuracy: 67.93%\n",
      "iteration:  85500, epoch: 219, train loss: 0.630717, valid loss: 1.315842, valid accuracy: 67.93%\n",
      "iteration:  85600, epoch: 219, train loss: 0.631112, valid loss: 1.315842, valid accuracy: 67.93%\n",
      "iteration:  85700, epoch: 220, train loss: 0.679149, valid loss: 1.307994, valid accuracy: 67.39%\n",
      "iteration:  85800, epoch: 220, train loss: 0.696094, valid loss: 1.307994, valid accuracy: 67.39%\n",
      "iteration:  85900, epoch: 220, train loss: 0.675219, valid loss: 1.307994, valid accuracy: 67.39%\n",
      "iteration:  86000, epoch: 220, train loss: 0.687500, valid loss: 1.307994, valid accuracy: 67.39%\n",
      "iteration:  86100, epoch: 221, train loss: 0.665050, valid loss: 1.304519, valid accuracy: 67.36%\n",
      "iteration:  86200, epoch: 221, train loss: 0.671850, valid loss: 1.304519, valid accuracy: 67.36%\n",
      "iteration:  86300, epoch: 221, train loss: 0.656401, valid loss: 1.304519, valid accuracy: 67.36%\n",
      "iteration:  86400, epoch: 221, train loss: 0.654291, valid loss: 1.304519, valid accuracy: 67.36%\n",
      "iteration:  86500, epoch: 222, train loss: 0.693881, valid loss: 1.340343, valid accuracy: 66.84%\n",
      "iteration:  86600, epoch: 222, train loss: 0.605197, valid loss: 1.340343, valid accuracy: 66.84%\n",
      "iteration:  86700, epoch: 222, train loss: 0.661285, valid loss: 1.340343, valid accuracy: 66.84%\n",
      "iteration:  86800, epoch: 222, train loss: 0.632267, valid loss: 1.340343, valid accuracy: 66.84%\n",
      "iteration:  86900, epoch: 223, train loss: 0.586787, valid loss: 1.285036, valid accuracy: 67.97%\n",
      "iteration:  87000, epoch: 223, train loss: 0.668173, valid loss: 1.285036, valid accuracy: 67.97%\n",
      "iteration:  87100, epoch: 223, train loss: 0.648956, valid loss: 1.285036, valid accuracy: 67.97%\n",
      "iteration:  87200, epoch: 224, train loss: 0.709260, valid loss: 1.289773, valid accuracy: 68.30%\n",
      "iteration:  87300, epoch: 224, train loss: 0.653880, valid loss: 1.289773, valid accuracy: 68.30%\n",
      "iteration:  87400, epoch: 224, train loss: 0.671748, valid loss: 1.289773, valid accuracy: 68.30%\n",
      "iteration:  87500, epoch: 224, train loss: 0.582242, valid loss: 1.289773, valid accuracy: 68.30%\n",
      "iteration:  87600, epoch: 225, train loss: 0.608729, valid loss: 1.289966, valid accuracy: 67.77%\n",
      "iteration:  87700, epoch: 225, train loss: 0.620803, valid loss: 1.289966, valid accuracy: 67.77%\n",
      "iteration:  87800, epoch: 225, train loss: 0.666752, valid loss: 1.289966, valid accuracy: 67.77%\n",
      "iteration:  87900, epoch: 225, train loss: 0.677206, valid loss: 1.289966, valid accuracy: 67.77%\n",
      "iteration:  88000, epoch: 226, train loss: 0.621198, valid loss: 1.297448, valid accuracy: 68.11%\n",
      "iteration:  88100, epoch: 226, train loss: 0.598985, valid loss: 1.297448, valid accuracy: 68.11%\n",
      "iteration:  88200, epoch: 226, train loss: 0.614900, valid loss: 1.297448, valid accuracy: 68.11%\n",
      "iteration:  88300, epoch: 226, train loss: 0.608757, valid loss: 1.297448, valid accuracy: 68.11%\n",
      "iteration:  88400, epoch: 227, train loss: 0.660917, valid loss: 1.290378, valid accuracy: 68.29%\n",
      "iteration:  88500, epoch: 227, train loss: 0.627171, valid loss: 1.290378, valid accuracy: 68.29%\n",
      "iteration:  88600, epoch: 227, train loss: 0.653679, valid loss: 1.290378, valid accuracy: 68.29%\n",
      "iteration:  88700, epoch: 227, train loss: 0.623528, valid loss: 1.290378, valid accuracy: 68.29%\n",
      "iteration:  88800, epoch: 228, train loss: 0.623249, valid loss: 1.276822, valid accuracy: 68.27%\n",
      "iteration:  88900, epoch: 228, train loss: 0.623182, valid loss: 1.276822, valid accuracy: 68.27%\n",
      "iteration:  89000, epoch: 228, train loss: 0.649454, valid loss: 1.276822, valid accuracy: 68.27%\n",
      "iteration:  89100, epoch: 228, train loss: 0.633301, valid loss: 1.276822, valid accuracy: 68.27%\n",
      "iteration:  89200, epoch: 229, train loss: 0.638527, valid loss: 1.281658, valid accuracy: 68.22%\n",
      "iteration:  89300, epoch: 229, train loss: 0.698139, valid loss: 1.281658, valid accuracy: 68.22%\n",
      "iteration:  89400, epoch: 229, train loss: 0.618353, valid loss: 1.281658, valid accuracy: 68.22%\n",
      "iteration:  89500, epoch: 229, train loss: 0.628586, valid loss: 1.281658, valid accuracy: 68.22%\n",
      "iteration:  89600, epoch: 230, train loss: 0.631491, valid loss: 1.298001, valid accuracy: 67.83%\n",
      "iteration:  89700, epoch: 230, train loss: 0.601718, valid loss: 1.298001, valid accuracy: 67.83%\n",
      "iteration:  89800, epoch: 230, train loss: 0.697432, valid loss: 1.298001, valid accuracy: 67.83%\n",
      "iteration:  89900, epoch: 230, train loss: 0.590242, valid loss: 1.298001, valid accuracy: 67.83%\n",
      "iteration:  90000, epoch: 231, train loss: 0.631733, valid loss: 1.310423, valid accuracy: 67.79%\n",
      "iteration:  90100, epoch: 231, train loss: 0.627565, valid loss: 1.310423, valid accuracy: 67.79%\n",
      "iteration:  90200, epoch: 231, train loss: 0.594867, valid loss: 1.310423, valid accuracy: 67.79%\n",
      "iteration:  90300, epoch: 231, train loss: 0.625091, valid loss: 1.310423, valid accuracy: 67.79%\n",
      "iteration:  90400, epoch: 232, train loss: 0.589063, valid loss: 1.261209, valid accuracy: 68.34%\n",
      "iteration:  90500, epoch: 232, train loss: 0.641961, valid loss: 1.261209, valid accuracy: 68.34%\n",
      "iteration:  90600, epoch: 232, train loss: 0.627153, valid loss: 1.261209, valid accuracy: 68.34%\n",
      "iteration:  90700, epoch: 232, train loss: 0.658409, valid loss: 1.261209, valid accuracy: 68.34%\n",
      "iteration:  90800, epoch: 233, train loss: 0.597573, valid loss: 1.283190, valid accuracy: 68.27%\n",
      "iteration:  90900, epoch: 233, train loss: 0.637551, valid loss: 1.283190, valid accuracy: 68.27%\n",
      "iteration:  91000, epoch: 233, train loss: 0.591724, valid loss: 1.283190, valid accuracy: 68.27%\n",
      "iteration:  91100, epoch: 233, train loss: 0.638670, valid loss: 1.283190, valid accuracy: 68.27%\n",
      "iteration:  91200, epoch: 234, train loss: 0.654035, valid loss: 1.290197, valid accuracy: 67.68%\n",
      "iteration:  91300, epoch: 234, train loss: 0.587062, valid loss: 1.290197, valid accuracy: 67.68%\n",
      "iteration:  91400, epoch: 234, train loss: 0.647478, valid loss: 1.290197, valid accuracy: 67.68%\n",
      "iteration:  91500, epoch: 235, train loss: 0.654095, valid loss: 1.283716, valid accuracy: 68.24%\n",
      "iteration:  91600, epoch: 235, train loss: 0.600935, valid loss: 1.283716, valid accuracy: 68.24%\n",
      "iteration:  91700, epoch: 235, train loss: 0.616496, valid loss: 1.283716, valid accuracy: 68.24%\n",
      "iteration:  91800, epoch: 235, train loss: 0.626205, valid loss: 1.283716, valid accuracy: 68.24%\n",
      "iteration:  91900, epoch: 236, train loss: 0.590792, valid loss: 1.291941, valid accuracy: 67.86%\n",
      "iteration:  92000, epoch: 236, train loss: 0.587361, valid loss: 1.291941, valid accuracy: 67.86%\n",
      "iteration:  92100, epoch: 236, train loss: 0.592475, valid loss: 1.291941, valid accuracy: 67.86%\n",
      "iteration:  92200, epoch: 236, train loss: 0.638754, valid loss: 1.291941, valid accuracy: 67.86%\n",
      "iteration:  92300, epoch: 237, train loss: 0.601791, valid loss: 1.289591, valid accuracy: 68.19%\n",
      "iteration:  92400, epoch: 237, train loss: 0.664353, valid loss: 1.289591, valid accuracy: 68.19%\n",
      "iteration:  92500, epoch: 237, train loss: 0.668694, valid loss: 1.289591, valid accuracy: 68.19%\n",
      "iteration:  92600, epoch: 237, train loss: 0.584677, valid loss: 1.289591, valid accuracy: 68.19%\n",
      "iteration:  92700, epoch: 238, train loss: 0.676986, valid loss: 1.266062, valid accuracy: 68.48%\n",
      "iteration:  92800, epoch: 238, train loss: 0.617932, valid loss: 1.266062, valid accuracy: 68.48%\n",
      "iteration:  92900, epoch: 238, train loss: 0.559780, valid loss: 1.266062, valid accuracy: 68.48%\n",
      "iteration:  93000, epoch: 238, train loss: 0.607826, valid loss: 1.266062, valid accuracy: 68.48%\n",
      "iteration:  93100, epoch: 239, train loss: 0.619594, valid loss: 1.280180, valid accuracy: 68.90%\n",
      "iteration:  93200, epoch: 239, train loss: 0.602669, valid loss: 1.280180, valid accuracy: 68.90%\n",
      "iteration:  93300, epoch: 239, train loss: 0.581936, valid loss: 1.280180, valid accuracy: 68.90%\n",
      "iteration:  93400, epoch: 239, train loss: 0.577192, valid loss: 1.280180, valid accuracy: 68.90%\n",
      "iteration:  93500, epoch: 240, train loss: 0.616452, valid loss: 1.268322, valid accuracy: 68.59%\n",
      "iteration:  93600, epoch: 240, train loss: 0.618739, valid loss: 1.268322, valid accuracy: 68.59%\n",
      "iteration:  93700, epoch: 240, train loss: 0.605832, valid loss: 1.268322, valid accuracy: 68.59%\n",
      "iteration:  93800, epoch: 240, train loss: 0.628871, valid loss: 1.268322, valid accuracy: 68.59%\n",
      "iteration:  93900, epoch: 241, train loss: 0.579496, valid loss: 1.275679, valid accuracy: 68.91%\n",
      "iteration:  94000, epoch: 241, train loss: 0.613946, valid loss: 1.275679, valid accuracy: 68.91%\n",
      "iteration:  94100, epoch: 241, train loss: 0.592315, valid loss: 1.275679, valid accuracy: 68.91%\n",
      "iteration:  94200, epoch: 241, train loss: 0.629838, valid loss: 1.275679, valid accuracy: 68.91%\n",
      "iteration:  94300, epoch: 242, train loss: 0.634885, valid loss: 1.259426, valid accuracy: 68.65%\n",
      "iteration:  94400, epoch: 242, train loss: 0.589631, valid loss: 1.259426, valid accuracy: 68.65%\n",
      "iteration:  94500, epoch: 242, train loss: 0.620035, valid loss: 1.259426, valid accuracy: 68.65%\n",
      "iteration:  94600, epoch: 242, train loss: 0.593020, valid loss: 1.259426, valid accuracy: 68.65%\n",
      "iteration:  94700, epoch: 243, train loss: 0.650030, valid loss: 1.259872, valid accuracy: 68.40%\n",
      "iteration:  94800, epoch: 243, train loss: 0.631494, valid loss: 1.259872, valid accuracy: 68.40%\n",
      "iteration:  94900, epoch: 243, train loss: 0.597168, valid loss: 1.259872, valid accuracy: 68.40%\n",
      "iteration:  95000, epoch: 243, train loss: 0.595813, valid loss: 1.259872, valid accuracy: 68.40%\n",
      "iteration:  95100, epoch: 244, train loss: 0.619119, valid loss: 1.255932, valid accuracy: 68.79%\n",
      "iteration:  95200, epoch: 244, train loss: 0.592712, valid loss: 1.255932, valid accuracy: 68.79%\n",
      "iteration:  95300, epoch: 244, train loss: 0.572277, valid loss: 1.255932, valid accuracy: 68.79%\n",
      "iteration:  95400, epoch: 244, train loss: 0.609862, valid loss: 1.255932, valid accuracy: 68.79%\n",
      "iteration:  95500, epoch: 245, train loss: 0.596035, valid loss: 1.254764, valid accuracy: 68.78%\n",
      "iteration:  95600, epoch: 245, train loss: 0.630879, valid loss: 1.254764, valid accuracy: 68.78%\n",
      "iteration:  95700, epoch: 245, train loss: 0.577852, valid loss: 1.254764, valid accuracy: 68.78%\n",
      "iteration:  95800, epoch: 246, train loss: 0.574604, valid loss: 1.267536, valid accuracy: 68.56%\n",
      "iteration:  95900, epoch: 246, train loss: 0.611203, valid loss: 1.267536, valid accuracy: 68.56%\n",
      "iteration:  96000, epoch: 246, train loss: 0.642985, valid loss: 1.267536, valid accuracy: 68.56%\n",
      "iteration:  96100, epoch: 246, train loss: 0.614718, valid loss: 1.267536, valid accuracy: 68.56%\n",
      "iteration:  96200, epoch: 247, train loss: 0.599954, valid loss: 1.268181, valid accuracy: 68.94%\n",
      "iteration:  96300, epoch: 247, train loss: 0.615135, valid loss: 1.268181, valid accuracy: 68.94%\n",
      "iteration:  96400, epoch: 247, train loss: 0.581436, valid loss: 1.268181, valid accuracy: 68.94%\n",
      "iteration:  96500, epoch: 247, train loss: 0.586006, valid loss: 1.268181, valid accuracy: 68.94%\n",
      "iteration:  96600, epoch: 248, train loss: 0.615624, valid loss: 1.272334, valid accuracy: 68.68%\n",
      "iteration:  96700, epoch: 248, train loss: 0.592280, valid loss: 1.272334, valid accuracy: 68.68%\n",
      "iteration:  96800, epoch: 248, train loss: 0.582071, valid loss: 1.272334, valid accuracy: 68.68%\n",
      "iteration:  96900, epoch: 248, train loss: 0.637756, valid loss: 1.272334, valid accuracy: 68.68%\n",
      "iteration:  97000, epoch: 249, train loss: 0.616667, valid loss: 1.266282, valid accuracy: 68.40%\n",
      "iteration:  97100, epoch: 249, train loss: 0.601371, valid loss: 1.266282, valid accuracy: 68.40%\n",
      "iteration:  97200, epoch: 249, train loss: 0.655436, valid loss: 1.266282, valid accuracy: 68.40%\n",
      "iteration:  97300, epoch: 249, train loss: 0.650467, valid loss: 1.266282, valid accuracy: 68.40%\n",
      "iteration:  97400, epoch: 250, train loss: 0.632131, valid loss: 1.260482, valid accuracy: 68.84%\n",
      "iteration:  97500, epoch: 250, train loss: 0.613577, valid loss: 1.260482, valid accuracy: 68.84%\n",
      "iteration:  97600, epoch: 250, train loss: 0.599538, valid loss: 1.260482, valid accuracy: 68.84%\n",
      "iteration:  97700, epoch: 250, train loss: 0.618416, valid loss: 1.260482, valid accuracy: 68.84%\n",
      "iteration:  97800, epoch: 251, train loss: 0.584793, valid loss: 1.260665, valid accuracy: 68.87%\n",
      "iteration:  97900, epoch: 251, train loss: 0.603058, valid loss: 1.260665, valid accuracy: 68.87%\n",
      "iteration:  98000, epoch: 251, train loss: 0.548370, valid loss: 1.260665, valid accuracy: 68.87%\n"
     ]
    }
   ],
   "source": [
    "import paddle\n",
    "import paddle.fluid as fluid\n",
    "from paddle.utils.plot import Ploter\n",
    "import numpy as np\n",
    "import time\n",
    "import math\n",
    "import os\n",
    "\n",
    "# --- Training hyperparameters ---\n",
    "epoch_num = 300   # number of training epochs, typical range [1, 300]\n",
    "train_batch = 128 # training batch size, typical range [1, 256]\n",
    "valid_batch = 128 # validation batch size, typical range [1, 256]\n",
    "displays = 100    # log metrics every `displays` iterations\n",
    "\n",
    "# --- Learning-rate schedule: linear warmup into cosine decay ---\n",
    "start_lr = 0.00001                         # initial warmup learning rate, typical range [1e-8, 5e-1]\n",
    "based_lr = 0.1                             # base learning rate after warmup, typical range [1e-8, 5e-1]\n",
    "epoch_iters = math.ceil(50000/train_batch) # iterations per epoch (CIFAR-100 train set has 50000 images)\n",
    "warmup_iter = 10 * epoch_iters             # warmup length in iterations; warmup epochs typically in [1, 10]\n",
    "\n",
    "# --- Optimizer / regularization ---\n",
    "momentum = 0.9     # optimizer momentum coefficient\n",
    "l2_decay = 0.00005 # L2 regularization coefficient, typical range [1e-5, 5e-4]\n",
    "epsilon = 0.05     # label-smoothing rate, typical range [1e-2, 1e-1]\n",
    "\n",
    "# --- Checkpointing / output paths ---\n",
    "checkpoint = False                   # True: resume model/optimizer state from model_path\n",
    "model_path = './work/out/ssrnet'     # checkpoint save/load path prefix\n",
    "result_txt = './work/out/result.txt' # per-run metrics log file (removed on a fresh run)\n",
    "class_num  = 100                     # number of classes (CIFAR-100)\n",
    "with fluid.dygraph.guard():\n",
    "    # 准备数据\n",
    "    train_reader = paddle.batch(\n",
    "        reader=paddle.reader.shuffle(reader=paddle.dataset.cifar.train100(), buf_size=50000),\n",
    "        batch_size=train_batch)\n",
    "    \n",
    "    valid_reader = paddle.batch(\n",
    "        reader=paddle.dataset.cifar.test100(),\n",
    "        batch_size=valid_batch)\n",
    "    \n",
    "    # 声明模型\n",
    "    model = ResNet()\n",
    "    \n",
    "    # 优化算法\n",
    "    consine_lr = fluid.layers.cosine_decay(based_lr, epoch_iters, epoch_num) # 余弦衰减策略\n",
    "    decayed_lr = fluid.layers.linear_lr_warmup(consine_lr, warmup_iter, start_lr, based_lr) # 线性预热策略\n",
    "    \n",
    "    optimizer = fluid.optimizer.Momentum(\n",
    "        learning_rate=decayed_lr,                           # 衰减学习策略\n",
    "        momentum=momentum,                                  # 优化动量系数\n",
    "        regularization=fluid.regularizer.L2Decay(l2_decay), # 正则衰减系数\n",
    "        parameter_list=model.parameters())\n",
    "    \n",
    "    # 加载断点\n",
    "    if checkpoint: # 是否加载断点文件\n",
    "        model_dict, optimizer_dict = fluid.load_dygraph(model_path) # 加载断点参数\n",
    "        model.set_dict(model_dict)                                  # 设置权重参数\n",
    "        optimizer.set_dict(optimizer_dict)                          # 设置优化参数\n",
    "    else:          # 否则删除结果文件\n",
    "        if os.path.exists(result_txt): # 如果存在结果文件\n",
    "            os.remove(result_txt)      # 那么删除结果文件\n",
    "    \n",
    "    # 初始训练\n",
    "    avg_train_loss = 0 # 平均训练损失\n",
    "    avg_valid_loss = 0 # 平均验证损失\n",
    "    avg_valid_accu = 0 # 平均验证精度\n",
    "    \n",
    "    iterator = 1                                # 迭代次数\n",
    "    train_prompt = \"Train loss\"                 # 训练标签\n",
    "    valid_prompt = \"Valid loss\"                 # 验证标签\n",
    "    ploter = Ploter(train_prompt, valid_prompt) # 训练图像\n",
    "    \n",
    "    best_epoch = 0           # 最好周期\n",
    "    best_accu = 0            # 最好精度\n",
    "    best_loss = 100.0        # 最好损失\n",
    "    train_time = time.time() # 训练时间\n",
    "    \n",
    "    # 开始训练\n",
    "    for epoch_id in range(epoch_num):\n",
    "        # 训练模型\n",
    "        model.train() # 设置训练\n",
    "        for batch_id, train_data in enumerate(train_reader()):\n",
    "            # 读取数据\n",
    "            image_data = np.array([x[0] for x in train_data]).reshape((-1, 3, 32, 32)).astype(np.float32) # 读取图像数据\n",
    "            image_data = train_augment(image_data)                                                        # 使用数据增强\n",
    "            image = fluid.dygraph.to_variable(image_data)                                                 # 转换数据类型\n",
    "\n",
    "            label_data = np.array([x[1] for x in train_data]).astype(np.int64)                        # 读取标签数据\n",
    "            label = fluid.dygraph.to_variable(label_data)                                             # 转换数据类型\n",
    "            label = fluid.layers.label_smooth(label=fluid.one_hot(label, class_num), epsilon=epsilon) # 使用标签平滑\n",
    "            label.stop_gradient = True                                                                # 停止梯度传播\n",
    "\n",
    "            # 前向传播\n",
    "            infer = model(image)\n",
    "            \n",
    "            # 计算损失\n",
    "            loss = fluid.layers.cross_entropy(infer, label, soft_label=True)\n",
    "            train_loss = fluid.layers.mean(loss)\n",
    "            \n",
    "            # 反向传播\n",
    "            train_loss.backward()\n",
    "            optimizer.minimize(train_loss)\n",
    "            model.clear_gradients()\n",
    "            \n",
    "            # 显示结果\n",
    "            if iterator % displays == 0:\n",
    "                # 显示图像\n",
    "                avg_train_loss = train_loss.numpy()[0]                # 设置训练损失\n",
    "                ploter.append(train_prompt, iterator, avg_train_loss) # 添加训练图像\n",
    "                ploter.plot()                                         # 显示训练图像\n",
    "                \n",
    "                # 打印结果\n",
    "                print(\"iteration: {:6d}, epoch: {:3d}, train loss: {:.6f}, valid loss: {:.6f}, valid accuracy: {:.2%}\".format(\n",
    "                    iterator, epoch_id+1, avg_train_loss, avg_valid_loss, avg_valid_accu))\n",
    "                \n",
    "                # 写入文件\n",
    "                with open(result_txt, 'a') as file:\n",
    "                    file.write(\"iteration: {:6d}, epoch: {:3d}, train loss: {:.6f}, valid loss: {:.6f}, valid accuracy: {:.2%}\\n\".format(\n",
    "                        iterator, epoch_id+1, avg_train_loss, avg_valid_loss, avg_valid_accu))\n",
    "            \n",
    "            # 增加迭代\n",
    "            iterator += 1\n",
    "            \n",
    "        # 验证模型\n",
    "        valid_loss_list = [] # 验证损失列表\n",
    "        valid_accu_list = [] # 验证精度列表\n",
    "        \n",
    "        model.eval()   # 设置验证\n",
    "        for batch_id, valid_data in enumerate(valid_reader()):\n",
    "            # 读取数据\n",
    "            image_data = np.array([x[0] for x in valid_data]).reshape((-1, 3, 32, 32)).astype(np.float32) # 读取图像数据\n",
    "            image_data = valid_augment(image_data)                                                        # 使用图像增强\n",
    "            image = fluid.dygraph.to_variable(image_data)                                                 # 转换数据类型\n",
    "            \n",
    "            label_data = np.array([x[1] for x in valid_data]).reshape((-1, 1)).astype(np.int64) # 读取标签数据\n",
    "            label = fluid.dygraph.to_variable(label_data)                                       # 转换数据类型\n",
    "            label.stop_gradient = True                                                          # 停止梯度传播\n",
    "            \n",
    "            # 前向传播\n",
    "            infer = model(image)\n",
    "            \n",
    "            # 计算精度\n",
    "            valid_accu = fluid.layers.accuracy(infer,label)\n",
    "            \n",
    "            valid_accu_list.append(valid_accu.numpy())\n",
    "            \n",
    "            # 计算损失\n",
    "            loss = fluid.layers.cross_entropy(infer, label)\n",
    "            valid_loss = fluid.layers.mean(loss)\n",
    "            \n",
    "            valid_loss_list.append(valid_loss.numpy())\n",
    "        \n",
    "        # 设置结果\n",
    "        avg_valid_accu = np.mean(valid_accu_list)             # 设置验证精度\n",
    "        \n",
    "        avg_valid_loss = np.mean(valid_loss_list)             # 设置验证损失\n",
    "        ploter.append(valid_prompt, iterator, avg_valid_loss) # 添加训练图像\n",
    "        \n",
    "        # 保存模型\n",
    "        fluid.save_dygraph(model.state_dict(), model_path)     # 保存权重参数\n",
    "        fluid.save_dygraph(optimizer.state_dict(), model_path) # 保存优化参数\n",
    "        \n",
    "        if avg_valid_loss < best_loss:\n",
    "            fluid.save_dygraph(model.state_dict(), model_path + '-best') # 保存权重\n",
    "            \n",
    "            best_epoch = epoch_id + 1                                    # 更新迭代\n",
    "            best_accu = avg_valid_accu                                   # 更新精度\n",
    "            best_loss = avg_valid_loss                                   # 更新损失\n",
    "    \n",
    "    # 显示结果\n",
    "    train_time = time.time() - train_time # 设置训练时间\n",
    "    print('complete - train time: {:.0f}s, best epoch: {:3d}, best loss: {:.6f}, best accuracy: {:.2%}'.format(\n",
    "        train_time, best_epoch, best_loss, best_accu))\n",
    "    \n",
    "    # 写入文件\n",
    "    with open(result_txt, 'a') as file:\n",
    "        file.write('complete - train time: {:.0f}s, best epoch: {:3d}, best loss: {:.6f}, best accuracy: {:.2%}\\n'.format(\n",
    "            train_time, best_epoch, best_loss, best_accu))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "### 模型预测"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# CIFAR-100 single-image inference. ResNet is defined in an earlier cell.\n",
    "import paddle.fluid as fluid\n",
    "from PIL import Image\n",
    "import numpy as np\n",
    "import time\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "image_path = './work/out/img.png' # input image path\n",
    "model_path = './work/out/ssrnet-best' # trained model path prefix\n",
    "\n",
    "# Load an image\n",
    "def load_image(image_path):\n",
    "    \"\"\"\n",
    "    Read an image file and convert it to the network input format.\n",
    "\n",
    "    Args:\n",
    "        image_path: path of the input image file.\n",
    "\n",
    "    Returns:\n",
    "        float32 ndarray of shape (1, 3, 32, 32), normalized with the\n",
    "        CIFAR per-channel statistics.\n",
    "    \"\"\"\n",
    "    # Read the image; convert('RGB') guards against RGBA/grayscale inputs\n",
    "    # (e.g. a PNG with an alpha channel) that would otherwise break the\n",
    "    # 3-channel normalization and HWC->CHW transpose below.\n",
    "    image = Image.open(image_path).convert('RGB')\n",
    "    \n",
    "    # Resize to the network input size, then switch to float32\n",
    "    image = image.resize((32, 32), Image.ANTIALIAS) # resize to 32x32\n",
    "    image = np.array(image, dtype=np.float32) # to ndarray, dtype float32\n",
    "\n",
    "    # Normalize with the CIFAR per-channel statistics\n",
    "    mean = np.array([0.4914, 0.4822, 0.4465]).reshape((1, 1, -1)) # CIFAR channel means\n",
    "    stdv = np.array([0.2471, 0.2435, 0.2616]).reshape((1, 1, -1)) # CIFAR channel standard deviations\n",
    "    \n",
    "    image = (image/255.0 - mean) / stdv # scale to [0, 1], then standardize\n",
    "    image = image.transpose((2, 0, 1)).astype(np.float32) # HWC -> CHW, keep float32\n",
    "    \n",
    "    # Add the batch dimension: (3, 32, 32) -> (1, 3, 32, 32)\n",
    "    image = np.expand_dims(image, axis=0)\n",
    "    \n",
    "    return image\n",
    "\n",
    "# Predict a single image\n",
    "with fluid.dygraph.guard():\n",
    "    # Load and preprocess the image\n",
    "    image = load_image(image_path)\n",
    "    image = fluid.dygraph.to_variable(image)\n",
    "    \n",
    "    # Build the model and restore the best weights\n",
    "    model = ResNet()                               # ResNet comes from an earlier cell\n",
    "    model_dict, _ = fluid.load_dygraph(model_path) # load saved weights\n",
    "    model.set_dict(model_dict)                     # restore weights\n",
    "    model.eval()                                   # switch to eval mode\n",
    "    \n",
    "    # Forward pass, timed\n",
    "    infer_time = time.time()              # inference start time\n",
    "    infer = model(image)\n",
    "    infer_time = time.time() - infer_time # elapsed inference time\n",
    "    \n",
    "    # Map the predicted index back to a class name. CIFAR-100 fine labels\n",
    "    # are indexed in alphabetical order, hence the sort below.\n",
    "    vlist = ['beaver', 'dolphin', 'otter', 'seal', 'whale',\n",
    "             'aquarium fish', 'flatfish', 'ray', 'shark', 'trout',\n",
    "             'orchids', 'poppies', 'roses', 'sunflowers', 'tulips',\n",
    "             'bottles', 'bowls', 'cans', 'cups', 'plates',\n",
    "             'apples', 'mushrooms', 'oranges', 'pears', 'sweet peppers',\n",
    "             'clock', 'keyboard', 'lamp', 'telephone', 'television',\n",
    "             'bed', 'chair', 'couch', 'table', 'wardrobe',\n",
    "             'bee', 'beetle', 'butterfly', 'caterpillar', 'cockroach',\n",
    "             'bear', 'leopard', 'lion', 'tiger', 'wolf',\n",
    "             'bridge', 'castle', 'house', 'road', 'skyscraper',\n",
    "             'cloud', 'forest', 'mountain', 'plain', 'sea',\n",
    "             'camel', 'cattle', 'chimpanzee', 'elephant', 'kangaroo',\n",
    "             'fox', 'porcupine', 'possum', 'raccoon', 'skunk',\n",
    "             'crab', 'lobster', 'snail', 'spider', 'worm',\n",
    "             'baby', 'boy', 'girl', 'man', 'woman',\n",
    "             'crocodile', 'dinosaur', 'lizard', 'snake', 'turtle',\n",
    "             'hamster', 'mouse', 'rabbit', 'shrew', 'squirrel',\n",
    "             'maple', 'oak', 'palm', 'pine', 'willow',\n",
    "             'bicycle', 'bus', 'motorcycle', 'pickup truck', 'train',\n",
    "             'lawn-mower', 'rocket', 'streetcar', 'tank', 'tractor'] # class names, grouped by superclass\n",
    "    vlist.sort() # alphabetical order matches the label indices\n",
    "    print('infer time: {:f}s, infer value: {}'.format(infer_time, vlist[np.argmax(infer.numpy())]) )\n",
    "    \n",
    "    image = Image.open(image_path) # reopen the original image for display\n",
    "    plt.figure(figsize=(3, 3))     # set the figure size\n",
    "    plt.imshow(image)              # draw the image\n",
    "    plt.show()                     # render the figure"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "PaddlePaddle 1.8.4 (Python 3.5)",
   "language": "python",
   "name": "py35-paddle1.2.0"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
