{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "### 数据处理"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "# One-time setup: install extra libraries into a persistent directory so they\n",
    "# survive AI Studio session restarts, then make them importable below.\n",
    "# !mkdir /home/aistudio/external-libraries\n",
    "# !pip install imgaug -t /home/aistudio/external-libraries\n",
    "import sys\n",
    "sys.path.append('/home/aistudio/external-libraries')  # expose the installed packages to this kernel"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2021-03-14 14:33:48,392-INFO: font search path ['/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/ttf', '/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/afm', '/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/matplotlib/mpl-data/fonts/pdfcorefonts']\n",
      "2021-03-14 14:33:48,745-INFO: generated new fontManager\n",
      "Cache file /home/aistudio/.cache/paddle/dataset/cifar/cifar-10-python.tar.gz not found, downloading https://dataset.bj.bcebos.com/cifar/cifar-10-python.tar.gz \n",
      "Begin to download\n",
      "\n",
      "Download finished\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "image shape: (32, 32, 3)\n",
      "label value: horse\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<Figure size 300x300 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "import os\n",
    "import paddle\n",
    "import numpy as np\n",
    "from PIL import Image\n",
    "import matplotlib.pyplot as plt\n",
    "import imgaug as ia\n",
    "import imgaug.augmenters as iaa\n",
    "\n",
    "# Read one small batch of CIFAR-10 training data\n",
    "reader = paddle.batch(\n",
    "    paddle.dataset.cifar.train10(),\n",
    "    batch_size=8) # dataset reader\n",
    "data = next(reader()) # fetch one batch\n",
    "index = 4 # index of the sample within the batch\n",
    "\n",
    "# Decode the image: values arrive flattened as float32 in [0, 1], CHW order\n",
    "image = np.array([x[0] for x in data]).astype(np.float32) # image data as float32\n",
    "image = image * 255 # rescale from [0, 1] to [0, 255]\n",
    "image = image[index].reshape((3, 32, 32)).transpose((1, 2, 0)).astype(np.uint8) # CHW -> HWC, uint8\n",
    "print('image shape:', image.shape)\n",
    "\n",
    "# Optional image augmentation (disabled here; the training cell below enables it)\n",
    "# sometimes = lambda aug: iaa.Sometimes(0.5, aug) # apply the augmenter with probability 0.5\n",
    "# seq = iaa.Sequential([\n",
    "#     sometimes(iaa.CropAndPad(px=(-4, 4))),      # random crop/pad by up to 4 pixels\n",
    "#     iaa.Fliplr(0.5)])                           # random horizontal flip\n",
    "# image = seq(image=image)\n",
    "\n",
    "# Read the label\n",
    "label = np.array([x[1] for x in data]).astype(np.int64) # label data as int64\n",
    "vlist = [\"airplane\", \"automobile\", \"bird\", \"cat\", \"deer\", \"dog\", \"frog\", \"horse\", \"ship\", \"truck\"] # class names\n",
    "print('label value:', vlist[label[index]])\n",
    "\n",
    "# Show and save the image\n",
    "image = Image.fromarray(image)           # convert to a PIL image\n",
    "os.makedirs('./work/out', exist_ok=True) # ensure the output directory exists before saving\n",
    "image.save('./work/out/img.png')         # save the decoded image\n",
    "plt.figure(figsize=(3, 3))               # set the display size\n",
    "plt.imshow(image)                        # queue the image for display\n",
    "plt.show()                               # render the figure"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "train_data: image shape (128, 3, 32, 32), label shape:(128, 1)\n",
      "valid_data: image shape (128, 3, 32, 32), label shape:(128, 1)\n"
     ]
    }
   ],
   "source": [
    "import paddle\n",
    "import numpy as np\n",
    "import imgaug as ia\n",
    "import imgaug.augmenters as iaa\n",
    "\n",
    "# CIFAR-10 per-channel statistics used for normalization\n",
    "CIFAR_MEAN = np.array([0.4914, 0.4822, 0.4465]).reshape((1, 1, 1, -1)) # channel means\n",
    "CIFAR_STDV = np.array([0.2471, 0.2435, 0.2616]).reshape((1, 1, 1, -1)) # channel standard deviations\n",
    "\n",
    "def normalize(images):\n",
    "    \"\"\"Normalize a BHWC uint8 batch and return it as BCHW float32.\"\"\"\n",
    "    images = (images/255.0 - CIFAR_MEAN) / CIFAR_STDV          # per-channel normalization\n",
    "    return images.transpose((0, 3, 1, 2)).astype(np.float32)   # BHWC -> BCHW float32\n",
    "\n",
    "def train_augment(images):\n",
    "    \"\"\"Augment and normalize a training batch (BCHW float32 in [0, 1]).\"\"\"\n",
    "    images = images * 255                                      # [0, 1] -> [0, 255]\n",
    "    images = images.transpose((0, 2, 3, 1)).astype(np.uint8)   # BCHW -> BHWC uint8 for imgaug\n",
    "    \n",
    "    sometimes = lambda aug: iaa.Sometimes(0.5, aug)            # apply the augmenter with probability 0.5\n",
    "    seq = iaa.Sequential([\n",
    "        sometimes(iaa.CropAndPad(px=(-4, 4))),                 # random crop/pad by up to 4 pixels\n",
    "        iaa.Fliplr(0.5)])                                      # random horizontal flip\n",
    "    images = seq(images=images)\n",
    "    \n",
    "    return normalize(images)\n",
    "\n",
    "def valid_augment(images):\n",
    "    \"\"\"Normalize a validation batch (BCHW float32 in [0, 1]); no augmentation.\"\"\"\n",
    "    images = images * 255                                      # [0, 1] -> [0, 255]\n",
    "    images = images.transpose((0, 2, 3, 1)).astype(np.uint8)   # BCHW -> BHWC uint8\n",
    "    return normalize(images)\n",
    "\n",
    "# Read one batch of training data\n",
    "train_reader = paddle.batch(\n",
    "    paddle.reader.shuffle(paddle.dataset.cifar.train10(), buf_size=50000),\n",
    "    batch_size=128) # reader shuffling over the full training set\n",
    "train_data = next(train_reader()) # fetch one batch\n",
    "\n",
    "train_image = np.array([x[0] for x in train_data]).reshape((-1, 3, 32, 32)).astype(np.float32) # training images\n",
    "train_image = train_augment(train_image)                                                       # augment + normalize\n",
    "train_label = np.array([x[1] for x in train_data]).reshape((-1, 1)).astype(np.int64)           # training labels\n",
    "print('train_data: image shape {}, label shape:{}'.format(train_image.shape, train_label.shape))\n",
    "\n",
    "# Read one batch of validation data\n",
    "valid_reader = paddle.batch(\n",
    "    paddle.dataset.cifar.test10(),\n",
    "    batch_size=128) # validation reader\n",
    "valid_data = next(valid_reader()) # fetch one batch\n",
    "\n",
    "valid_image = np.array([x[0] for x in valid_data]).reshape((-1, 3, 32, 32)).astype(np.float32) # validation images\n",
    "valid_image = valid_augment(valid_image)                                                       # normalize only\n",
    "valid_label = np.array([x[1] for x in valid_data]).reshape((-1, 1)).astype(np.int64)           # validation labels\n",
    "print('valid_data: image shape {}, label shape:{}'.format(valid_image.shape, valid_label.shape))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "### 模型设计"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "import paddle.fluid as fluid\n",
    "from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear, BatchNorm\n",
    "import math\n",
    "\n",
    "# Group architecture: (in_dim, out_dim, stride, number of basic blocks, queue length)\n",
    "group_arch = [(3, 256, 1, 2, 1), (256, 512, 2, 2, 1), (512, 1024, 2, 2, 1)]\n",
    "group_dim  = 1024 # feature dimension produced by the last group\n",
    "class_dim  = 10   # number of output classes\n",
    "\n",
    "# Convolution unit: Conv2D followed by BatchNorm\n",
    "class ConvUnit(fluid.dygraph.Layer):\n",
    "    def __init__(self, in_dim, out_dim, filter_size=3, stride=1, act=None):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the convolution unit; output size H/W = (H/W + 2*P - F)/S + 1\n",
    "        Args:\n",
    "            in_dim      - number of input channels\n",
    "            out_dim     - number of output channels\n",
    "            filter_size - convolution kernel size\n",
    "            stride      - convolution stride\n",
    "            act         - activation applied after batch normalization\n",
    "        Returns:\n",
    "        \"\"\"\n",
    "        super(ConvUnit, self).__init__()\n",
    "        \n",
    "        # Convolution layer\n",
    "        self.conv = Conv2D(\n",
    "            num_channels=in_dim,\n",
    "            num_filters=out_dim,\n",
    "            filter_size=filter_size,\n",
    "            stride=stride,\n",
    "            padding=(filter_size-1)//2,                       # 'same' padding: spatial size preserved at stride 1\n",
    "            param_attr=fluid.initializer.MSRA(uniform=False), # MSRA (He) weight initialization\n",
    "            bias_attr=False,                                  # no conv bias; BatchNorm provides the shift\n",
    "            act=None)\n",
    "        \n",
    "        # Batch normalization layer\n",
    "        self.norm = BatchNorm(\n",
    "            num_channels=out_dim,\n",
    "            param_attr=fluid.initializer.Constant(1.0), # scale initialized to a constant 1\n",
    "            bias_attr=fluid.initializer.Constant(0.0),  # shift initialized to a constant 0\n",
    "            act=act)\n",
    "    \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Apply convolution followed by batch normalization.\n",
    "        Args:\n",
    "            x - input features\n",
    "        Returns:\n",
    "            x - output features\n",
    "        \"\"\"\n",
    "        # Convolution\n",
    "        x = self.conv(x)\n",
    "        \n",
    "        # Batch normalization (activation applied here if configured)\n",
    "        x = self.norm(x)\n",
    "        \n",
    "        return x\n",
    "\n",
    "# Projection unit (shortcut path): average pool, 1x1 convolution, batch norm\n",
    "class ProjUnit(fluid.dygraph.Layer):\n",
    "    def __init__(self, in_dim, out_dim, filter_size=1, stride=1, act=None):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the projection unit; output size H/W = (H/W + 2*P - F)/S + 1\n",
    "        Args:\n",
    "            in_dim      - number of input channels\n",
    "            out_dim     - number of output channels\n",
    "            filter_size - pooling window size\n",
    "            stride      - pooling stride\n",
    "            act         - activation applied after batch normalization\n",
    "        Returns:\n",
    "        \"\"\"\n",
    "        super(ProjUnit, self).__init__()\n",
    "        \n",
    "        # Average pooling (performs the spatial downsampling)\n",
    "        self.pool = Pool2D(\n",
    "            pool_size=filter_size,\n",
    "            pool_stride=stride,\n",
    "            pool_padding=0,\n",
    "            pool_type='avg')\n",
    "        \n",
    "        # 1x1 convolution (matches the channel count)\n",
    "        self.conv = Conv2D(\n",
    "            num_channels=in_dim,\n",
    "            num_filters=out_dim,\n",
    "            filter_size=1,\n",
    "            stride=1,\n",
    "            padding=0,\n",
    "            param_attr=fluid.initializer.MSRA(uniform=False), # MSRA (He) weight initialization\n",
    "            bias_attr=False,                                  # no conv bias; BatchNorm provides the shift\n",
    "            act=None)\n",
    "        \n",
    "        # Batch normalization layer\n",
    "        self.norm = BatchNorm(\n",
    "            num_channels=out_dim,\n",
    "            param_attr=fluid.initializer.Constant(1.0), # scale initialized to a constant 1\n",
    "            bias_attr=fluid.initializer.Constant(0.0),  # shift initialized to a constant 0\n",
    "            act=act)\n",
    "    \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Apply pooling, 1x1 convolution and batch normalization.\n",
    "        Args:\n",
    "            x - input features\n",
    "        Returns:\n",
    "            x - output features\n",
    "        \"\"\"\n",
    "        # Pooling\n",
    "        x = self.pool(x)\n",
    "        \n",
    "        # Convolution\n",
    "        x = self.conv(x)\n",
    "        \n",
    "        # Batch normalization\n",
    "        x = self.norm(x)\n",
    "        \n",
    "        return x\n",
    "\n",
    "# Queue structure: a chain of conv units that progressively split off channels\n",
    "class SSRQueue(fluid.dygraph.Layer):\n",
    "    def __init__(self, in_dim, out_dim, stride=1, queues=2, act=None):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the queue structure; output size H/W = (H/W + 2*P - F)/S + 1\n",
    "        Args:\n",
    "            in_dim  - number of input channels\n",
    "            out_dim - number of output channels\n",
    "            stride  - stride: 1 keeps the spatial size, 2 downsamples\n",
    "            queues  - queue length; split scale is 2^(n-1)\n",
    "            act     - activation function\n",
    "        Returns:\n",
    "        \"\"\"\n",
    "        super(SSRQueue, self).__init__()\n",
    "        \n",
    "        # Queue bookkeeping\n",
    "        self.queues = queues # queue length\n",
    "        self.split_list = [] # channel count carried forward after each split\n",
    "        \n",
    "        # Build the queue items\n",
    "        self.queue_list = [] # queue items\n",
    "        for i in range(queues):\n",
    "            # Register a queue item\n",
    "            queue_item = self.add_sublayer( # build the queue item\n",
    "                'queue_' + str(i),\n",
    "                ConvUnit(\n",
    "                    in_dim=(in_dim if i==0 else out_dim), # after the first item, in_dim == out_dim\n",
    "                    out_dim=out_dim,\n",
    "                    filter_size=3,\n",
    "                    stride=(stride if i==0 else 1), # only the first item may downsample\n",
    "                    act=act))\n",
    "            self.queue_list.append(queue_item) # keep the item\n",
    "            \n",
    "            # Halve the channel width for the next item\n",
    "            if i < (queues-1): # if this is not the last item\n",
    "                out_dim = out_dim//2 # halve the output channels\n",
    "                self.split_list.append(out_dim) # record how many channels continue\n",
    "            \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Extract features from the input feature map.\n",
    "        Args:\n",
    "            x - input features\n",
    "        Returns:\n",
    "            x - output features\n",
    "        \"\"\"\n",
    "        # Run the queue, splitting off part of the channels at each step\n",
    "        x_list = [] # per-item kept outputs\n",
    "        for i, queue_item in enumerate(self.queue_list):\n",
    "            if i < (self.queues-1): # if this is not the last item\n",
    "                x = queue_item(x) # extract features\n",
    "                x_item, x = fluid.layers.split(input=x, num_or_sections=[-1, self.split_list[i]], dim=1)\n",
    "                x_list.append(x_item) # keep the split-off remainder; the rest continues down the queue\n",
    "            else: # the last item is not split\n",
    "                x = queue_item(x) # extract features\n",
    "                x_list.append(x) # keep the full output\n",
    "        \n",
    "        # Concatenate all kept pieces along the channel axis\n",
    "        x = fluid.layers.concat(input=x_list, axis=1) # channel-wise concat of queue outputs\n",
    "        \n",
    "        return x\n",
    "    \n",
    "# Basic residual structure: a shortcut/projection path plus a convolution path\n",
    "class SSRBasic(fluid.dygraph.Layer):\n",
    "    def __init__(self, in_dim, out_dim, stride=1, queues=1, is_pass=True):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the basic structure; output size H/W = (H/W + 2*P - F)/S + 1\n",
    "        Args:\n",
    "            in_dim  - number of input channels\n",
    "            out_dim - number of output channels\n",
    "            stride  - stride\n",
    "            queues  - queue length\n",
    "            is_pass - if True use an identity shortcut, otherwise a projection\n",
    "        Returns:\n",
    "        \"\"\"\n",
    "        super(SSRBasic, self).__init__()\n",
    "        \n",
    "        # Identity-shortcut flag\n",
    "        self.is_pass = is_pass\n",
    "        \n",
    "        # Projection path (used when the shortcut must change shape/channels)\n",
    "        self.proj = ProjUnit(in_dim=in_dim, out_dim=out_dim, filter_size=stride, stride=stride, act=None)\n",
    "        \n",
    "        # Convolution path: a single conv unit, or a queue when queues > 1\n",
    "        if queues==1:\n",
    "            self.conv = ConvUnit(in_dim=in_dim, out_dim=out_dim, filter_size=3, stride=stride, act='relu')\n",
    "        else:\n",
    "            self.conv = SSRQueue(in_dim=in_dim, out_dim=out_dim, stride=stride, queues=queues, act='relu')\n",
    "        \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Extract features from the input feature map.\n",
    "        Args:\n",
    "            x - input features\n",
    "        Returns:\n",
    "            x - output features\n",
    "            y - same tensor as x, exposed as a side output\n",
    "        \"\"\"\n",
    "        # Shortcut path\n",
    "        if self.is_pass: # identity shortcut\n",
    "            x_pass = x\n",
    "        else:            # otherwise project\n",
    "            x_pass = self.proj(x)\n",
    "        \n",
    "        # Convolution path\n",
    "        x_conv = self.conv(x)\n",
    "        \n",
    "        # Combine the two paths\n",
    "        x = fluid.layers.elementwise_add(x=x_pass, y=x_conv, act=None) # elementwise sum of the two paths\n",
    "        y = x\n",
    "        \n",
    "        return x, y\n",
    "    \n",
    "# Block structure: a sequence of basic structures\n",
    "class SSRBlock(fluid.dygraph.Layer):\n",
    "    def __init__(self, in_dim, out_dim, stride=1, basics=1, queues=1):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the block structure; output size H/W = (H/W + 2*P - F)/S + 1\n",
    "        Args:\n",
    "            in_dim  - number of input channels\n",
    "            out_dim - number of output channels\n",
    "            stride  - stride\n",
    "            basics  - number of basic structures\n",
    "            queues  - queue length\n",
    "        Returns:\n",
    "        \"\"\"\n",
    "        super(SSRBlock, self).__init__()\n",
    "        \n",
    "        # Build the list of basic structures\n",
    "        self.block_list = [] # basic structures\n",
    "        for i in range(basics):\n",
    "            block_item = self.add_sublayer( # build the block item\n",
    "                'block_' + str(i),\n",
    "                SSRBasic(\n",
    "                    in_dim=(in_dim if i==0 else out_dim), # after the first item, in_dim == out_dim\n",
    "                    out_dim=out_dim,\n",
    "                    stride=(stride if i==0 else 1), # only the first item may downsample\n",
    "                    queues=queues,\n",
    "                    is_pass=(False if i==0 else True))) # only the first item needs a projection shortcut\n",
    "            self.block_list.append(block_item) # keep the item\n",
    "    \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Extract features from the input feature map.\n",
    "        Args:\n",
    "            x      - input features\n",
    "        Returns:\n",
    "            x      - output features\n",
    "            y_list - list of per-item side outputs\n",
    "        \"\"\"\n",
    "        y_list = [] # per-item side outputs\n",
    "        for block_item in self.block_list:\n",
    "            x, y_item = block_item(x) # extract features\n",
    "            y_list.append(y_item) # keep the side output\n",
    "            \n",
    "        return x, y_list\n",
    "\n",
    "# Group structure: a sequence of blocks following the group_arch table\n",
    "class SSRGroup(fluid.dygraph.Layer):\n",
    "    def __init__(self):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the group structure; output size H/W = (H/W + 2*P - F)/S + 1\n",
    "        Args:\n",
    "        Returns:\n",
    "        \"\"\"\n",
    "        super(SSRGroup, self).__init__()\n",
    "        \n",
    "        # Build the list of blocks from the module-level architecture table\n",
    "        self.group_list = [] # blocks\n",
    "        for i, block_arch in enumerate(group_arch):\n",
    "            group_item = self.add_sublayer( # build the group item\n",
    "                'group_' + str(i),\n",
    "                SSRBlock(\n",
    "                    in_dim=block_arch[0],\n",
    "                    out_dim=block_arch[1],\n",
    "                    stride=block_arch[2],\n",
    "                    basics=block_arch[3],\n",
    "                    queues=block_arch[4]))\n",
    "            self.group_list.append(group_item) # keep the item\n",
    "    \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Extract features from the input feature map.\n",
    "        Args:\n",
    "            x      - input features\n",
    "        Returns:\n",
    "            x      - output features\n",
    "            y_list - list of per-block side-output lists\n",
    "        \"\"\"\n",
    "        y_list = [] # per-block side outputs\n",
    "        for group_item in self.group_list:\n",
    "            x, y_item = group_item(x) # extract features\n",
    "            y_list.append(y_item) # keep the side outputs\n",
    "            \n",
    "        return x, y_list\n",
    "        \n",
    "# SSR network: backbone groups, global average pooling and a softmax classifier\n",
    "class SSRNet(fluid.dygraph.Layer):\n",
    "    def __init__(self):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Initialize the network; output size H/W = (H/W + 2*P - F)/S + 1\n",
    "        Args:\n",
    "        Returns:\n",
    "        \"\"\"\n",
    "        super(SSRNet, self).__init__()\n",
    "        \n",
    "        # Backbone (group structure)\n",
    "        self.backbone = SSRGroup() # output: N*C*H*W\n",
    "        \n",
    "        # Classification head\n",
    "        self.pool = Pool2D(global_pooling=True, pool_type='avg') # output: N*C*1*1\n",
    "        \n",
    "        stdv = 1.0/(math.sqrt(group_dim)*1.0)                    # uniform-init bound for the fc weights\n",
    "        self.fc = Linear(                                        # output: N*10\n",
    "            input_dim=group_dim,\n",
    "            output_dim=class_dim,\n",
    "            param_attr=fluid.initializer.Uniform(-stdv, stdv),   # uniform weight initialization\n",
    "            bias_attr=fluid.initializer.Constant(0.0),           # constant bias initialization\n",
    "            act='softmax')\n",
    "    \n",
    "    def forward(self, x):\n",
    "        \"\"\"\n",
    "        Purpose:\n",
    "            Classify the input image.\n",
    "        Args:\n",
    "            x - input image\n",
    "        Returns:\n",
    "            x - predicted class probabilities\n",
    "        \"\"\"\n",
    "        # Extract features (side outputs are unused here)\n",
    "        x, y_list = self.backbone(x)\n",
    "        \n",
    "        # Predict\n",
    "        x = self.pool(x)\n",
    "        x = fluid.layers.reshape(x, [x.shape[0], -1])\n",
    "        x = self.fc(x)\n",
    "        \n",
    "        return x"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "total param: 20362762\n",
      "infer shape: [1, 10]\n"
     ]
    }
   ],
   "source": [
    "import paddle.fluid as fluid\n",
    "from paddle.fluid.dygraph.base import to_variable\n",
    "import numpy as np\n",
    "\n",
    "with fluid.dygraph.guard():\n",
    "    # Build a dummy input batch\n",
    "    x = np.random.randn(1, 3, 32, 32).astype(np.float32)\n",
    "    x = to_variable(x)\n",
    "    \n",
    "    # Run a forward pass\n",
    "    backbone = SSRNet() # instantiate the network\n",
    "    \n",
    "    infer = backbone(x) # forward inference\n",
    "    \n",
    "    # Report parameter count and output shape\n",
    "    parameters = 0\n",
    "    for p in backbone.parameters():\n",
    "        parameters += np.prod(p.shape) # accumulate the parameter counts\n",
    "    \n",
    "    print('total param:', parameters)\n",
    "    print('infer shape:', infer.shape)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "### 训练模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAD8CAYAAABw1c+bAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJzs3Xl4VNX5wPHvO5PJvkOAkAQCCLIFMERERRBFBTfcBXHX4tLWrdpq259atRW1rUtdkeIuaK07KkUFQZF9l50QIIHskIXsM+f3x51MZrKQAIFA5v08zzyZOXeZM1d577lnFWMMSiml/IetrTOglFLq6NLAr5RSfkYDv1JK+RkN/Eop5Wc08CullJ/RwK+UUn5GA79SSvkZDfxKKeVnmg38IpIkInNFZL2I/CIidzeyj4jICyKyVUTWiEiq17YbRGSL+3VDa/8ApZRSB0eaG7krIvFAvDFmhYhEAMuBS4wx6732OR/4LXA+cArwvDHmFBGJBZYBaYBxHzvUGLP3QN/ZsWNHk5ycfOi/Siml/Mzy5cvzjTFxLdk3oLkdjDF7gD3u9yUisgFIANZ77TYeeNtYd5FFIhLtvmGcCcwxxhQCiMgcYCww40DfmZyczLJly1qSf6WUUoCI7GjpvgdVxy8iycBJwOJ6mxKAXV6fM91pTaUrpZRqIy0O/CISDvwXuMcYU9zaGRGRySKyTESW5eXltfbplVJKubUo8IuIAyvov2eM+biRXbKAJK/Pie60ptIbMMZMNcakGWPS4uJaVE2llFLqEDRbxy8iAvwb2GCM+WcTu30O/EZEZmI17hYZY/aIyGzgbyIS497vXOChQ8lodXU1mZmZVFRUHMrhyktwcDCJiYk4HI62zopSqg00G/iB04HrgLUissqd9kegG4Ax5lXgK6wePVuBMuAm97ZCEXkcWOo+7rHaht6DlZmZSUREBMnJyVj3InUojDEUFBSQmZlJjx492jo7Sqk20JJePT8CB4y07t48v25i23Rg+iHlzktFRYUG/VYgInTo0AFtR1HKfx1XI3c16LcOvY5K+bfjKvA3J6e4gpKK6rbOhlJKHdPaVeDPK6mkpKKm1c9bUFDAkCFDGDJkCF26dCEhIcHzuaqqqkXnuOmmm9i0aVOLv3PatGncc889h5plpZRqUksad48bR6oGo0OHDqxaZbVrP/roo4SHh3P//ff77GOMwRiDzdb4vfSNN944MplTSqmD1K5K/ILgambuoda0detW+vfvz6RJkxgwYAB79uxh8uTJpKWlMWDAAB577DHPviNGjGDVqlXU1NQQHR3Ngw8+yODBgzn11FPJzc094Pds376d0aNHM2jQIM455xwyMzMBmDlzJgMHDmTw4MGMHj0agLVr13LyySczZMgQBg0aRHp6+pG7AEqp49JxWeL/yxe/sH53w8HDZVVO7DYhKODg72f9u0byyEUDDvq4jRs38vbbb5OWlgbAlClTiI2NpaamhtGjR3PFFVfQv39/n2OKiooYNWoUU6ZM4b777mP69Ok8+OCDTX7HnXfeya233sqkSZOYOnUq99xzDx999BF/+ctfmDdvHp07d2bfvn0AvPzyy9x///1cffXVVFZW0twkfEop/9OuSvxtoVevXp6gDzBjxgxSU1NJTU1lw4YNrF+/vsExISEhjBs3DoChQ4eSkZFxwO9YvHgxEyZMAOD6669nwYIFAJx++ulcf/31TJs2DZfLBcBpp53GE088wdNPP82uXbsIDg5ujZ+plGpHjssSf1Ml803ZJQQ7bHTvEHbU8hIWVvddW7Zs4fnnn2fJkiVER0dz7bXXNjrSODAw0PPebrdTU3NoDdKvv/46ixcv5ssvvyQ1NZWVK1dy3XXXceqppzJr1izGjh3L9OnTGTly5CGdXynVPrWrEr8ItGXNRnFxMREREURGRrJnzx5mz57dKucdPnw4H374IQDvvvuuJ5Cn
p6czfPhwHn/8cWJiYsjKyiI9PZ0TTjiBu+++mwsvvJA1a9a0Sh6UUu3HcVnib4qItdpLW0lNTaV///707duX7t27c/rpp7fKeV966SVuvvlmnnzySTp37uzpIXTvvfeyfft2jDGce+65DBw4kCeeeIIZM2bgcDjo2rUrjz76aKvkQSnVfjS7AldbSEtLM/UXYtmwYQP9+vU74HFbc0uxCfSMCz+S2WsXWnI9lVLHDxFZboxJa37P9ljV435fWe2kotrZpvlRSqljUfsK/NTV8W/KKWFzTkmb5kcppY5F7Srw20S037pSSjWjXQX+tm7cVUqp40H7Cvy0bXdOpZQ6HrSvwC+C0TK/UkodUPsK/By5Ev/o0aMbDMh67rnnuOOOOw54XHi41bV09+7dXHHFFY3uc+aZZ1K/++qB0pVS6nC0r8B/BEfuTpw4kZkzZ/qkzZw5k4kTJ7bo+K5du/LRRx8diawppdRBaWeB3+rVsy23tNXPfcUVVzBr1izPwisZGRns3r2bM844g9LSUs4++2xSU1NJSUnhs88+a3B8RkYGAwcOBKC8vJwJEybQr18/Lr30UsrLy5v9/hkzZpCSksLAgQP5wx/+AIDT6eTGG29k4MCBpKSk8OyzzwLwwgsv0L9/fwYNGuSZ3E0ppWodn1M2fP0gZK9tkNzB6SSypl6RP6iFP7FLCoyb0uTm2NhYhg0bxtdff8348eOZOXMmV111FSJCcHAwn3zyCZGRkeTn5zN8+HAuvvjiJte2feWVVwgNDWXDhg2sWbOG1NTUA2Zt9+7d/OEPf2D58uXExMRw7rnn8umnn5KUlERWVhbr1q0D8EzNPGXKFLZv305QUJAnTSmlajVb4heR6SKSKyLrmtj+gIiscr/WiYhTRGLd2zJEZK172xGvrBYaBtrWbOz1ru7xruYxxvDHP/6RQYMGMWbMGLKyssjJyWnyPPPnz+faa68FYNCgQQwaNOiA37t06VLOPPNM4uLiCAgIYNKkScyfP5+ePXuSnp7Ob3/7W7755hsiIyM955w0aRLvvvsuAQHH571dKXXktCQqvAm8CLzd2EZjzDPAMwAichFwrzGm0GuX0caY/MPMp68mSuZ7iyvIKfadBnlgQlSTJe+DNX78eO69915WrFhBWVkZQ4cOBeC9994jLy+P5cuX43A4SE5ObnQ65tYWExPD6tWrmT17Nq+++ioffvgh06dPZ9asWcyfP58vvviCv/71r6xdu1ZvAEopj2ZL/MaY+UBhc/u5TQRmHFaODkNj4b01G3vDw8MZPXo0N998s0+jblFREZ06dcLhcDB37lx27NhxwPOMHDmS999/H4B169Y1O3XysGHD+OGHH8jPz8fpdDJjxgxGjRpFfn4+LpeLyy+/nCeeeIIVK1bgcrnYtWsXo0eP5qmnnqKoqIjS0tZv81BKHb9arRgoIqHAWOA3XskG+J+IGOA1Y8zU1vq+JvLQIM2awqEuvayyxlqe0WE/pO+YOHEil156qU8Pn0mTJnHRRReRkpJCWloaffv2PeA57rjjDm666Sb69etHv379PE8OTYmPj2fKlCmMHj0aYwwXXHAB48ePZ/Xq1dx0002e1beefPJJnE4n1157LUVFRRhjuOuuu4iOjj6k36qUap9aNC2ziCQDXxpjBh5gn6uBa40xF3mlJRhjskSkEzAH+K37CaKx4ycDkwG6des2tH6puSXTCOeXVrJ7n28PmX7xkTjsdQ82azKtxs5Bif4dDHVaZqXal7aalnkC9ap5jDFZ7r+5wCfAsKYONsZMNcakGWPS4uLiDikDAgiGcMoJpMZ93kM6lVJKtVutEvhFJAoYBXzmlRYmIhG174FzgUZ7BrUWEcGGoactmyjZD6CzdSqlVD3N1vGLyAzgTKCjiGQCjwAOAGPMq+7dLgX+Z4zZ73VoZ+ATd717APC+Meabw8msMeaAPXQEcGKjxtgIpBqAGpchwGWw21qnZ097oDdDpfxbs4HfGNPsnATG
mDexun16p6UDgw81Y/UFBwdTUFBAhw4dmgz+1e5GzkocBLkD/7Y8q0eLv9fp1zLGUFBQQHBwcFtnRSnVRo6bzt2JiYlkZmaSl5fX5D6llTXsK6umWkoIopocU9fQu6EkBICcveU+n/1RcHAwiYmJbZ0NpVQbOW4Cv8PhoEePHgfcx+ky/LytgPUfP8p15e9wRcUbVBAEQMaUCwAY9+Asn89KKeVv2tUkbXabMKJ3R7IDugKQLE1Pm6CUUv6qXQX+WrvtCQAkS3Yb50QppY497TLw77FbJf4eGviVUqqBdhn4K+2h5JkoLfErpVQj2mXgt4mw3XQh2eYb+F0u7b+ulFLtMvALkOHq0qCqx6kDl5RSqp0GfoEM04VOso8wrH77OcUV9P7T122cM6WUanvtM/BjVfVAXZfOlTt1CUKllIL2GvjdJX6o69Lp0moepZQC2nXg7wzUBf4abdhVSimgvQZ+hHKCyTYx9HD37KmqcbVxrpRS6tjQPgO/e/LOiohkT4m/ssbZhjlSSqljR7sO/EGdezMgyJrNs7JaS/xKKQXtNPDb3JG/PCyJkOq9hFBBlVMDv1JKQTsN/LULtVSExgPQVQq0xK+UUm7tMvBPPDkJgM5JvQCIl0LKq7WOXyml4DhaiOVgjEuJtxZaKdwOQLwUUFZV08a5UkqpY0O7LPF7RFrTM3elgNJKDfxKKQXtPfAHBFEZ3NEq8Vf6VvUYHcmrlPJTzQZ+EZkuIrkisq6J7WeKSJGIrHK/HvbaNlZENonIVhF5sDUz3lKVofF0lQL216vq0YG8Sil/1ZIS/5vA2Gb2WWCMGeJ+PQYgInbgJWAc0B+YKCL9Dyezh6I6LJ54KWRTdolPulMjv1LKTzUb+I0x84HCQzj3MGCrMSbdGFMFzATGH8J5Dkt1eFfipYDckgqfdA38Sil/1Vp1/KeKyGoR+VpEBrjTEoBdXvtkutOOKmd4V8KlgkjKfNO1jl8p5adaI/CvALobYwYD/wI+PZSTiMhkEVkmIsvy8vJaIVsWZ4TVsydeCnzTnRr4lVL+6bADvzGm2BhT6n7/FeAQkY5AFpDktWuiO62p80w1xqQZY9Li4uION1t1541MBBoJ/FriV0r5qcMO/CLSRdxzJIjIMPc5C4ClQG8R6SEigcAE4PPD/b6DVtuXX3ybKWpcOoWDUso/NTtyV0RmAGcCHUUkE3gEcAAYY14FrgDuEJEaoByYYKxO8jUi8htgNmAHphtjfjkiv+IATHgXnEYalPg17iul/FWzgd8YM7GZ7S8CLzax7Svgq0PLWusICgokhxi61gv8FdVOXC6DzSZtlDOllGob7XvkLhAfFUJ0lx7E4xv4z/z7PH719rI2ypVSSrWddh/4AUI7dmtQ1QPw3cbcNsiNUkq1Lb8I/EQm0NW2F9CePEop5R+BPyqRIKqIpaT5fZVSqp3zj8AfaQ0Ybqy6Ryml/I1/BP4oK/DX79mjlFL+yD8CfxOjd5VSyh/5R+APiwN7YIPRu0op5Y/8I/DbbBARryV+pZTCXwI/QFQiXbTEr5RSfhT4IxPoipb4lVLKjwJ/VzpLIYLv7Gy66LpSyt/4T+CPSiRQnHSk2Ce5WhdkUUr5Gf8J/E0M4iqvdrZFbpRSqs34UeC3FmS5tJdvcqUGfqWUn/GfwB9lDeK6aWAgH995Gid2jgCgolpXZFFK+Rf/CfyhHcAeBMWZpHaL4bdnnwBARY2W+JVS/sV/Ar+IVd1TZK33HhxgByAjfz/lVRr8lVL+w38CP1jVPcW7AQh2WIF/8jvLueWtpW2ZK6WUOqr8K/BHJkCxVeKPDKlbbnjhNh3YpZTyH34W+LtaJX6Xk5jQwLbOjVJKtYlmA7+ITBeRXBFZ18T2SSKyRkTWishCERnstS3Dnb5KRNp+ZfOoBDBOKM0lNqwu8EcEBRzgIKWUal9aUuJ/Exh7gO3bgVHGmBTgcWBqve2jjTFDjDFph5bFVuSel5/iLEID7Z7kMA38Sik/
0mzgN8bMB5qc1tIYs9AYs9f9cRGQ2Ep5a33uQVwUZSIinuSwIHsTByilVPvT2nX8twBfe302wP9EZLmITG7l7zp4UbUl/t0+yeHBjjbIjFJKtY1Wq+MQkdFYgX+EV/IIY0yWiHQC5ojIRvcTRGPHTwYmA3Tr1q21suUrJAYCQjw9e2qFa4lfKeVHWqXELyKDgGnAeGOMp2+kMSbL/TcX+AQY1tQ5jDFTjTFpxpi0uLi41shWYxl1D+LK9Em2eVX7KKVUe3fYgV9EugEfA9cZYzZ7pYeJSETte+BcoNGeQUdVVIKnqufeMX0AqKrR+XqUUv6j2aoeEZkBnAl0FJFM4BHAAWCMeRV4GOgAvOxuMK1x9+DpDHziTgsA3jfGfHMEfsPBiUyE7T8AcPeY3izNKNSpmZVSfqXZwG+MmdjM9luBWxtJTwcGNzyijUV2hZI94KwBewABdqG6Qkv8Sin/4V8jdwE69gHjgpy1ADjsNgr3V5FXUtnGGVNKqaPD/wL/CWcDAptnAxBot5G5t5yT//pt2+ZLKaWOEv8L/GEdIfFk2GQNNwiwa48epZR/8b/AD3DiWNizCor34LA3fglKK2tYltHkgGWllDpu+Wfg7+OeemjL7CYD/z0zV3LFqz+zd3/VUcyYUkodef4Z+Dv1h6husHk2jiaqetZmFQG6NKNSqv3xz8AvAn3Og21zCZHGS/SCdUMw5mhmTCmljjz/DPxg1fPXlNNr/8oD7uZ0aeRXSrUv/hv4k88ARxi9i372JHkH+drpe6qdOrhLKdW++G/gDwiC+MF0LvNML+QT5MWTpiV+pVT74r+BH6BTPzrs34a1bABUNVK61xK/Uqq98e/A37k/Qc5S4t0LjFV7zdJZu0JXYzcDpZQ6nvl34O/UH4ATbTuBxqt1qnXKZqVUO+Pngb8fACeKtTBLY9U6WsevlGpv/Dvwh8RQ7OjEibZdgG+1jvbqUUq1V/4d+IGckJ70FSvwVzcS+LOLK/hpa35bZE0ppY4Ivw/82cE96SVZ2HFSXdOwWuehj9cyadpiKnXqBqVUO+H3gb8gtBdBUkOyZB+wB09+qU7WppRqH/w+8J9z5mgA+souXpm3jSe/3gDUzdVTK7e44qjnTSmljgS/D/xhCQMwYqOPbRffbsjhtR/SqXG6PHX8tXRpRqVUe+H3gR9HMJWRPTwNvACbckoa7JZXqoFfKdU+tCjwi8h0EckVkXVNbBcReUFEtorIGhFJ9dp2g4hscb9uaK2Mt6aq2L708Qr8q3bto/4s/bnFGviVUu1DS0v8bwJjD7B9HNDb/ZoMvAIgIrHAI8ApwDDgERGJOdTMHinVHfvSXXIJwarHz9xb7pmyoZaW+JVS7UWLAr8xZj5woAVoxwNvG8siIFpE4oHzgDnGmEJjzF5gDge+gbQJZ6cB2MQwQDIItNvIaaQhV5dgVEq1F61Vx58A7PL6nOlOayr9mOJMHA7AKbaN9I2PIK+kkhqXb9fO0sqatsiaUkq1umOmcVdEJovIMhFZlpeXd1S/OyC8Ixtc3RhuW0+XyGByiiuoqTdHjwZ+pVR70VqBPwtI8vqc6E5rKr0BY8xUY0yaMSYtLi6ulbLVMoF2G4tc/UizbSY+3EZuSWWDOXr2a+BXSrUTrRX4Pweud/fuGQ4UGWP2ALOBc0Ukxt2oe6477ZjiCBAWufoTIlWksI19ZdXsr/SdoqG0QgO/Uqp9CGjJTiIyAzgT6CgimVg9dRwAxphXga+A84GtQBlwk3tboYg8Dix1n+oxY8yBGonbhMNuY4nrRABOrFwDDKe8ui7w9+gYRoH26lFKtRMtCvzGmInNbDfAr5vYNh2YfvBZO3oCbMJeIskLPYHkkpXAcJ/tKQlRzFq7B2NMg26eSil1vDlmGnfbkoiQMeUC4gaeTXjechziW61zYpcInC5DRbXOza+UOv5p4PeWPAKpLuPsSN/258hg68FIe/YopdoDDfzeup8OwNnBWzxJg5OiCQuyAr/2
7FFKtQca+L2FdYBO/Uk1vwBw/7l9+OzXpxMepCV+pVT7oYG/vuQRdN+/Ggc1ZBSUATQI/F+v3cP2/P1tlkWllDocGvjrSz6DAGcFKZKOzd2BJ7y2jr+ihmqnizveW8GlL//UhplUSqlD16LunH4leQQATwwuJOH8/gB1dfxVNWTuLQdgX1l12+RPKaUOkwb++kJjofNA+leuhlAH4FvVk55XCkCwQx+WlFLHJ41ejUk+A3YtgRprtK4n8FfUeOr2o0MCWbVrH6mPz6FQp2xWSh1HNPA3JnkE1JRD1nIAQgPtiFjdOWsDv8sYXpm3lcL9VSxOL2jL3Cql1EHRwN+Y7qcBAhk/AtbI3vDAAEornZ5FWvaWVWFzT9/gNKapMyml1DFHA39jQmOhy0DYPt+TFBYUQGllNdnuwF/tNJRVWRO5OV0a+JVSxw8N/E1JHmnV81dbgT4syM7+Sic5xZU47FZJv6jc6tlTqXP4KKWOIxr4m5I8ApyVkLUMgPBgB/vKq8gvreSUHh0AWLVrHwDFFdq1Uyl1/NDA35SkU6y/u5YAEB5kJyO/DGNgXEoXLkiJ9+xa4rVIS7XTxZ8+WcvufeVHNbtKKdVS2o+/KWEdoMMJnsAfFhhAljuYd4kMpnuHUM+utSX+mUt28uDHawHILqrg3zeefJQzrZRSzdMS/4EknQKZS8AYKmusevwhSdEM6xFLTGigZ7faEv9fv9rgSdOePkqpY5UG/gNJPBnKCqAwnQ17igF48rIUIoIdxITVBf5idyNvia7Lq5Q6DmhVz4F41fM/c+UYZv+STd8uEQDEhjk8uzUW8HWBRqXUsUpL/AcS1xeCImHXYkb1ieNvl6Z41txNKF7FZTarn7/26lFKHU808B+IzQaJaZC5tMGm7iue4v8c7wJWiX/+5jzfQ3VRdqXUMapFgV9ExorIJhHZKiIPNrL9WRFZ5X5tFpF9XtucXts+b83MHxVJp0DOL1BRXJe2v4Cg7BXESCmhVFBcUc1D7t48tTTuK6WOVc3W8YuIHXgJOAfIBJaKyOfGmPW1+xhj7vXa/7fASV6nKDfGDGm9LB9liScDxpqwrddoK23rHASr106C5JNeEUJYYABxEUHklVS2XV6VUqoFWlLiHwZsNcakG2OqgJnA+APsPxGY0RqZOyYkpgHi6c8PwObZnrf3pAXjdBmy9pVz1omdPOlVzua7c1bVuLju34tZvWtfs/sqpVRraUngTwB2eX3OdKc1ICLdgR7A917JwSKyTEQWicglh5zTthIcBZ36w/YfwBhw1sC276DX2QDEOXM9u3bvWDeoq7yq+a6dm3NKWLAl3zPoSymljobWbtydAHxkjHF6pXU3xqQB1wDPiUivxg4UkcnuG8SyvLy8xnZpO4MnwI6fYP2nsGsxVBRB6nVgcxBbk+3ZLSE6xPO+vNrZ2Jl8VLj30dW8lFJHU0siThaQ5PU50Z3WmAnUq+YxxmS5/6YD8/Ct//feb6oxJs0YkxYXF9eCbB1Fw++E+CHw1QOwegbYHFaJPyqByMq6wB8XEeR5Xztl84HU3hyCA+ytn2ellGpCSwL/UqC3iPQQkUCs4N6gd46I9AVigJ+90mJEJMj9viNwOrC+/rHHPHsAjH8RyvfCyneshVqCIyEqidDy3Z7dOnkF/vJ6gf+qV39m2oJ0n7RS98AvLfErpY6mZiOOMaYG+A0wG9gAfGiM+UVEHhORi712nQDMNMZnkpp+wDIRWQ3MBaZ49wY6rnRJgTN+Z73vc571N7obwfvrAn9iTF0df1mVk7Iqa3H2GqeLJRmFPDFrg/cZPfP5Bzu0xK+UOnpaNGWDMeYr4Kt6aQ/X+/xoI8ctBFIOI3/HljPuh5BYGHKN9TkqCfv+HBzUUE0AwQ47nSKCyC2ppKi8mv4PW71/lvzpbM8pjDGICPvKqnhp3lZAA79S6ujSOoaDERAIw2+3qnkAopMQDPFSwAPnnQjAkj+N4dGL+vscll1U4XmfU2z18//b
VxvYVWhN86yjfJVSR5NO0nY4oqw27x9+1RPpeYIn+ZpTutMpMpg731sBwPb8/Z5tWfvK6BIVjNNrtcYqpy7dqJQ6erTEfziircAvRZk+yYEBNs5Pieevlw4EYEdBmWdb7bz+HcLrpnWubEHXT6WUai0a+A9HZCIgsG9Xw2171tApsAqAjIK6En+VO/CXVtYN8Kq9GdQ39PE5/H32ptbLr1JKoYH/8AQEQkQXKKoX+KvLYdoY+qS/A0CGV1VPbZCvXbwlLNBOZU3jJf6C/VW8OHfrEci4UsqfaeA/XFFJsG+nb1rBNnBWElm6DYAVO/cRHmQ1p9SW+IsrahicFE1q95hGS/xOly7dqJQ6MjTwH67opIYl/oItAISU7vAk3XpGDwD2lVfzwdKdFJdXExkcQFCAjZU793HJSz/5nKKsBXP9KKXUodBePYcrKgnWfw4ul7VwC0C+VT0TWLQdMIAwvGcHYAt/n73JM3DrgpR4z2lW1Zuhs/7IX6WUai1a4j9c0UngqobSujl7akv8tqpSOmAt4NKzYxhQN1oXIDLEKvE3piVz/Sil1KHQwH+4orpZf7179uRvBrvVXbO75AAQ5q7j956XJzLYgc3W+OAtDfxKqSNFA//hcvfl99TzG2NV9SSPACBZrCeBQHfJvqK6riE3MsThU6Xzxk/bPe/Lqw+ujt8Yw7xNubi0UVgp1QwN/IcrpodVut+zyvpcmgNVJda0zWIn2ZbN4MQoAmxC/cL9eQO6+PTn/8sX6ykqs6qCDrbE/826bG58Yylv/ZxxGD9GKeUPNPAfLkewtS7v9gXW53yrfp9O/SA6iTtShA9uOxUR8ZT6AR4a15cTOoU36L2zu8iav8c78PtOeAoul2Hv/iqftPxSaw6gLbmlrfKzlFLtlwb+1tBjJOxZbc3X727YpWNviO2JoyjDM/tmoN263P3jI7ltlLUQWWmlb8l+7qZcjDE+VUA9HvqKD5bWjRV45YdtnPT4HHJL6iZ/C3Iv5lJZrfP+KKUOTAN/a0g+AzCwY6FVvx8QYk3nENsLCtKten8gyH0DiAiu60Xb0WvOHoCnv9nE5a8sZNmOQp/06T9meN4v2GItTTlj8S7+/OlajDEE2K16pKZGASulVC0N/K0hMQ2cPPrKAAAgAElEQVQCgq3qnoIt0OEEq09/bE+oLIIyK4jXlvgjgh2eQ5+7egi3jezpc7oVO/fx7iLf0cC9O4d73neLtRZ8efbbzby7aCfF5TWeZRybmvdHKaVqaeBvDQFB0G04bJ9v1fF3dE/RHOsO6IXWkotBjtrAX1fi7xAexIPj+vLc1UMO+BX7yur6/4cG+o67K66o9lQNVehMn0qpZujI3daSfAZ8/ziIDVKutNK8A3/SyV4lft/LLiJcclIC4UEBRIc6WLAln+e/s9oK0rrHUFpZw94yqzH3L1/8wpsLM3yO31tW5bkxNBb4d+8rx+kyJMWGNtimlPI/GvhbS4+R1l/jshp2AWK6WzcCd4nf4Q78kV5VPd7G9O8MWCX4Wh/dcRr3fbiKxelWddEbP2U0OO63M1Z65vwvcS/gvmFPMV2jQ4gKcXDalO8ByJhywWH8QKVUe6GBv7V0PQkCw6Gq1KrjB6sKKCoRCq1ZOmtL7fHRwQc81YgT4vj92BPp2dGq148NDWRvWRU1TazU5b3Qy96yKlwuw7jnF5DaLZqP7zz9cH+ZUqqd0Tr+1mJ3QLdTrfcd6pZhJLanp8Sf615vt1szVS6BATbuPPMExg7sAkBMWCBlVU4u/NePzWajcH8VBe4+/it27mt0n737qzjnnz8wd2Nus+dTSrU/LQr8IjJWRDaJyFYRebCR7TeKSJ6IrHK/bvXadoOIbHG/bmjNzB9zTrkNTv5V3WLsYAX+gm3grPGsrZsUc3B17dGhVtXQxuySZvetdhq25DS+X+1AsC25pWzJLeWmN5c2GBxW5NWIrJRqn5oN/CJiB14CxgH9gYki0r+RXT8wxgxxv6a5j40F
HgFOAYYBj4hITKvl/ljT+xy44O++aYnDoGIfvDycC2yLEFx0jQ45qNPGRx24aqi+dbuLAAioN0dE7TxBtaN8Ac/TAcCXa3Yz+LH/sTaz6KC+Tyl1fGlJiX8YsNUYk26MqQJmAuNbeP7zgDnGmEJjzF5gDjD20LJ6nBo8Aa5+F2wBvBT4An8I+MBn6oaWGNWnE9GhDjpFBPHs1YOb3X9dljUVtMNu8ynRZxdXUF7l9An83u0DC7cVALC83uAxpVT70pIIlAB4LzGV6U6r73IRWSMiH4lI0kEe236JQL+L4I6fqDrxIiaHzoXK5qtsvNltwo9/OIvZ94wkKqTxHkHe1mVZJfbAAJtnYBfA6L/P44ynvyevpC7w7yzc75nRM8I9dfR+nRJaqXattRp3vwCSjTGDsEr1bx3sCURksogsE5FleXl5rZStY4jNTuCIu7FVlcKaDw768PCgAGLCAhsEfodd+PK3I3zS0t2LuxeVVzfo/plfWkV+aSWRwQGIwKbsUoY+MYenvtnomSnUuzupUqr9aUngzwKSvD4nutM8jDEFxpjaYuQ0YGhLj/U6x1RjTJoxJi0uLq4leT/+JKZB/BBY8rpn/h62zIHsdS0+RYjDtwfu0O4xDEyI8nyuPzjsmdmbGpwju6iChJhQ4iODefWHbewtq+aVedt4b7E1TURecWWDY5RS7UdLAv9SoLeI9BCRQGAC8Ln3DiIS7/XxYmCD+/1s4FwRiXE36p7rTvNPIjBsMuRthIwfYdEr8N4V8OW9LT5FbWBP6261kd/unuVzxq+G8/3vRnHTackADEyIbPR4gHW7i+kYHkhCTOONzLVTQwNk7Stn977yRvdTSh2fmh3AZYypEZHfYAVsOzDdGPOLiDwGLDPGfA7cJSIXAzVAIXCj+9hCEXkc6+YB8Jgxxr9bDgdeBv/7E3x6h7VqV0gMZC2zpnQOab7DU1JsKK9em8qpvTr6VPuc2qsDAPee04fxJyWwKbuEO99b0eg58koqGdOvM/sra4C9DbbnepX47/1gFXYRZkwefpA/VCl1rGpRHb8x5itjTB9jTC9jzF/daQ+7gz7GmIeMMQOMMYONMaONMRu9jp1ujDnB/XrjyPyM44gjBFKvt4J+/0vgqnesaR7Sf2jxKcYOjG+ykVdE6BUXzriBXfj5obOaPMcJncI93UovS03gkiFdPdv2lVezYEse98xcyZLthWzILubb9Tm88dP2JkcPH0hZVQ1/+mQthfUWj1FKtQ2dsqEtjHwAOg+EAZdZn4MiYdt3MOCSVvsKESE+KoTBSdGs3rWPVyalsmBrPu+76/F7xYV5unqGBtq5ZEgCn67aDcC+siqenbPZM/J3X1k1d7y3nGqnwRgYnBTF56t2szG7hH9NPIlOkb7jDD5ekYndJowfYnXg+nFLPu8t3smwHrGMH5LAtAXpnJwcy+Ck6Fb7vUqpltPA3xaCImDQVXWfe4yErd9bDb5Sb2He/QUQ1uGQv+rtm4cxe102Ywd2YVxKPB8u3UWNy9ArLpyt7mUaBaFTRF3wdhnYnOO7hGO107pJ/HdFJo99ud6T/p/lmfx6tDVFxZNfbyC/pIr/rsgE8AT+X3Zb4wqyiypwuQxPzLKagHTSOKXahs7Vcyw44WwozoT8zb7pq2bAMz1h5buHfOqoEAdXnZyEuG8o/7n9VCYO60ZCdAgXD+5Kn87h3DKiB50ig3yO814EvlZyh1BPEK9VXuXEGMNDH6/ltR/SPUHfW+0xe4oqPLOHgq4WplRb0cB/LOh1tvV32/d1aXtWw5f3AAJzHoEK9zQK1RXw43NQ1DDAtsRJ3WJ48rIUbDahU2Qw/7t3FMkdwwh22BnVJ45LT2p6fN1vzupN13rTR2zNLWVPUQUzluxssH+108VT32xk/mZrXEZ2UQWFZXX1/Cf++Rtuf2c5v+wuatFNoHB/FbsKy5rdTyl1YBr4jwUx3a0ZPbd+Z30uK4QProPQDjDpIygrgB+eBmcN/PcW
+PYR+PB6cLbuQKu3bh7GpFO6eT4/eVkKt47o4ekamhgTwvBevtVOW3JL2Jjt+xRQa+G2Al6Zt80zOd2e4grP1NS1vvklmwte+JE/ftz8WIbznpvPGU/P9UmbuymXsqqGTydKqaZp4D9W9Drb6tv/2a/h1TOgZA9c9Tb0HgOp18HiV61gv/FLGHg5ZC2H75+wjq2phLUfQebyuoFhh6h2JlCAUX3i+POF/enTOQKALpHBDHIPFvvT+f24d0wf0vP3exaJAas6aEy/TgB8vXaPJz0hOoTsonL2NtGzZ8767GbzVjvVRO0qY5l7y7jpjaXc/5/VDfYt3F/F5kZmKV2TuY9Vuxqfrlopf6GB/1jR70KoKYeNsyB+MFz9njXSF+Csh8ERCptmwag/wBXTYeiN8NNz8N1j8K+h1pPAtLPgldNh+ZuHfAOICgn0vK/t7jkkKZqwQDtdooKZNLw7D1/Yn+tO7c4pPWMxBl6bn05iTAjf3HMG8x4YzUWDra6hM5fWTdN0Tv/O5JZUsruootHvrfLqJvr9xhwuf2Uh67KKuHvmSt5bvMNn353u6p4c93iDr9Zm8/uPfIP/hS8s4Nxn5zeYdvriF3/ikpd+OqhrolR7o716jhU9RsLvNkNYHNjq3Y/D4+Cy16FgK5z6ayvtvCdh5yJY8A9rGogL/mk1EC9/E764G6r21+17EGpL/KP61E2bcc2wbowd2IVghx2Am0f0AKwbQoBNqHEZbjwtmb5drCqhmNBAn3P+86rB2G3Cmwvh/z5tvEqnotrFX2et5/KhiXy5eg/Ld+zlsS/Xs2R7IZ+t2k1wgN2z7/b8/fTpHEG2103kw2WZpCREMSgxmpSEKM8NJrekks7u7qa1k9Ep5e808B9LIjo3ve3EerNZB4bCtR9D3garmqi2G2jqjfCf62H2nyC6u/UkUZ+zGub/HVa9B5dNhe6neTY57DZ+eOBM4qPqpnMIsNus7p77dkHxbuh2CgDBDjuf/vp0Fm8v5Lrh3T37ewf+4T1juSw1kTWZzVevvL5gO+8u2km1u/S/ZHtdFdLvvKpztrsnodtT5DuVxP999gsAn/26brnJtZlFBCfbqXG52JJb10X1o+WZRAQHcN6ALj7nMMawNGMvJyfHeHpCqebd9s4yhvfswE2n92jrrKgW0Kqe41lUApwwxrfvv80Gl06FhFT4762wc7HvMTnr4fWz4IcpUFkMMyZYaV66dwhruGaAMfCfG+Cti6Akx5M8MCGKW0b0sPY3BozxPDWceWIc79xi3SSSO4Y1/3NCHIQE2qlppmS+Yoc1zUROsVWqf2z8AJ/t7yyqqxq69e1lnP3PeYx6Zh4Tpi7ypN//n9Xc9s7yBuf+cNkurnrtZ75e59vmsGFPMQ99vIaqmroqqV92F5H84CxW7vSd9iK/tNLzdJFTXMEpf/uWez9YRUW1s8HNqr55m3IpqTc76n+XZ7IoveCAx7Ull8sw+5cc/vLF+uZ3VscEDfztUWAoTJxpPUG8dSEsfwtcTvjxWZg6yiq1X/0u3P6j1Xbw7mWw16se3Rhr1tDaXkZgtT1kLQdnJSx6yff7nDWw+DV4ugc8O4Ck+ffzyZgSXp6UisNu/S8WGWzdDE7sHMH5KVYp++EL6xZye+C8E1n00NmeIJ7iNePohJOT6B9vVSNFBAWweHshc9bn8PqC7QBcf2qyT3Y+Wp7JVWmJdAiznjzyS6saHZcAkJG/n9vfWc66rCJcLsOmbOup4M73VjD7l7rg/+TXG5mxZBcvzd3qaTf4YrXVeP38d1vo8dAstuaWUl7lJO2Jb7l++hIA3lu8k5ziSj5ZmcUZT8/l1Ce/97l5eLdB5JZUcOMbS3ny640YY3C5DMYYfvef1T43rfq+WrvnsKfD2FdWxdKMpqfRWpdV5PME5i2v9BiZzTVrBWSvbetcHBe0qqe9Cu8Et35vNfp+cRfMf8aaH6jfxXDhsxDW0drv2v/C9HHw0jDoPx56nglLp1lBXuxw
zQfQ6yyrB1GH3tB5ACz9N5x+D4TGwu5V1oRzueutdoqQGNg4i5Mq3oMenaHXaE+Wlv15DGGBATwxyyoZOuzC+SldyNpb7hn9e+GgrnSKCMbpMkx8fRG/GX0C9593Iq/PT2f9nmIuH5rImwsz+NXbywA8cww9d/UQHvx4jWd5ydN6dSQowM47i3Zw2UkJbMopoU/nCPrFR/C3rzxTSXHRv36kpLKGFTv3clqvDp5pKwBue2c5H952Kv3iIyhwB7fnv9uC3SZMGJZEbon1xDFvkzVO4c+fruWCQVZ+ftyaT/KDswBI7RbN2qwiT6+kzTklFFdUE+yw83+frqNbbCh3nd3bE7zfX7yTT1dmkRRYymu3nefJjzGmQfVTel4pd763gjH9OvGX8QOZuzGXSad04x//20zvzuH0j4+kYH8VReXVDaq1vL02P53XftjGsj+fQ2xYXVXdOz9nsDG7xDNld/3R1gWllZzyt+9oc84amHkN2B1w1yqw2Zs/xo9J/V4Px4K0tDSzbNmyts5G++BywrwnYfVMGPOo1RW0ft113mZY/IrVJbSyGKKS4Iz7rIbi/K1w8i2w8AW48k3o2AdeOQ3O/CP0HAXvXmEtLj/uaeh7gXXu6gp4ebj1j/D2nyDAt7H3m3V7uP3dFXxx+1BSSn+CvhdCgO/IYYDF6QWcnByLzSYYY1i+Yy9Du8fw3YZcPl6Zyc2n9yC1Www2r7WFa4Ptgt+PpkN4IP9dnsmEYd08Tx7peaWc9Q/fCfFG9onzDDJrzICukWzMLmHisCTeXdRwoFpT4tjHlfZ59Ln4ARbuKufDZdaguz9f0I8nZm1gSsBUUm1beMt5Hh85R1JJ3XUaY1vOq45n2dRpHBfsugYQhnaP4b93nMaqXftI7hDK1txSrnj1ZwCCHdbvq6h2ER8VzJ5Gek8teuhsnMbQMTwQY/A01u+vrOH66UtYvmMvL09K5fyUulnWa69nrdWPnMuL32/hmlO606NjGJ+tyuLumas82zc/Ma5BNWGN00WAvfnKhdpYdEhtKxtnWYEfYMIM6Hv+wZ/jOCciy40xaS3aVwO/8qgqgz2rIGGoFYhLsmHaGOtJIX4w/Gqe1YYwYyJk/ASuGoiMh+s/t9obvG2eDe9fBec8Dqff1eCrisuriJx1B6z7CNJutp5CDldlKRkz72N+SQLX/frhRgNIjdPFfR+uZvLInkxbkM6nq3az/M9jePjzX3DYhP+tz+Gus3tz/and6f9w3dIRNoE5943imtcXkVNciR0nVwQs4A7bZ3zsPIMXnJf5fM+tpycxdvlk0thA1eDrCBj/LxZtL+BXby1jf5WTc2zLeD3wn+wxscRLIQUmgldqLuYd5zn8edB+Jmy5l32uYOKkmKerr+Zlp+8y1yd0CqdzZBA/bW287n/isCSWZuz1zMfk7dSeHVibVcQ1p3Sj66rnGVrxMz+4BjPXOYRzB3bhtiGhEJXIvthBDHlsjs+xZ/TuyIIt+VwxNJG/XzmYV+Zt46lv6p6gFvx+NEmxoVZ14dJpzF6bxV3bh3F+SlfuO6cPS7YXsmLnXjqEB3HvmN6ICFtzisFVzVXTVjDh5CT2llVT43QxLqULZ/XtzKL0AlISoghzLw3qdBke/WwNI/t0Zkz/zmTtKyfxqxut0e42O3Tsg7nuE4oranxmsV2+Yy9dooKJjwz2KSwcqtonNO8npPpqnC6cxhAUcOSfQDTwq9aTuwE+uR3GPlnX+ydzGUw7G+L6WkG/qd5I70+AjAXw6yUNbww/PmeNQO6cAjlr4fJ/Q8oVdduNgW8ftbqsnn43nDiu7kll7w5rNtMdC60bUuoNUF1m3Wj2uHv/jLgXzn6k4dONF6fLUFiQQ9zO2TDgUuvJxcvU+dtYsWMfoYF2To8r5/LqL9mzJ4sftxYw2LaNPrYs9tujCakpYtP5M3ElnUpuSSWFpVVcXvw2/PAUJA2HXYtgwvvQ9wKenbOZ6d+t5vuQ32MP68CUpFd5NLWM
pW//iVH2NeRKRzo5KiEqgVF5D3Cv899cYl/I5qGP8OLPBXSWQn50pbDBWL2ohvWIJTwogO835nJ5aiLDesTQMTyIs6vmYQq38ebubjy/NoAx9hVcavuRQiK4v/p2KglktG0lbwQ+w05XHF2lgACpa3twYeO5pOd5YUvTEwTW3gQAuncIZUdBGSP7xPHgeSfCd4/QP92ahf39mrP4v5qbcOIb/P418SRS4wPZ8uLlDJatPFB9G9+6hnq2BwbYeOumYUx83WrfOK1XByYNS2Teh89zf8CHrDG9WTj0n3zz8yoWhtyNjLiPEmcAEQun8NKAGTy7Eh4bP5DzU7owdX46L8/b5jn3Q+P6cpt7EaPGfLchh2/WZRMZ4mBE7450ighiytcbeeryQThdhiqni7P/8QOdI4N49LRAzsh+h88ZxdyqvoQ47Dx0ssDqmdyVPoy99g7MuXdk008yaz+y/m1Fdm18ewtp4FdHXsZP0Ln/gRePKUyHl4aDs8qqPortYVUVhcRYbQ4DLrHGJ7x5odUoN3kexPWxjl3wD2twWnA0VOyzprEODIeCLdYUFmBNaVFWAEFRVoN2RTFcPg22zLaqqQZPhIteqKtqylphnTPpFKuba9YK+O4v1jm6nWa1dwSG1uXfGOs3LJtuLZeJgfAuGFwUmkjCxvwe02M0jmmjCMAFd/xk3TzS58Hbl7i//3nrJlmcBbf/iCs4htwP76bztv8gt35rPV1hTYrn2LEAx9zHsFXugxtnsWF/OFJTQd/ZkyBzSV22xM7GE27h+043csuZfbGJsKNgP73dI6yZ/0zdqG4vBY4uxFbnsNoxmOeCbucfJQ8QGN2VsF//wDer0nGm/8h/1+RTYCJ5yfE8BuGRrlOZl1HGZakJ/LQ1n6Lyam4f1Yvnvt1CTKiDvWXVOOzC2kfP45nZm/h0xS7uqZrKdQHf8k7NGCoCIvgVn7A4cDj3l1zNLlNXSIhkP28GPcNgtpBhutDLtofpNWP5NvFOUnt04cW5W4mPCia3aD8XdykgJn85l9kWMNCWwTZXPL1se5hWM44SE8rdjo95vOf7fL5+HwuDfstM52j+VjOJcbYl1ASE8WXVSZ7vvcU+ixG2dbyb9Bi79gup3WJYlb6HS3saFhXHsnzHXoorfDsDBAbYfBrlO0UEkVtSSYqk82bgU3QQa5T4t86TKCKcS2w/YhfDt86TuLX6ft69ZTg/bMrhyg7prCuP5eQhJ5EUbuCr38Oqd1nSZSIn/eplsosqrCemQ6CBXx07di2Frd9aAbRgK+RvgaoSq6R+09cQGAZFWfDaGVbV0eBrrOqjOQ9DypUw/mVY+6EVeAPDrDmNOg+AnqOhY2/YvRJ+eh5yfrGCftchVsCe/3eY+4TVWH3VO9YN4+1LAQOVJdZfgG6nQp+x1tPFCWPgyjdg21xY/6l1cyvNBrFZ+TrzQYhOauQ3LoHp50HyGdYYiZ0/W3n71VwICoe8TfDaSKjxqnc/9Tdw3l8bv2b1p+eu2g+7lrC4IJDOcZ1IXv2sNQajYx8rT/0vqWvMXPBP62aWchWMe4qajIWsX7mQ/iMuJqDbKVZbz2d3YiSAamzI5Hk4uvQDrAn3xvzzB353Th+cGQu5e9fd1Ay5norUWwlb/CySPs+6EYd1xEQlYutwAt8VdkD6jOWsQcmwv4CyD24mdOc8vgi/km2DH2B0384M3vMfzNe/R4yL0pj+rHMl0zWkBnvOGjqTz84zX2Ds12E8FPA+NwXMxkQlUX76g4z82MYk+7fcFvI9oTVWl9kddGVpj9s4adwtrJl2B5dWfUGZCWKZqw/XVz8EwL+CX2OsbTH2oDBs5QVUY+eSysdwdh7EXSnVnDP/ShziZK4ZygsdH2FXVhZvBj7FQFsGnzpPY0r1RJxBUdySkEnerk2E9x3Nqoqu7C6q8IwFCaaS33XfzsTsp9lrIri1+neMDVrLbx2f46qu5H3OIzYqmouL3uGemrv4tGY4d9o/4/eODwBY
5upDR9t+urGbF2vG83zN5QQFBhIZ7OC7343yVGsdDA386thlDJTmuEcoez365/xilfLXfw6uaugxypqgLqDp+tNmrXzP6tHUobfVhTUkGm78EgKCYdPX1pNHv4usIFs74tkWYN2AwuKsm0u34VbPpNieB/6uuX+zqnbi+llPMkNvhAivXjQ7fobt860G79BY62mgkQbtFtvyLcz+I+Rvsm4AUUmQs866tilXwqWvNd2zZd1/4fO74Py/w5CJPpvKq5yEBLqP+9//WY36YD1t9bvY6s67P8+qbivaZa0eFxwNg66GTV9Z3z92itVu433z2rsDNnwBGz633gdHQkgsnPkH6HUWU+dv45fdxTw/rMiajXZPXYOxs/dY7IOuhG6nYtzVISJi9eSZMQG2zuEf0X9iZfgoLjkpgQvi8gl57yKrl9lJ12K+vBeCo5DJ8+Dt8VTmbOKF/efygONDGHwNVdt/wlGWS17PS4jb9rGVb5cTcXmNp4jqhrPDCazPqaC0eB+pti0ESTX5ob1YOmIab6+r4tGLB3BiDFaHipBo6++0MdQU7mBhpwmM3PkS8xwjWedM4orAn3E4y3k54i7+vduqtgsLtPOX8QO5YmjiIf0voYFfHb9Kc60nhH4XWQvWHK6t38GHN1jB9sYvIbpb0/sue8Na/3jAZdaNx34QpS5jrMbwyPjm920tLhds+AwW/suqTusyyJrf6aTrm8+7y9l8l8eaSvjsNxCTDMPvsK5h/e27lljdfzd+CRFd4aq3rMGDh8MYWP+Z1V4zeGJd9V9jKkutqrUTz2841UmtLd/Ce5db1yd7DWb8y2zpejF9Vv4NFr1sFQCu+Q8knQx7M6wnyMAwa0R8bE/r/Fv+Z/33dVZRXOkiN3YoJ5x6sfWUd6AbePZamHqmVZjoMw5z1du4bA7s7sZlp8uwKL2AYT1isYscVqOzBn6lvJVkW2sdB0c1v686NOV7rcGAh/MUcyR9ea/VVpN8BtzwhadUz9Jp1jiVjr2P3Hcvfg0yl8LFL4IjuPn9D5EGfqWU8lZZCgv+DkNvsta/aIcOJvC3aMoGERkrIptEZKuIPNjI9vtEZL2IrBGR70Sku9c2p4iscr8+b/nPUEqpVhIUbg1gbKdB/2A1W4kpInbgJeAcIBNYKiKfG2O8Z2RaCaQZY8pE5A7gaeBq97ZyY8yQVs63UkqpQ9SSEv8wYKsxJt0YUwXMBHyGERpj5hpjahdDXQQcWrO0UkqpI64lgT8B2OX1OdOd1pRbgK+9PgeLyDIRWSQilxxCHpVSSrWiVp2dU0SuBdKAUV7J3Y0xWSLSE/heRNYaY7Y1cuxkYDJAt24H6HKnlFLqsLSkxJ8FeA9XTHSn+RCRMcCfgIuNMZ4Juo0xWe6/6cA84KT6x7q3TzXGpBlj0uLi4hrbRSmlVCtoSeBfCvQWkR4iEghMAHx654jIScBrWEE/1ys9RkSC3O87AqcDukyPUkq1oWareowxNSLyG2A2YAemG2N+EZHHgGXGmM+BZ4Bw4D/uGeh2GmMuBvoBr4mIC+smM6VebyCllFJHmQ7gUkqpduC4H7krInnAjmZ3bKgjkN/K2Tne6DXQawB6DcD/rkF3Y0yLGkiPycB/qERkWUvveO2VXgO9BqDXAPQaHEiLpmxQSinVfmjgV0opP9PeAv/Uts7AMUCvgV4D0GsAeg2a1K7q+JVSSjWvvZX4lVJKNaPdBP7m1gw43ojIdBHJFZF1XmmxIjJHRLa4/8a400VEXnD/9jUikup1zA3u/beIyA1e6UNFZK37mBdE5NDXfDsCRCRJROa613n4RUTudqf70zUIFpElIrLafQ3+4k7vISKL3fn+wD2iHhEJcn/e6t6e7HWuh9zpm0TkPK/04+LfjYjYRWSliHzp/ux316BVGWOO+xfWiOJtQE8gEFgN9G/rfB3mbxoJpALrvNKeBh50v38QeMr9/nysGVEFGA4sdqfHAunuvzHu9zHubUvc+4r72HFt/Zvr
/f54INX9PgLYDPT3s2sgQLj7vQNY7M7vh8AEd/qrwB3u93cCr7rfTwA+cL/v7/43EQT0cP9bsR9P/8bXXLoAAALpSURBVG6A+4D3gS/dn/3uGrTmq72U+JtdM+B4Y4yZDxTWSx4PvOV+/xZwiVf628ayCIgWkXjgPGCOMabQGLMXmAOMdW+LNMYsMta/ire9znVMMMbsMcascL8vATZgTQfuT9fAGGNK3R8d7pcBzgI+cqfXvwa11+Yj4Gz3U8x4YKYxptIYsx3YivVv5rj4dyMiicAFwDT3Z8HPrkFray+B/2DXDDhedTbG7HG/zwY6u9839fsPlJ7ZSPoxyf24fhJWidevroG7imPV/7d3xqxRREEc/71CVBA0CXYpJJUQSJUmaCGWZ/A7aLrYWNkE/ABJk0D8CCIo+gkUCztBFE0jXhI7MSDEtFEnxczF3UMlgcW97Px/8Li38+6WN8Pt7DL/t7vADn7S2gR2zexHfKU670NfY/w7MMHxYzNqrAL3gF+xPUG+GDRKVxJ/OuIqtfNLskop54CnwF0z26uOZYiBmf00f3XpJH51ernlKf1XSinzwI6ZvWl7Ll2iK4n/SO8M6ABfo0RBfA4egf03//9ln/yDfaQopZzCk/5DM3sW5lQxGGBmu8BLYA4vYw2erFud96GvMX4e+MbxYzNKXAFullI+42WY68AauWLQPG2LDE00/PHSW7hoMxBoptueVwN+XaIu7q5QFzaXo3+DurD5OuzjwDYuao5FfzzGhoXNXtv+Dvle8Lr76pA9UwwuAheifxZ4BcwDT6gLm4vRv0Nd2Hwc/WnqwuYWLmqeqOMGuMZvcTdlDBqLZdsTaPBP0cNXfmwCS23PpwF/HgFfgH287riA1ypfAJ+A55UEVoAH4fsHYLayn9u4kNUHblXss8BG/GaduJlvVBpwFS/jvAfeResli8EM8DZisAHcD/sUftLqRwI8HfYzsd2P8anKvpbCz49UVi+dpONmKPGnjEFTTXfuCiFEMrpS4xdCCHFElPiFECIZSvxCCJEMJX4hhEiGEr8QQiRDiV8IIZKhxC+EEMlQ4hdCiGQcALM9v/1Vud/mAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "iteration:    200, epoch:   1, train loss: 1.701466, valid loss: 0.000000, valid accuracy: 0.00%\n",
      "iteration:    300, epoch:   1, train loss: 1.734503, valid loss: 0.000000, valid accuracy: 0.00%\n",
      "iteration:    400, epoch:   2, train loss: 1.791332, valid loss: 1.728019, valid accuracy: 39.09%\n",
      "iteration:    500, epoch:   2, train loss: 1.562372, valid loss: 1.728019, valid accuracy: 39.09%\n",
      "iteration:    600, epoch:   2, train loss: 1.854429, valid loss: 1.728019, valid accuracy: 39.09%\n",
      "iteration:    700, epoch:   2, train loss: 1.617573, valid loss: 1.728019, valid accuracy: 39.09%\n",
      "iteration:    800, epoch:   3, train loss: 1.596760, valid loss: 1.581195, valid accuracy: 44.67%\n",
      "iteration:    900, epoch:   3, train loss: 1.710782, valid loss: 1.581195, valid accuracy: 44.67%\n",
      "iteration:   1000, epoch:   3, train loss: 1.459980, valid loss: 1.581195, valid accuracy: 44.67%\n",
      "iteration:   1100, epoch:   3, train loss: 1.510775, valid loss: 1.581195, valid accuracy: 44.67%\n",
      "iteration:   1200, epoch:   4, train loss: 1.437460, valid loss: 1.394019, valid accuracy: 52.32%\n",
      "iteration:   1300, epoch:   4, train loss: 1.633822, valid loss: 1.394019, valid accuracy: 52.32%\n",
      "iteration:   1400, epoch:   4, train loss: 1.339049, valid loss: 1.394019, valid accuracy: 52.32%\n",
      "iteration:   1500, epoch:   4, train loss: 1.746183, valid loss: 1.394019, valid accuracy: 52.32%\n",
      "iteration:   1600, epoch:   5, train loss: 1.353775, valid loss: 1.204452, valid accuracy: 58.24%\n",
      "iteration:   1700, epoch:   5, train loss: 1.331746, valid loss: 1.204452, valid accuracy: 58.24%\n",
      "iteration:   1800, epoch:   5, train loss: 1.451209, valid loss: 1.204452, valid accuracy: 58.24%\n",
      "iteration:   1900, epoch:   5, train loss: 1.299384, valid loss: 1.204452, valid accuracy: 58.24%\n",
      "iteration:   2000, epoch:   6, train loss: 1.229280, valid loss: 1.133409, valid accuracy: 61.01%\n",
      "iteration:   2100, epoch:   6, train loss: 1.302083, valid loss: 1.133409, valid accuracy: 61.01%\n",
      "iteration:   2200, epoch:   6, train loss: 1.056568, valid loss: 1.133409, valid accuracy: 61.01%\n",
      "iteration:   2300, epoch:   6, train loss: 1.193543, valid loss: 1.133409, valid accuracy: 61.01%\n",
      "iteration:   2400, epoch:   7, train loss: 0.961386, valid loss: 1.011641, valid accuracy: 65.48%\n",
      "iteration:   2500, epoch:   7, train loss: 1.014898, valid loss: 1.011641, valid accuracy: 65.48%\n",
      "iteration:   2600, epoch:   7, train loss: 1.048413, valid loss: 1.011641, valid accuracy: 65.48%\n",
      "iteration:   2700, epoch:   7, train loss: 1.024225, valid loss: 1.011641, valid accuracy: 65.48%\n",
      "iteration:   2800, epoch:   8, train loss: 0.969505, valid loss: 0.926349, valid accuracy: 67.77%\n",
      "iteration:   2900, epoch:   8, train loss: 0.939488, valid loss: 0.926349, valid accuracy: 67.77%\n",
      "iteration:   3000, epoch:   8, train loss: 0.907408, valid loss: 0.926349, valid accuracy: 67.77%\n",
      "iteration:   3100, epoch:   8, train loss: 1.169622, valid loss: 0.926349, valid accuracy: 67.77%\n",
      "iteration:   3200, epoch:   9, train loss: 0.936143, valid loss: 0.842208, valid accuracy: 70.76%\n",
      "iteration:   3300, epoch:   9, train loss: 0.972596, valid loss: 0.842208, valid accuracy: 70.76%\n",
      "iteration:   3400, epoch:   9, train loss: 1.069873, valid loss: 0.842208, valid accuracy: 70.76%\n",
      "iteration:   3500, epoch:   9, train loss: 1.012619, valid loss: 0.842208, valid accuracy: 70.76%\n",
      "iteration:   3600, epoch:  10, train loss: 0.916192, valid loss: 0.747772, valid accuracy: 74.97%\n",
      "iteration:   3700, epoch:  10, train loss: 0.804797, valid loss: 0.747772, valid accuracy: 74.97%\n",
      "iteration:   3800, epoch:  10, train loss: 0.821130, valid loss: 0.747772, valid accuracy: 74.97%\n",
      "iteration:   3900, epoch:  10, train loss: 0.823300, valid loss: 0.747772, valid accuracy: 74.97%\n",
      "iteration:   4000, epoch:  11, train loss: 0.875795, valid loss: 0.715233, valid accuracy: 76.54%\n",
      "iteration:   4100, epoch:  11, train loss: 0.912061, valid loss: 0.715233, valid accuracy: 76.54%\n",
      "iteration:   4200, epoch:  11, train loss: 0.843487, valid loss: 0.715233, valid accuracy: 76.54%\n",
      "iteration:   4300, epoch:  11, train loss: 0.839164, valid loss: 0.715233, valid accuracy: 76.54%\n",
      "iteration:   4400, epoch:  12, train loss: 0.768521, valid loss: 0.787551, valid accuracy: 72.60%\n",
      "iteration:   4500, epoch:  12, train loss: 0.881795, valid loss: 0.787551, valid accuracy: 72.60%\n",
      "iteration:   4600, epoch:  12, train loss: 0.787905, valid loss: 0.787551, valid accuracy: 72.60%\n",
      "iteration:   4700, epoch:  13, train loss: 0.702438, valid loss: 0.612529, valid accuracy: 80.31%\n",
      "iteration:   4800, epoch:  13, train loss: 0.721619, valid loss: 0.612529, valid accuracy: 80.31%\n",
      "iteration:   4900, epoch:  13, train loss: 0.779005, valid loss: 0.612529, valid accuracy: 80.31%\n",
      "iteration:   5000, epoch:  13, train loss: 0.775914, valid loss: 0.612529, valid accuracy: 80.31%\n",
      "iteration:   5100, epoch:  14, train loss: 0.824137, valid loss: 0.529345, valid accuracy: 82.77%\n",
      "iteration:   5200, epoch:  14, train loss: 0.710661, valid loss: 0.529345, valid accuracy: 82.77%\n",
      "iteration:   5300, epoch:  14, train loss: 0.830143, valid loss: 0.529345, valid accuracy: 82.77%\n",
      "iteration:   5400, epoch:  14, train loss: 0.701084, valid loss: 0.529345, valid accuracy: 82.77%\n",
      "iteration:   5500, epoch:  15, train loss: 0.703782, valid loss: 0.668339, valid accuracy: 77.78%\n",
      "iteration:   5600, epoch:  15, train loss: 0.693822, valid loss: 0.668339, valid accuracy: 77.78%\n",
      "iteration:   5700, epoch:  15, train loss: 0.709692, valid loss: 0.668339, valid accuracy: 77.78%\n",
      "iteration:   5800, epoch:  15, train loss: 0.609514, valid loss: 0.668339, valid accuracy: 77.78%\n",
      "iteration:   5900, epoch:  16, train loss: 0.650571, valid loss: 0.518388, valid accuracy: 83.60%\n",
      "iteration:   6000, epoch:  16, train loss: 0.662611, valid loss: 0.518388, valid accuracy: 83.60%\n",
      "iteration:   6100, epoch:  16, train loss: 0.723383, valid loss: 0.518388, valid accuracy: 83.60%\n",
      "iteration:   6200, epoch:  16, train loss: 0.695608, valid loss: 0.518388, valid accuracy: 83.60%\n",
      "iteration:   6300, epoch:  17, train loss: 0.508379, valid loss: 0.507342, valid accuracy: 83.67%\n",
      "iteration:   6400, epoch:  17, train loss: 0.549884, valid loss: 0.507342, valid accuracy: 83.67%\n",
      "iteration:   6500, epoch:  17, train loss: 0.644710, valid loss: 0.507342, valid accuracy: 83.67%\n",
      "iteration:   6600, epoch:  17, train loss: 0.604956, valid loss: 0.507342, valid accuracy: 83.67%\n",
      "iteration:   6700, epoch:  18, train loss: 0.615933, valid loss: 0.456867, valid accuracy: 85.46%\n",
      "iteration:   6800, epoch:  18, train loss: 0.703455, valid loss: 0.456867, valid accuracy: 85.46%\n",
      "iteration:   6900, epoch:  18, train loss: 0.673985, valid loss: 0.456867, valid accuracy: 85.46%\n",
      "iteration:   7000, epoch:  18, train loss: 0.755875, valid loss: 0.456867, valid accuracy: 85.46%\n",
      "iteration:   7100, epoch:  19, train loss: 0.581781, valid loss: 0.477765, valid accuracy: 84.74%\n",
      "iteration:   7200, epoch:  19, train loss: 0.649344, valid loss: 0.477765, valid accuracy: 84.74%\n",
      "iteration:   7300, epoch:  19, train loss: 0.573636, valid loss: 0.477765, valid accuracy: 84.74%\n",
      "iteration:   7400, epoch:  19, train loss: 0.612095, valid loss: 0.477765, valid accuracy: 84.74%\n",
      "iteration:   7500, epoch:  20, train loss: 0.564594, valid loss: 0.401103, valid accuracy: 87.03%\n",
      "iteration:   7600, epoch:  20, train loss: 0.584351, valid loss: 0.401103, valid accuracy: 87.03%\n",
      "iteration:   7700, epoch:  20, train loss: 0.568602, valid loss: 0.401103, valid accuracy: 87.03%\n",
      "iteration:   7800, epoch:  20, train loss: 0.578204, valid loss: 0.401103, valid accuracy: 87.03%\n",
      "iteration:   7900, epoch:  21, train loss: 0.548399, valid loss: 0.406276, valid accuracy: 87.18%\n",
      "iteration:   8000, epoch:  21, train loss: 0.548512, valid loss: 0.406276, valid accuracy: 87.18%\n",
      "iteration:   8100, epoch:  21, train loss: 0.673720, valid loss: 0.406276, valid accuracy: 87.18%\n",
      "iteration:   8200, epoch:  21, train loss: 0.562136, valid loss: 0.406276, valid accuracy: 87.18%\n",
      "iteration:   8300, epoch:  22, train loss: 0.578406, valid loss: 0.433347, valid accuracy: 86.88%\n",
      "iteration:   8400, epoch:  22, train loss: 0.590488, valid loss: 0.433347, valid accuracy: 86.88%\n",
      "iteration:   8500, epoch:  22, train loss: 0.517650, valid loss: 0.433347, valid accuracy: 86.88%\n",
      "iteration:   8600, epoch:  22, train loss: 0.509144, valid loss: 0.433347, valid accuracy: 86.88%\n",
      "iteration:   8700, epoch:  23, train loss: 0.530202, valid loss: 0.396808, valid accuracy: 87.70%\n",
      "iteration:   8800, epoch:  23, train loss: 0.520704, valid loss: 0.396808, valid accuracy: 87.70%\n",
      "iteration:   8900, epoch:  23, train loss: 0.556167, valid loss: 0.396808, valid accuracy: 87.70%\n",
      "iteration:   9000, epoch:  24, train loss: 0.473539, valid loss: 0.419837, valid accuracy: 87.02%\n",
      "iteration:   9100, epoch:  24, train loss: 0.591864, valid loss: 0.419837, valid accuracy: 87.02%\n",
      "iteration:   9200, epoch:  24, train loss: 0.561373, valid loss: 0.419837, valid accuracy: 87.02%\n",
      "iteration:   9300, epoch:  24, train loss: 0.477779, valid loss: 0.419837, valid accuracy: 87.02%\n",
      "iteration:   9400, epoch:  25, train loss: 0.580093, valid loss: 0.390724, valid accuracy: 88.36%\n",
      "iteration:   9500, epoch:  25, train loss: 0.498513, valid loss: 0.390724, valid accuracy: 88.36%\n",
      "iteration:   9600, epoch:  25, train loss: 0.507338, valid loss: 0.390724, valid accuracy: 88.36%\n",
      "iteration:   9700, epoch:  25, train loss: 0.549673, valid loss: 0.390724, valid accuracy: 88.36%\n",
      "iteration:   9800, epoch:  26, train loss: 0.481792, valid loss: 0.402071, valid accuracy: 87.17%\n",
      "iteration:   9900, epoch:  26, train loss: 0.610361, valid loss: 0.402071, valid accuracy: 87.17%\n",
      "iteration:  10000, epoch:  26, train loss: 0.538065, valid loss: 0.402071, valid accuracy: 87.17%\n",
      "iteration:  10100, epoch:  26, train loss: 0.482886, valid loss: 0.402071, valid accuracy: 87.17%\n",
      "iteration:  10200, epoch:  27, train loss: 0.475126, valid loss: 0.393838, valid accuracy: 87.19%\n",
      "iteration:  10300, epoch:  27, train loss: 0.487575, valid loss: 0.393838, valid accuracy: 87.19%\n",
      "iteration:  10400, epoch:  27, train loss: 0.543160, valid loss: 0.393838, valid accuracy: 87.19%\n",
      "iteration:  10500, epoch:  27, train loss: 0.467564, valid loss: 0.393838, valid accuracy: 87.19%\n",
      "iteration:  10600, epoch:  28, train loss: 0.487353, valid loss: 0.369168, valid accuracy: 88.48%\n",
      "iteration:  10700, epoch:  28, train loss: 0.461376, valid loss: 0.369168, valid accuracy: 88.48%\n",
      "iteration:  10800, epoch:  28, train loss: 0.448272, valid loss: 0.369168, valid accuracy: 88.48%\n",
      "iteration:  10900, epoch:  28, train loss: 0.551579, valid loss: 0.369168, valid accuracy: 88.48%\n",
      "iteration:  11000, epoch:  29, train loss: 0.464824, valid loss: 0.352586, valid accuracy: 89.48%\n",
      "iteration:  11100, epoch:  29, train loss: 0.454800, valid loss: 0.352586, valid accuracy: 89.48%\n",
      "iteration:  11200, epoch:  29, train loss: 0.500000, valid loss: 0.352586, valid accuracy: 89.48%\n",
      "iteration:  11300, epoch:  29, train loss: 0.458018, valid loss: 0.352586, valid accuracy: 89.48%\n",
      "iteration:  11400, epoch:  30, train loss: 0.437776, valid loss: 0.366980, valid accuracy: 89.30%\n",
      "iteration:  11500, epoch:  30, train loss: 0.472099, valid loss: 0.366980, valid accuracy: 89.30%\n",
      "iteration:  11600, epoch:  30, train loss: 0.531350, valid loss: 0.366980, valid accuracy: 89.30%\n",
      "iteration:  11700, epoch:  30, train loss: 0.523197, valid loss: 0.366980, valid accuracy: 89.30%\n",
      "iteration:  11800, epoch:  31, train loss: 0.483415, valid loss: 0.381386, valid accuracy: 88.13%\n",
      "iteration:  11900, epoch:  31, train loss: 0.436742, valid loss: 0.381386, valid accuracy: 88.13%\n",
      "iteration:  12000, epoch:  31, train loss: 0.481010, valid loss: 0.381386, valid accuracy: 88.13%\n",
      "iteration:  12100, epoch:  31, train loss: 0.490645, valid loss: 0.381386, valid accuracy: 88.13%\n",
      "iteration:  12200, epoch:  32, train loss: 0.465694, valid loss: 0.357951, valid accuracy: 89.36%\n",
      "iteration:  12300, epoch:  32, train loss: 0.483698, valid loss: 0.357951, valid accuracy: 89.36%\n",
      "iteration:  12400, epoch:  32, train loss: 0.428520, valid loss: 0.357951, valid accuracy: 89.36%\n",
      "iteration:  12500, epoch:  32, train loss: 0.516598, valid loss: 0.357951, valid accuracy: 89.36%\n",
      "iteration:  12600, epoch:  33, train loss: 0.468678, valid loss: 0.349724, valid accuracy: 89.64%\n",
      "iteration:  12700, epoch:  33, train loss: 0.391840, valid loss: 0.349724, valid accuracy: 89.64%\n",
      "iteration:  12800, epoch:  33, train loss: 0.409594, valid loss: 0.349724, valid accuracy: 89.64%\n",
      "iteration:  12900, epoch:  33, train loss: 0.453164, valid loss: 0.349724, valid accuracy: 89.64%\n",
      "iteration:  13000, epoch:  34, train loss: 0.454236, valid loss: 0.360663, valid accuracy: 89.25%\n",
      "iteration:  13100, epoch:  34, train loss: 0.421641, valid loss: 0.360663, valid accuracy: 89.25%\n",
      "iteration:  13200, epoch:  34, train loss: 0.467923, valid loss: 0.360663, valid accuracy: 89.25%\n",
      "iteration:  13300, epoch:  35, train loss: 0.432154, valid loss: 0.336303, valid accuracy: 90.45%\n",
      "iteration:  13400, epoch:  35, train loss: 0.400624, valid loss: 0.336303, valid accuracy: 90.45%\n",
      "iteration:  13500, epoch:  35, train loss: 0.453277, valid loss: 0.336303, valid accuracy: 90.45%\n",
      "iteration:  13600, epoch:  35, train loss: 0.451523, valid loss: 0.336303, valid accuracy: 90.45%\n",
      "iteration:  13700, epoch:  36, train loss: 0.436745, valid loss: 0.376687, valid accuracy: 88.62%\n",
      "iteration:  13800, epoch:  36, train loss: 0.426557, valid loss: 0.376687, valid accuracy: 88.62%\n",
      "iteration:  13900, epoch:  36, train loss: 0.406236, valid loss: 0.376687, valid accuracy: 88.62%\n",
      "iteration:  14000, epoch:  36, train loss: 0.445812, valid loss: 0.376687, valid accuracy: 88.62%\n",
      "iteration:  14100, epoch:  37, train loss: 0.420210, valid loss: 0.332355, valid accuracy: 90.20%\n",
      "iteration:  14200, epoch:  37, train loss: 0.447230, valid loss: 0.332355, valid accuracy: 90.20%\n",
      "iteration:  14300, epoch:  37, train loss: 0.413585, valid loss: 0.332355, valid accuracy: 90.20%\n",
      "iteration:  14400, epoch:  37, train loss: 0.391397, valid loss: 0.332355, valid accuracy: 90.20%\n",
      "iteration:  14500, epoch:  38, train loss: 0.389758, valid loss: 0.352889, valid accuracy: 89.82%\n",
      "iteration:  14600, epoch:  38, train loss: 0.399717, valid loss: 0.352889, valid accuracy: 89.82%\n",
      "iteration:  14700, epoch:  38, train loss: 0.429855, valid loss: 0.352889, valid accuracy: 89.82%\n",
      "iteration:  14800, epoch:  38, train loss: 0.408361, valid loss: 0.352889, valid accuracy: 89.82%\n",
      "iteration:  14900, epoch:  39, train loss: 0.439538, valid loss: 0.365326, valid accuracy: 89.67%\n",
      "iteration:  15000, epoch:  39, train loss: 0.412214, valid loss: 0.365326, valid accuracy: 89.67%\n",
      "iteration:  15100, epoch:  39, train loss: 0.420869, valid loss: 0.365326, valid accuracy: 89.67%\n",
      "iteration:  15200, epoch:  39, train loss: 0.445951, valid loss: 0.365326, valid accuracy: 89.67%\n",
      "iteration:  15300, epoch:  40, train loss: 0.373682, valid loss: 0.359643, valid accuracy: 89.10%\n",
      "iteration:  15400, epoch:  40, train loss: 0.400643, valid loss: 0.359643, valid accuracy: 89.10%\n",
      "iteration:  15500, epoch:  40, train loss: 0.384529, valid loss: 0.359643, valid accuracy: 89.10%\n",
      "iteration:  15600, epoch:  40, train loss: 0.420415, valid loss: 0.359643, valid accuracy: 89.10%\n",
      "iteration:  15700, epoch:  41, train loss: 0.397849, valid loss: 0.323973, valid accuracy: 90.78%\n",
      "iteration:  15800, epoch:  41, train loss: 0.425228, valid loss: 0.323973, valid accuracy: 90.78%\n",
      "iteration:  15900, epoch:  41, train loss: 0.454288, valid loss: 0.323973, valid accuracy: 90.78%\n",
      "iteration:  16000, epoch:  41, train loss: 0.404330, valid loss: 0.323973, valid accuracy: 90.78%\n",
      "iteration:  16100, epoch:  42, train loss: 0.367150, valid loss: 0.320978, valid accuracy: 91.07%\n",
      "iteration:  16200, epoch:  42, train loss: 0.386870, valid loss: 0.320978, valid accuracy: 91.07%\n",
      "iteration:  16300, epoch:  42, train loss: 0.375438, valid loss: 0.320978, valid accuracy: 91.07%\n",
      "iteration:  16400, epoch:  42, train loss: 0.445711, valid loss: 0.320978, valid accuracy: 91.07%\n",
      "iteration:  16500, epoch:  43, train loss: 0.350978, valid loss: 0.317596, valid accuracy: 91.29%\n",
      "iteration:  16600, epoch:  43, train loss: 0.424502, valid loss: 0.317596, valid accuracy: 91.29%\n",
      "iteration:  16700, epoch:  43, train loss: 0.377374, valid loss: 0.317596, valid accuracy: 91.29%\n",
      "iteration:  16800, epoch:  43, train loss: 0.421737, valid loss: 0.317596, valid accuracy: 91.29%\n",
      "iteration:  16900, epoch:  44, train loss: 0.360576, valid loss: 0.343655, valid accuracy: 90.40%\n",
      "iteration:  17000, epoch:  44, train loss: 0.417443, valid loss: 0.343655, valid accuracy: 90.40%\n",
      "iteration:  17100, epoch:  44, train loss: 0.372511, valid loss: 0.343655, valid accuracy: 90.40%\n",
      "iteration:  17200, epoch:  44, train loss: 0.377557, valid loss: 0.343655, valid accuracy: 90.40%\n",
      "iteration:  17300, epoch:  45, train loss: 0.387423, valid loss: 0.315636, valid accuracy: 90.91%\n",
      "iteration:  17400, epoch:  45, train loss: 0.380064, valid loss: 0.315636, valid accuracy: 90.91%\n",
      "iteration:  17500, epoch:  45, train loss: 0.376683, valid loss: 0.315636, valid accuracy: 90.91%\n",
      "iteration:  17600, epoch:  46, train loss: 0.366098, valid loss: 0.306928, valid accuracy: 91.75%\n",
      "iteration:  17700, epoch:  46, train loss: 0.397472, valid loss: 0.306928, valid accuracy: 91.75%\n",
      "iteration:  17800, epoch:  46, train loss: 0.394058, valid loss: 0.306928, valid accuracy: 91.75%\n",
      "iteration:  17900, epoch:  46, train loss: 0.357775, valid loss: 0.306928, valid accuracy: 91.75%\n",
      "iteration:  18000, epoch:  47, train loss: 0.371820, valid loss: 0.330683, valid accuracy: 91.26%\n",
      "iteration:  18100, epoch:  47, train loss: 0.397528, valid loss: 0.330683, valid accuracy: 91.26%\n",
      "iteration:  18200, epoch:  47, train loss: 0.371485, valid loss: 0.330683, valid accuracy: 91.26%\n",
      "iteration:  18300, epoch:  47, train loss: 0.384522, valid loss: 0.330683, valid accuracy: 91.26%\n",
      "iteration:  18400, epoch:  48, train loss: 0.384912, valid loss: 0.320055, valid accuracy: 90.90%\n",
      "iteration:  18500, epoch:  48, train loss: 0.357815, valid loss: 0.320055, valid accuracy: 90.90%\n",
      "iteration:  18600, epoch:  48, train loss: 0.417638, valid loss: 0.320055, valid accuracy: 90.90%\n",
      "iteration:  18700, epoch:  48, train loss: 0.380251, valid loss: 0.320055, valid accuracy: 90.90%\n",
      "iteration:  18800, epoch:  49, train loss: 0.359067, valid loss: 0.323254, valid accuracy: 90.62%\n",
      "iteration:  18900, epoch:  49, train loss: 0.356399, valid loss: 0.323254, valid accuracy: 90.62%\n",
      "iteration:  19000, epoch:  49, train loss: 0.354387, valid loss: 0.323254, valid accuracy: 90.62%\n",
      "iteration:  19100, epoch:  49, train loss: 0.364480, valid loss: 0.323254, valid accuracy: 90.62%\n",
      "iteration:  19200, epoch:  50, train loss: 0.359921, valid loss: 0.329153, valid accuracy: 91.12%\n",
      "iteration:  19300, epoch:  50, train loss: 0.410238, valid loss: 0.329153, valid accuracy: 91.12%\n",
      "iteration:  19400, epoch:  50, train loss: 0.380975, valid loss: 0.329153, valid accuracy: 91.12%\n",
      "iteration:  19500, epoch:  50, train loss: 0.398338, valid loss: 0.329153, valid accuracy: 91.12%\n",
      "iteration:  19600, epoch:  51, train loss: 0.385895, valid loss: 0.319375, valid accuracy: 90.81%\n",
      "iteration:  19700, epoch:  51, train loss: 0.361062, valid loss: 0.319375, valid accuracy: 90.81%\n",
      "iteration:  19800, epoch:  51, train loss: 0.386251, valid loss: 0.319375, valid accuracy: 90.81%\n",
      "iteration:  19900, epoch:  51, train loss: 0.360746, valid loss: 0.319375, valid accuracy: 90.81%\n",
      "iteration:  20000, epoch:  52, train loss: 0.344961, valid loss: 0.337805, valid accuracy: 90.67%\n",
      "iteration:  20100, epoch:  52, train loss: 0.368706, valid loss: 0.337805, valid accuracy: 90.67%\n",
      "iteration:  20200, epoch:  52, train loss: 0.381333, valid loss: 0.337805, valid accuracy: 90.67%\n",
      "iteration:  20300, epoch:  52, train loss: 0.401217, valid loss: 0.337805, valid accuracy: 90.67%\n",
      "iteration:  20400, epoch:  53, train loss: 0.368161, valid loss: 0.318872, valid accuracy: 91.26%\n",
      "iteration:  20500, epoch:  53, train loss: 0.356404, valid loss: 0.318872, valid accuracy: 91.26%\n",
      "iteration:  20600, epoch:  53, train loss: 0.374847, valid loss: 0.318872, valid accuracy: 91.26%\n",
      "iteration:  20700, epoch:  53, train loss: 0.367280, valid loss: 0.318872, valid accuracy: 91.26%\n",
      "iteration:  20800, epoch:  54, train loss: 0.361153, valid loss: 0.300207, valid accuracy: 91.65%\n",
      "iteration:  20900, epoch:  54, train loss: 0.375697, valid loss: 0.300207, valid accuracy: 91.65%\n",
      "iteration:  21000, epoch:  54, train loss: 0.363057, valid loss: 0.300207, valid accuracy: 91.65%\n",
      "iteration:  21100, epoch:  54, train loss: 0.370735, valid loss: 0.300207, valid accuracy: 91.65%\n",
      "iteration:  21200, epoch:  55, train loss: 0.402477, valid loss: 0.321413, valid accuracy: 91.20%\n",
      "iteration:  21300, epoch:  55, train loss: 0.345695, valid loss: 0.321413, valid accuracy: 91.20%\n",
      "iteration:  21400, epoch:  55, train loss: 0.367232, valid loss: 0.321413, valid accuracy: 91.20%\n",
      "iteration:  21500, epoch:  55, train loss: 0.378744, valid loss: 0.321413, valid accuracy: 91.20%\n",
      "iteration:  21600, epoch:  56, train loss: 0.350989, valid loss: 0.314600, valid accuracy: 91.46%\n",
      "iteration:  21700, epoch:  56, train loss: 0.342386, valid loss: 0.314600, valid accuracy: 91.46%\n",
      "iteration:  21800, epoch:  56, train loss: 0.356195, valid loss: 0.314600, valid accuracy: 91.46%\n",
      "iteration:  21900, epoch:  57, train loss: 0.363199, valid loss: 0.327107, valid accuracy: 91.07%\n",
      "iteration:  22000, epoch:  57, train loss: 0.356554, valid loss: 0.327107, valid accuracy: 91.07%\n",
      "iteration:  22100, epoch:  57, train loss: 0.344819, valid loss: 0.327107, valid accuracy: 91.07%\n",
      "iteration:  22200, epoch:  57, train loss: 0.348212, valid loss: 0.327107, valid accuracy: 91.07%\n",
      "iteration:  22300, epoch:  58, train loss: 0.357340, valid loss: 0.304529, valid accuracy: 91.63%\n",
      "iteration:  22400, epoch:  58, train loss: 0.343752, valid loss: 0.304529, valid accuracy: 91.63%\n",
      "iteration:  22500, epoch:  58, train loss: 0.376185, valid loss: 0.304529, valid accuracy: 91.63%\n",
      "iteration:  22600, epoch:  58, train loss: 0.351699, valid loss: 0.304529, valid accuracy: 91.63%\n",
      "iteration:  22700, epoch:  59, train loss: 0.363513, valid loss: 0.312085, valid accuracy: 91.39%\n",
      "iteration:  22800, epoch:  59, train loss: 0.351346, valid loss: 0.312085, valid accuracy: 91.39%\n",
      "iteration:  22900, epoch:  59, train loss: 0.336739, valid loss: 0.312085, valid accuracy: 91.39%\n",
      "iteration:  23000, epoch:  59, train loss: 0.351340, valid loss: 0.312085, valid accuracy: 91.39%\n",
      "iteration:  23100, epoch:  60, train loss: 0.338680, valid loss: 0.317092, valid accuracy: 91.56%\n",
      "iteration:  23200, epoch:  60, train loss: 0.338127, valid loss: 0.317092, valid accuracy: 91.56%\n",
      "iteration:  23300, epoch:  60, train loss: 0.347884, valid loss: 0.317092, valid accuracy: 91.56%\n",
      "iteration:  23400, epoch:  60, train loss: 0.371086, valid loss: 0.317092, valid accuracy: 91.56%\n",
      "iteration:  23500, epoch:  61, train loss: 0.358591, valid loss: 0.328433, valid accuracy: 91.14%\n",
      "iteration:  23600, epoch:  61, train loss: 0.358131, valid loss: 0.328433, valid accuracy: 91.14%\n",
      "iteration:  23700, epoch:  61, train loss: 0.336586, valid loss: 0.328433, valid accuracy: 91.14%\n",
      "iteration:  23800, epoch:  61, train loss: 0.350924, valid loss: 0.328433, valid accuracy: 91.14%\n",
      "iteration:  23900, epoch:  62, train loss: 0.331386, valid loss: 0.328265, valid accuracy: 91.55%\n",
      "iteration:  24000, epoch:  62, train loss: 0.346296, valid loss: 0.328265, valid accuracy: 91.55%\n",
      "iteration:  24100, epoch:  62, train loss: 0.347726, valid loss: 0.328265, valid accuracy: 91.55%\n",
      "iteration:  24200, epoch:  62, train loss: 0.373697, valid loss: 0.328265, valid accuracy: 91.55%\n",
      "iteration:  24300, epoch:  63, train loss: 0.333688, valid loss: 0.339709, valid accuracy: 91.07%\n",
      "iteration:  24400, epoch:  63, train loss: 0.341529, valid loss: 0.339709, valid accuracy: 91.07%\n",
      "iteration:  24500, epoch:  63, train loss: 0.364969, valid loss: 0.339709, valid accuracy: 91.07%\n",
      "iteration:  24600, epoch:  63, train loss: 0.340850, valid loss: 0.339709, valid accuracy: 91.07%\n",
      "iteration:  24700, epoch:  64, train loss: 0.351636, valid loss: 0.309215, valid accuracy: 92.07%\n",
      "iteration:  24800, epoch:  64, train loss: 0.349757, valid loss: 0.309215, valid accuracy: 92.07%\n",
      "iteration:  24900, epoch:  64, train loss: 0.338656, valid loss: 0.309215, valid accuracy: 92.07%\n",
      "iteration:  25000, epoch:  64, train loss: 0.366476, valid loss: 0.309215, valid accuracy: 92.07%\n",
      "iteration:  25100, epoch:  65, train loss: 0.359358, valid loss: 0.316279, valid accuracy: 91.56%\n",
      "iteration:  25200, epoch:  65, train loss: 0.344439, valid loss: 0.316279, valid accuracy: 91.56%\n",
      "iteration:  25300, epoch:  65, train loss: 0.356880, valid loss: 0.316279, valid accuracy: 91.56%\n",
      "iteration:  25400, epoch:  65, train loss: 0.344120, valid loss: 0.316279, valid accuracy: 91.56%\n",
      "iteration:  25500, epoch:  66, train loss: 0.360414, valid loss: 0.344873, valid accuracy: 89.56%\n",
      "iteration:  25600, epoch:  66, train loss: 0.373386, valid loss: 0.344873, valid accuracy: 89.56%\n",
      "iteration:  25700, epoch:  66, train loss: 0.370859, valid loss: 0.344873, valid accuracy: 89.56%\n",
      "iteration:  25800, epoch:  66, train loss: 0.354121, valid loss: 0.344873, valid accuracy: 89.56%\n",
      "iteration:  25900, epoch:  67, train loss: 0.349283, valid loss: 0.314768, valid accuracy: 91.93%\n",
      "iteration:  26000, epoch:  67, train loss: 0.366900, valid loss: 0.314768, valid accuracy: 91.93%\n",
      "iteration:  26100, epoch:  67, train loss: 0.339405, valid loss: 0.314768, valid accuracy: 91.93%\n",
      "iteration:  26200, epoch:  68, train loss: 0.332868, valid loss: 0.315216, valid accuracy: 91.70%\n",
      "iteration:  26300, epoch:  68, train loss: 0.335023, valid loss: 0.315216, valid accuracy: 91.70%\n",
      "iteration:  26400, epoch:  68, train loss: 0.352010, valid loss: 0.315216, valid accuracy: 91.70%\n",
      "iteration:  26500, epoch:  68, train loss: 0.349353, valid loss: 0.315216, valid accuracy: 91.70%\n",
      "iteration:  26600, epoch:  69, train loss: 0.336626, valid loss: 0.348055, valid accuracy: 90.80%\n",
      "iteration:  26700, epoch:  69, train loss: 0.356816, valid loss: 0.348055, valid accuracy: 90.80%\n",
      "iteration:  26800, epoch:  69, train loss: 0.351074, valid loss: 0.348055, valid accuracy: 90.80%\n",
      "iteration:  26900, epoch:  69, train loss: 0.351254, valid loss: 0.348055, valid accuracy: 90.80%\n",
      "iteration:  27000, epoch:  70, train loss: 0.334510, valid loss: 0.338411, valid accuracy: 90.75%\n",
      "iteration:  27100, epoch:  70, train loss: 0.345564, valid loss: 0.338411, valid accuracy: 90.75%\n",
      "iteration:  27200, epoch:  70, train loss: 0.334288, valid loss: 0.338411, valid accuracy: 90.75%\n",
      "iteration:  27300, epoch:  70, train loss: 0.368425, valid loss: 0.338411, valid accuracy: 90.75%\n",
      "iteration:  27400, epoch:  71, train loss: 0.357187, valid loss: 0.326983, valid accuracy: 91.11%\n",
      "iteration:  27500, epoch:  71, train loss: 0.344959, valid loss: 0.326983, valid accuracy: 91.11%\n",
      "iteration:  27600, epoch:  71, train loss: 0.356085, valid loss: 0.326983, valid accuracy: 91.11%\n",
      "iteration:  27700, epoch:  71, train loss: 0.356955, valid loss: 0.326983, valid accuracy: 91.11%\n",
      "iteration:  27800, epoch:  72, train loss: 0.332145, valid loss: 0.349889, valid accuracy: 90.30%\n",
      "iteration:  27900, epoch:  72, train loss: 0.335835, valid loss: 0.349889, valid accuracy: 90.30%\n",
      "iteration:  28000, epoch:  72, train loss: 0.337740, valid loss: 0.349889, valid accuracy: 90.30%\n",
      "iteration:  28100, epoch:  72, train loss: 0.370219, valid loss: 0.349889, valid accuracy: 90.30%\n",
      "iteration:  28200, epoch:  73, train loss: 0.342360, valid loss: 0.324662, valid accuracy: 91.63%\n",
      "iteration:  28300, epoch:  73, train loss: 0.347349, valid loss: 0.324662, valid accuracy: 91.63%\n",
      "iteration:  28400, epoch:  73, train loss: 0.323115, valid loss: 0.324662, valid accuracy: 91.63%\n",
      "iteration:  28500, epoch:  73, train loss: 0.339788, valid loss: 0.324662, valid accuracy: 91.63%\n",
      "iteration:  28600, epoch:  74, train loss: 0.335250, valid loss: 0.327310, valid accuracy: 90.88%\n",
      "iteration:  28700, epoch:  74, train loss: 0.343119, valid loss: 0.327310, valid accuracy: 90.88%\n",
      "iteration:  28800, epoch:  74, train loss: 0.334289, valid loss: 0.327310, valid accuracy: 90.88%\n",
      "iteration:  28900, epoch:  74, train loss: 0.330244, valid loss: 0.327310, valid accuracy: 90.88%\n",
      "iteration:  29000, epoch:  75, train loss: 0.333707, valid loss: 0.313375, valid accuracy: 92.25%\n",
      "iteration:  29100, epoch:  75, train loss: 0.347601, valid loss: 0.313375, valid accuracy: 92.25%\n",
      "iteration:  29200, epoch:  75, train loss: 0.339743, valid loss: 0.313375, valid accuracy: 92.25%\n",
      "iteration:  29300, epoch:  75, train loss: 0.360573, valid loss: 0.313375, valid accuracy: 92.25%\n",
      "iteration:  29400, epoch:  76, train loss: 0.333285, valid loss: 0.302988, valid accuracy: 91.81%\n",
      "iteration:  29500, epoch:  76, train loss: 0.342436, valid loss: 0.302988, valid accuracy: 91.81%\n",
      "iteration:  29600, epoch:  76, train loss: 0.340237, valid loss: 0.302988, valid accuracy: 91.81%\n",
      "iteration:  29700, epoch:  76, train loss: 0.352782, valid loss: 0.302988, valid accuracy: 91.81%\n",
      "iteration:  29800, epoch:  77, train loss: 0.338808, valid loss: 0.306936, valid accuracy: 91.98%\n",
      "iteration:  29900, epoch:  77, train loss: 0.344952, valid loss: 0.306936, valid accuracy: 91.98%\n",
      "iteration:  30000, epoch:  77, train loss: 0.347761, valid loss: 0.306936, valid accuracy: 91.98%\n",
      "iteration:  30100, epoch:  77, train loss: 0.354483, valid loss: 0.306936, valid accuracy: 91.98%\n",
      "iteration:  30200, epoch:  78, train loss: 0.345524, valid loss: 0.328335, valid accuracy: 91.60%\n",
      "iteration:  30300, epoch:  78, train loss: 0.343568, valid loss: 0.328335, valid accuracy: 91.60%\n",
      "iteration:  30400, epoch:  78, train loss: 0.350947, valid loss: 0.328335, valid accuracy: 91.60%\n",
      "iteration:  30500, epoch:  79, train loss: 0.323068, valid loss: 0.322778, valid accuracy: 91.17%\n",
      "iteration:  30600, epoch:  79, train loss: 0.352734, valid loss: 0.322778, valid accuracy: 91.17%\n",
      "iteration:  30700, epoch:  79, train loss: 0.332606, valid loss: 0.322778, valid accuracy: 91.17%\n",
      "iteration:  30800, epoch:  79, train loss: 0.329542, valid loss: 0.322778, valid accuracy: 91.17%\n",
      "iteration:  30900, epoch:  80, train loss: 0.342192, valid loss: 0.330568, valid accuracy: 91.46%\n",
      "iteration:  31000, epoch:  80, train loss: 0.332171, valid loss: 0.330568, valid accuracy: 91.46%\n",
      "iteration:  31100, epoch:  80, train loss: 0.328009, valid loss: 0.330568, valid accuracy: 91.46%\n",
      "iteration:  31200, epoch:  80, train loss: 0.341086, valid loss: 0.330568, valid accuracy: 91.46%\n",
      "iteration:  31300, epoch:  81, train loss: 0.337771, valid loss: 0.310008, valid accuracy: 91.65%\n",
      "iteration:  31400, epoch:  81, train loss: 0.321734, valid loss: 0.310008, valid accuracy: 91.65%\n",
      "iteration:  31500, epoch:  81, train loss: 0.336665, valid loss: 0.310008, valid accuracy: 91.65%\n",
      "iteration:  31600, epoch:  81, train loss: 0.339184, valid loss: 0.310008, valid accuracy: 91.65%\n",
      "iteration:  31700, epoch:  82, train loss: 0.335612, valid loss: 0.338973, valid accuracy: 91.28%\n",
      "iteration:  31800, epoch:  82, train loss: 0.341520, valid loss: 0.338973, valid accuracy: 91.28%\n",
      "iteration:  31900, epoch:  82, train loss: 0.331775, valid loss: 0.338973, valid accuracy: 91.28%\n",
      "iteration:  32000, epoch:  82, train loss: 0.359904, valid loss: 0.338973, valid accuracy: 91.28%\n",
      "iteration:  32100, epoch:  83, train loss: 0.337576, valid loss: 0.332624, valid accuracy: 91.39%\n",
      "iteration:  32200, epoch:  83, train loss: 0.317986, valid loss: 0.332624, valid accuracy: 91.39%\n",
      "iteration:  32300, epoch:  83, train loss: 0.327004, valid loss: 0.332624, valid accuracy: 91.39%\n",
      "iteration:  32400, epoch:  83, train loss: 0.330877, valid loss: 0.332624, valid accuracy: 91.39%\n",
      "iteration:  32500, epoch:  84, train loss: 0.320918, valid loss: 0.334781, valid accuracy: 90.90%\n",
      "iteration:  32600, epoch:  84, train loss: 0.326243, valid loss: 0.334781, valid accuracy: 90.90%\n",
      "iteration:  32700, epoch:  84, train loss: 0.325002, valid loss: 0.334781, valid accuracy: 90.90%\n",
      "iteration:  32800, epoch:  84, train loss: 0.333145, valid loss: 0.334781, valid accuracy: 90.90%\n",
      "iteration:  32900, epoch:  85, train loss: 0.355589, valid loss: 0.304031, valid accuracy: 92.45%\n",
      "iteration:  33000, epoch:  85, train loss: 0.325859, valid loss: 0.304031, valid accuracy: 92.45%\n",
      "iteration:  33100, epoch:  85, train loss: 0.348046, valid loss: 0.304031, valid accuracy: 92.45%\n",
      "iteration:  33200, epoch:  85, train loss: 0.360704, valid loss: 0.304031, valid accuracy: 92.45%\n",
      "iteration:  33300, epoch:  86, train loss: 0.327113, valid loss: 0.314401, valid accuracy: 92.01%\n",
      "iteration:  33400, epoch:  86, train loss: 0.326833, valid loss: 0.314401, valid accuracy: 92.01%\n",
      "iteration:  33500, epoch:  86, train loss: 0.337687, valid loss: 0.314401, valid accuracy: 92.01%\n",
      "iteration:  33600, epoch:  86, train loss: 0.345409, valid loss: 0.314401, valid accuracy: 92.01%\n",
      "iteration:  33700, epoch:  87, train loss: 0.326492, valid loss: 0.312628, valid accuracy: 92.15%\n",
      "iteration:  33800, epoch:  87, train loss: 0.329460, valid loss: 0.312628, valid accuracy: 92.15%\n",
      "iteration:  33900, epoch:  87, train loss: 0.328521, valid loss: 0.312628, valid accuracy: 92.15%\n",
      "iteration:  34000, epoch:  87, train loss: 0.330142, valid loss: 0.312628, valid accuracy: 92.15%\n",
      "iteration:  34100, epoch:  88, train loss: 0.321543, valid loss: 0.309540, valid accuracy: 92.47%\n",
      "iteration:  34200, epoch:  88, train loss: 0.355868, valid loss: 0.309540, valid accuracy: 92.47%\n",
      "iteration:  34300, epoch:  88, train loss: 0.337278, valid loss: 0.309540, valid accuracy: 92.47%\n",
      "iteration:  34400, epoch:  88, train loss: 0.380813, valid loss: 0.309540, valid accuracy: 92.47%\n",
      "iteration:  34500, epoch:  89, train loss: 0.331258, valid loss: 0.300668, valid accuracy: 91.88%\n",
      "iteration:  34600, epoch:  89, train loss: 0.329561, valid loss: 0.300668, valid accuracy: 91.88%\n",
      "iteration:  34700, epoch:  89, train loss: 0.327598, valid loss: 0.300668, valid accuracy: 91.88%\n",
      "iteration:  34800, epoch:  90, train loss: 0.321156, valid loss: 0.317012, valid accuracy: 92.00%\n",
      "iteration:  34900, epoch:  90, train loss: 0.320364, valid loss: 0.317012, valid accuracy: 92.00%\n",
      "iteration:  35000, epoch:  90, train loss: 0.327843, valid loss: 0.317012, valid accuracy: 92.00%\n",
      "iteration:  35100, epoch:  90, train loss: 0.322537, valid loss: 0.317012, valid accuracy: 92.00%\n",
      "iteration:  35200, epoch:  91, train loss: 0.323732, valid loss: 0.337180, valid accuracy: 91.23%\n",
      "iteration:  35300, epoch:  91, train loss: 0.335697, valid loss: 0.337180, valid accuracy: 91.23%\n",
      "iteration:  35400, epoch:  91, train loss: 0.340552, valid loss: 0.337180, valid accuracy: 91.23%\n",
      "iteration:  35500, epoch:  91, train loss: 0.330855, valid loss: 0.337180, valid accuracy: 91.23%\n",
      "iteration:  35600, epoch:  92, train loss: 0.336688, valid loss: 0.318126, valid accuracy: 92.17%\n",
      "iteration:  35700, epoch:  92, train loss: 0.325743, valid loss: 0.318126, valid accuracy: 92.17%\n",
      "iteration:  35800, epoch:  92, train loss: 0.327234, valid loss: 0.318126, valid accuracy: 92.17%\n",
      "iteration:  35900, epoch:  92, train loss: 0.335457, valid loss: 0.318126, valid accuracy: 92.17%\n",
      "iteration:  36000, epoch:  93, train loss: 0.320620, valid loss: 0.311293, valid accuracy: 92.44%\n",
      "iteration:  36100, epoch:  93, train loss: 0.338622, valid loss: 0.311293, valid accuracy: 92.44%\n",
      "iteration:  36200, epoch:  93, train loss: 0.332984, valid loss: 0.311293, valid accuracy: 92.44%\n",
      "iteration:  36300, epoch:  93, train loss: 0.319419, valid loss: 0.311293, valid accuracy: 92.44%\n",
      "iteration:  36400, epoch:  94, train loss: 0.321905, valid loss: 0.312508, valid accuracy: 92.18%\n",
      "iteration:  36500, epoch:  94, train loss: 0.322178, valid loss: 0.312508, valid accuracy: 92.18%\n",
      "iteration:  36600, epoch:  94, train loss: 0.324552, valid loss: 0.312508, valid accuracy: 92.18%\n",
      "iteration:  36700, epoch:  94, train loss: 0.334042, valid loss: 0.312508, valid accuracy: 92.18%\n",
      "iteration:  36800, epoch:  95, train loss: 0.332735, valid loss: 0.332312, valid accuracy: 91.56%\n",
      "iteration:  36900, epoch:  95, train loss: 0.311231, valid loss: 0.332312, valid accuracy: 91.56%\n",
      "iteration:  37000, epoch:  95, train loss: 0.340259, valid loss: 0.332312, valid accuracy: 91.56%\n",
      "iteration:  37100, epoch:  95, train loss: 0.326286, valid loss: 0.332312, valid accuracy: 91.56%\n",
      "iteration:  37200, epoch:  96, train loss: 0.327076, valid loss: 0.316575, valid accuracy: 91.75%\n",
      "iteration:  37300, epoch:  96, train loss: 0.337835, valid loss: 0.316575, valid accuracy: 91.75%\n",
      "iteration:  37400, epoch:  96, train loss: 0.333683, valid loss: 0.316575, valid accuracy: 91.75%\n",
      "iteration:  37500, epoch:  96, train loss: 0.336433, valid loss: 0.316575, valid accuracy: 91.75%\n",
      "iteration:  37600, epoch:  97, train loss: 0.318622, valid loss: 0.305304, valid accuracy: 92.35%\n",
      "iteration:  37700, epoch:  97, train loss: 0.313068, valid loss: 0.305304, valid accuracy: 92.35%\n",
      "iteration:  37800, epoch:  97, train loss: 0.321365, valid loss: 0.305304, valid accuracy: 92.35%\n",
      "iteration:  37900, epoch:  97, train loss: 0.334830, valid loss: 0.305304, valid accuracy: 92.35%\n",
      "iteration:  38000, epoch:  98, train loss: 0.328789, valid loss: 0.314918, valid accuracy: 91.54%\n",
      "iteration:  38100, epoch:  98, train loss: 0.329775, valid loss: 0.314918, valid accuracy: 91.54%\n",
      "iteration:  38200, epoch:  98, train loss: 0.329625, valid loss: 0.314918, valid accuracy: 91.54%\n",
      "iteration:  38300, epoch:  98, train loss: 0.321481, valid loss: 0.314918, valid accuracy: 91.54%\n",
      "iteration:  38400, epoch:  99, train loss: 0.318434, valid loss: 0.319918, valid accuracy: 91.66%\n",
      "iteration:  38500, epoch:  99, train loss: 0.331562, valid loss: 0.319918, valid accuracy: 91.66%\n",
      "iteration:  38600, epoch:  99, train loss: 0.330140, valid loss: 0.319918, valid accuracy: 91.66%\n",
      "iteration:  38700, epoch:  99, train loss: 0.337970, valid loss: 0.319918, valid accuracy: 91.66%\n",
      "iteration:  38800, epoch: 100, train loss: 0.319433, valid loss: 0.331988, valid accuracy: 91.59%\n",
      "iteration:  38900, epoch: 100, train loss: 0.325235, valid loss: 0.331988, valid accuracy: 91.59%\n",
      "iteration:  39000, epoch: 100, train loss: 0.322256, valid loss: 0.331988, valid accuracy: 91.59%\n",
      "iteration:  39100, epoch: 100, train loss: 0.333348, valid loss: 0.331988, valid accuracy: 91.59%\n",
      "iteration:  39200, epoch: 101, train loss: 0.312838, valid loss: 0.306793, valid accuracy: 92.55%\n",
      "iteration:  39300, epoch: 101, train loss: 0.319366, valid loss: 0.306793, valid accuracy: 92.55%\n",
      "iteration:  39400, epoch: 101, train loss: 0.311622, valid loss: 0.306793, valid accuracy: 92.55%\n",
      "iteration:  39500, epoch: 102, train loss: 0.322356, valid loss: 0.360316, valid accuracy: 91.21%\n",
      "iteration:  39600, epoch: 102, train loss: 0.315285, valid loss: 0.360316, valid accuracy: 91.21%\n",
      "iteration:  39700, epoch: 102, train loss: 0.328072, valid loss: 0.360316, valid accuracy: 91.21%\n",
      "iteration:  39800, epoch: 102, train loss: 0.322480, valid loss: 0.360316, valid accuracy: 91.21%\n",
      "iteration:  39900, epoch: 103, train loss: 0.323794, valid loss: 0.310354, valid accuracy: 92.68%\n",
      "iteration:  40000, epoch: 103, train loss: 0.332998, valid loss: 0.310354, valid accuracy: 92.68%\n",
      "iteration:  40100, epoch: 103, train loss: 0.326895, valid loss: 0.310354, valid accuracy: 92.68%\n",
      "iteration:  40200, epoch: 103, train loss: 0.325643, valid loss: 0.310354, valid accuracy: 92.68%\n",
      "iteration:  40300, epoch: 104, train loss: 0.321609, valid loss: 0.319328, valid accuracy: 92.61%\n",
      "iteration:  40400, epoch: 104, train loss: 0.318056, valid loss: 0.319328, valid accuracy: 92.61%\n",
      "iteration:  40500, epoch: 104, train loss: 0.328292, valid loss: 0.319328, valid accuracy: 92.61%\n",
      "iteration:  40600, epoch: 104, train loss: 0.318764, valid loss: 0.319328, valid accuracy: 92.61%\n",
      "iteration:  40700, epoch: 105, train loss: 0.335221, valid loss: 0.288375, valid accuracy: 92.96%\n",
      "iteration:  40800, epoch: 105, train loss: 0.328545, valid loss: 0.288375, valid accuracy: 92.96%\n",
      "iteration:  40900, epoch: 105, train loss: 0.320371, valid loss: 0.288375, valid accuracy: 92.96%\n",
      "iteration:  41000, epoch: 105, train loss: 0.332076, valid loss: 0.288375, valid accuracy: 92.96%\n",
      "iteration:  41100, epoch: 106, train loss: 0.308352, valid loss: 0.322453, valid accuracy: 92.09%\n",
      "iteration:  41200, epoch: 106, train loss: 0.318256, valid loss: 0.322453, valid accuracy: 92.09%\n",
      "iteration:  41300, epoch: 106, train loss: 0.321614, valid loss: 0.322453, valid accuracy: 92.09%\n",
      "iteration:  41400, epoch: 106, train loss: 0.322226, valid loss: 0.322453, valid accuracy: 92.09%\n",
      "iteration:  41500, epoch: 107, train loss: 0.319743, valid loss: 0.297653, valid accuracy: 92.16%\n",
      "iteration:  41600, epoch: 107, train loss: 0.316883, valid loss: 0.297653, valid accuracy: 92.16%\n",
      "iteration:  41700, epoch: 107, train loss: 0.322883, valid loss: 0.297653, valid accuracy: 92.16%\n",
      "iteration:  41800, epoch: 107, train loss: 0.317644, valid loss: 0.297653, valid accuracy: 92.16%\n",
      "iteration:  41900, epoch: 108, train loss: 0.316281, valid loss: 0.316958, valid accuracy: 92.15%\n",
      "iteration:  42000, epoch: 108, train loss: 0.321309, valid loss: 0.316958, valid accuracy: 92.15%\n",
      "iteration:  42100, epoch: 108, train loss: 0.319573, valid loss: 0.316958, valid accuracy: 92.15%\n",
      "iteration:  42200, epoch: 108, train loss: 0.322101, valid loss: 0.316958, valid accuracy: 92.15%\n",
      "iteration:  42300, epoch: 109, train loss: 0.317381, valid loss: 0.304673, valid accuracy: 92.29%\n",
      "iteration:  42400, epoch: 109, train loss: 0.327311, valid loss: 0.304673, valid accuracy: 92.29%\n",
      "iteration:  42500, epoch: 109, train loss: 0.328877, valid loss: 0.304673, valid accuracy: 92.29%\n",
      "iteration:  42600, epoch: 109, train loss: 0.318125, valid loss: 0.304673, valid accuracy: 92.29%\n",
      "iteration:  42700, epoch: 110, train loss: 0.328904, valid loss: 0.310565, valid accuracy: 92.03%\n",
      "iteration:  42800, epoch: 110, train loss: 0.322417, valid loss: 0.310565, valid accuracy: 92.03%\n",
      "iteration:  42900, epoch: 110, train loss: 0.321986, valid loss: 0.310565, valid accuracy: 92.03%\n",
      "iteration:  43000, epoch: 110, train loss: 0.320984, valid loss: 0.310565, valid accuracy: 92.03%\n",
      "iteration:  43100, epoch: 111, train loss: 0.330457, valid loss: 0.304241, valid accuracy: 92.28%\n",
      "iteration:  43200, epoch: 111, train loss: 0.320138, valid loss: 0.304241, valid accuracy: 92.28%\n",
      "iteration:  43300, epoch: 111, train loss: 0.312057, valid loss: 0.304241, valid accuracy: 92.28%\n",
      "iteration:  43400, epoch: 111, train loss: 0.325076, valid loss: 0.304241, valid accuracy: 92.28%\n",
      "iteration:  43500, epoch: 112, train loss: 0.305275, valid loss: 0.303728, valid accuracy: 92.45%\n",
      "iteration:  43600, epoch: 112, train loss: 0.318890, valid loss: 0.303728, valid accuracy: 92.45%\n",
      "iteration:  43700, epoch: 112, train loss: 0.328425, valid loss: 0.303728, valid accuracy: 92.45%\n",
      "iteration:  43800, epoch: 113, train loss: 0.333857, valid loss: 0.309559, valid accuracy: 92.90%\n",
      "iteration:  43900, epoch: 113, train loss: 0.313731, valid loss: 0.309559, valid accuracy: 92.90%\n",
      "iteration:  44000, epoch: 113, train loss: 0.327471, valid loss: 0.309559, valid accuracy: 92.90%\n",
      "iteration:  44100, epoch: 113, train loss: 0.315573, valid loss: 0.309559, valid accuracy: 92.90%\n",
      "iteration:  44200, epoch: 114, train loss: 0.311900, valid loss: 0.329039, valid accuracy: 92.04%\n",
      "iteration:  44300, epoch: 114, train loss: 0.328155, valid loss: 0.329039, valid accuracy: 92.04%\n",
      "iteration:  44400, epoch: 114, train loss: 0.322685, valid loss: 0.329039, valid accuracy: 92.04%\n",
      "iteration:  44500, epoch: 114, train loss: 0.329028, valid loss: 0.329039, valid accuracy: 92.04%\n",
      "iteration:  44600, epoch: 115, train loss: 0.330020, valid loss: 0.317232, valid accuracy: 92.40%\n",
      "iteration:  44700, epoch: 115, train loss: 0.319162, valid loss: 0.317232, valid accuracy: 92.40%\n",
      "iteration:  44800, epoch: 115, train loss: 0.324968, valid loss: 0.317232, valid accuracy: 92.40%\n",
      "iteration:  44900, epoch: 115, train loss: 0.320882, valid loss: 0.317232, valid accuracy: 92.40%\n",
      "iteration:  45000, epoch: 116, train loss: 0.326325, valid loss: 0.328256, valid accuracy: 91.53%\n",
      "iteration:  45100, epoch: 116, train loss: 0.323166, valid loss: 0.328256, valid accuracy: 91.53%\n",
      "iteration:  45200, epoch: 116, train loss: 0.332501, valid loss: 0.328256, valid accuracy: 91.53%\n",
      "iteration:  45300, epoch: 116, train loss: 0.317201, valid loss: 0.328256, valid accuracy: 91.53%\n",
      "iteration:  45400, epoch: 117, train loss: 0.314476, valid loss: 0.295143, valid accuracy: 92.24%\n",
      "iteration:  45500, epoch: 117, train loss: 0.314616, valid loss: 0.295143, valid accuracy: 92.24%\n",
      "iteration:  45600, epoch: 117, train loss: 0.320490, valid loss: 0.295143, valid accuracy: 92.24%\n",
      "iteration:  45700, epoch: 117, train loss: 0.314566, valid loss: 0.295143, valid accuracy: 92.24%\n",
      "iteration:  45800, epoch: 118, train loss: 0.310021, valid loss: 0.317854, valid accuracy: 92.25%\n",
      "iteration:  45900, epoch: 118, train loss: 0.319452, valid loss: 0.317854, valid accuracy: 92.25%\n",
      "iteration:  46000, epoch: 118, train loss: 0.313897, valid loss: 0.317854, valid accuracy: 92.25%\n",
      "iteration:  46100, epoch: 118, train loss: 0.314072, valid loss: 0.317854, valid accuracy: 92.25%\n",
      "iteration:  46200, epoch: 119, train loss: 0.313564, valid loss: 0.310424, valid accuracy: 92.60%\n",
      "iteration:  46300, epoch: 119, train loss: 0.315286, valid loss: 0.310424, valid accuracy: 92.60%\n",
      "iteration:  46400, epoch: 119, train loss: 0.313816, valid loss: 0.310424, valid accuracy: 92.60%\n",
      "iteration:  46500, epoch: 119, train loss: 0.307297, valid loss: 0.310424, valid accuracy: 92.60%\n",
      "iteration:  46600, epoch: 120, train loss: 0.320761, valid loss: 0.326111, valid accuracy: 92.24%\n",
      "iteration:  46700, epoch: 120, train loss: 0.310162, valid loss: 0.326111, valid accuracy: 92.24%\n",
      "iteration:  46800, epoch: 120, train loss: 0.310867, valid loss: 0.326111, valid accuracy: 92.24%\n",
      "iteration:  46900, epoch: 120, train loss: 0.315404, valid loss: 0.326111, valid accuracy: 92.24%\n"
     ]
    }
   ],
   "source": [
    "import paddle\n",
    "import paddle.fluid as fluid\n",
    "from paddle.utils.plot import Ploter\n",
    "import numpy as np\n",
    "import time\n",
    "import math\n",
    "import os\n",
    "\n",
    "epoch_num = 300   # number of training epochs, typical range [1,300]\n",
    "train_batch = 128 # training batch size, typical range [1,256]\n",
    "valid_batch = 128 # validation batch size, typical range [1,256]\n",
    "displays = 100    # log/display every N iterations\n",
    "\n",
    "start_lr = 0.00001                         # warmup starting learning rate, typical range [1e-8,5e-1]\n",
    "based_lr = 0.1                             # base learning rate, typical range [1e-8,5e-1]\n",
    "epoch_iters = math.ceil(50000/train_batch) # iterations per epoch (CIFAR-10 train set has 50000 images)\n",
    "warmup_iter = 10 * epoch_iters             # warmup iterations; warmup epochs typically in [1,10]\n",
    "\n",
    "momentum = 0.9     # optimizer momentum coefficient\n",
    "l2_decay = 0.00005 # L2 regularization coefficient, typical range [1e-5,5e-4]\n",
    "epsilon = 0.05     # label smoothing rate, typical range [1e-2,1e-1]\n",
    "\n",
    "checkpoint = False                   # resume-from-checkpoint flag\n",
    "model_path = './work/out/ssrnet'     # model checkpoint path\n",
    "result_txt = './work/out/result.txt' # results output file\n",
    "class_num  = 10                      # number of classes\n",
    "with fluid.dygraph.guard():\n",
    "    # 准备数据\n",
    "    train_reader = paddle.batch(\n",
    "        reader=paddle.reader.shuffle(reader=paddle.dataset.cifar.train10(), buf_size=50000),\n",
    "        batch_size=train_batch)\n",
    "    \n",
    "    valid_reader = paddle.batch(\n",
    "        reader=paddle.dataset.cifar.test10(),\n",
    "        batch_size=valid_batch)\n",
    "    \n",
    "    # 声明模型\n",
    "    model = SSRNet()\n",
    "    \n",
    "    # 优化算法\n",
    "    consine_lr = fluid.layers.cosine_decay(based_lr, epoch_iters, epoch_num) # 余弦衰减策略\n",
    "    decayed_lr = fluid.layers.linear_lr_warmup(consine_lr, warmup_iter, start_lr, based_lr) # 线性预热策略\n",
    "    \n",
    "    optimizer = fluid.optimizer.Momentum(\n",
    "        learning_rate=decayed_lr,                           # 衰减学习策略\n",
    "        momentum=momentum,                                  # 优化动量系数\n",
    "        regularization=fluid.regularizer.L2Decay(l2_decay), # 正则衰减系数\n",
    "        parameter_list=model.parameters())\n",
    "    \n",
    "    # 加载断点\n",
    "    if checkpoint: # 是否加载断点文件\n",
    "        model_dict, optimizer_dict = fluid.load_dygraph(model_path) # 加载断点参数\n",
    "        model.set_dict(model_dict)                                  # 设置权重参数\n",
    "        optimizer.set_dict(optimizer_dict)                          # 设置优化参数\n",
    "    else:          # 否则删除结果文件\n",
    "        if os.path.exists(result_txt): # 如果存在结果文件\n",
    "            os.remove(result_txt)      # 那么删除结果文件\n",
    "    \n",
    "    # 初始训练\n",
    "    avg_train_loss = 0 # 平均训练损失\n",
    "    avg_valid_loss = 0 # 平均验证损失\n",
    "    avg_valid_accu = 0 # 平均验证精度\n",
    "    \n",
    "    iterator = 1                                # 迭代次数\n",
    "    train_prompt = \"Train loss\"                 # 训练标签\n",
    "    valid_prompt = \"Valid loss\"                 # 验证标签\n",
    "    ploter = Ploter(train_prompt, valid_prompt) # 训练图像\n",
    "    \n",
    "    best_epoch = 0           # 最好周期\n",
    "    best_accu = 0            # 最好精度\n",
    "    best_loss = 100.0        # 最好损失\n",
    "    train_time = time.time() # 训练时间\n",
    "    \n",
    "    # 开始训练\n",
    "    for epoch_id in range(epoch_num):\n",
    "        # 训练模型\n",
    "        model.train() # 设置训练\n",
    "        for batch_id, train_data in enumerate(train_reader()):\n",
    "            # 读取数据\n",
    "            image_data = np.array([x[0] for x in train_data]).reshape((-1, 3, 32, 32)).astype(np.float32) # 读取图像数据\n",
    "            image_data = train_augment(image_data)                                                        # 使用数据增强\n",
    "            image = fluid.dygraph.to_variable(image_data)                                                 # 转换数据类型\n",
    "\n",
    "            label_data = np.array([x[1] for x in train_data]).astype(np.int64)                        # 读取标签数据\n",
    "            label = fluid.dygraph.to_variable(label_data)                                             # 转换数据类型\n",
    "            label = fluid.layers.label_smooth(label=fluid.one_hot(label, class_num), epsilon=epsilon) # 使用标签平滑\n",
    "            label.stop_gradient = True                                                                # 停止梯度传播\n",
    "\n",
    "            # 前向传播\n",
    "            infer = model(image)\n",
    "            \n",
    "            # 计算损失\n",
    "            loss = fluid.layers.cross_entropy(infer, label, soft_label=True)\n",
    "            train_loss = fluid.layers.mean(loss)\n",
    "            \n",
    "            # 反向传播\n",
    "            train_loss.backward()\n",
    "            optimizer.minimize(train_loss)\n",
    "            model.clear_gradients()\n",
    "            \n",
    "            # 显示结果\n",
    "            if iterator % displays == 0:\n",
    "                # 显示图像\n",
    "                avg_train_loss = train_loss.numpy()[0]                # 设置训练损失\n",
    "                ploter.append(train_prompt, iterator, avg_train_loss) # 添加训练图像\n",
    "                ploter.plot()                                         # 显示训练图像\n",
    "                \n",
    "                # 打印结果\n",
    "                print(\"iteration: {:6d}, epoch: {:3d}, train loss: {:.6f}, valid loss: {:.6f}, valid accuracy: {:.2%}\".format(\n",
    "                    iterator, epoch_id+1, avg_train_loss, avg_valid_loss, avg_valid_accu))\n",
    "                \n",
    "                # 写入文件\n",
    "                with open(result_txt, 'a') as file:\n",
    "                    file.write(\"iteration: {:6d}, epoch: {:3d}, train loss: {:.6f}, valid loss: {:.6f}, valid accuracy: {:.2%}\\n\".format(\n",
    "                        iterator, epoch_id+1, avg_train_loss, avg_valid_loss, avg_valid_accu))\n",
    "            \n",
    "            # 增加迭代\n",
    "            iterator += 1\n",
    "            \n",
    "        # 验证模型\n",
    "        valid_loss_list = [] # 验证损失列表\n",
    "        valid_accu_list = [] # 验证精度列表\n",
    "        \n",
    "        model.eval()   # 设置验证\n",
    "        for batch_id, valid_data in enumerate(valid_reader()):\n",
    "            # 读取数据\n",
    "            image_data = np.array([x[0] for x in valid_data]).reshape((-1, 3, 32, 32)).astype(np.float32) # 读取图像数据\n",
    "            image_data = valid_augment(image_data)                                                        # 使用图像增强\n",
    "            image = fluid.dygraph.to_variable(image_data)                                                 # 转换数据类型\n",
    "            \n",
    "            label_data = np.array([x[1] for x in valid_data]).reshape((-1, 1)).astype(np.int64) # 读取标签数据\n",
    "            label = fluid.dygraph.to_variable(label_data)                                       # 转换数据类型\n",
    "            label.stop_gradient = True                                                          # 停止梯度传播\n",
    "            \n",
    "            # 前向传播\n",
    "            infer = model(image)\n",
    "            \n",
    "            # 计算精度\n",
    "            valid_accu = fluid.layers.accuracy(infer,label)\n",
    "            \n",
    "            valid_accu_list.append(valid_accu.numpy())\n",
    "            \n",
    "            # 计算损失\n",
    "            loss = fluid.layers.cross_entropy(infer, label)\n",
    "            valid_loss = fluid.layers.mean(loss)\n",
    "            \n",
    "            valid_loss_list.append(valid_loss.numpy())\n",
    "        \n",
    "        # 设置结果\n",
    "        avg_valid_accu = np.mean(valid_accu_list)             # 设置验证精度\n",
    "        \n",
    "        avg_valid_loss = np.mean(valid_loss_list)             # 设置验证损失\n",
    "        ploter.append(valid_prompt, iterator, avg_valid_loss) # 添加训练图像\n",
    "        \n",
    "        # 保存模型\n",
    "        fluid.save_dygraph(model.state_dict(), model_path)     # 保存权重参数\n",
    "        fluid.save_dygraph(optimizer.state_dict(), model_path) # 保存优化参数\n",
    "        \n",
    "        if avg_valid_loss < best_loss:\n",
    "            fluid.save_dygraph(model.state_dict(), model_path + '-best') # 保存权重\n",
    "            \n",
    "            best_epoch = epoch_id + 1                                    # 更新迭代\n",
    "            best_accu = avg_valid_accu                                   # 更新精度\n",
    "            best_loss = avg_valid_loss                                   # 更新损失\n",
    "    \n",
    "    # 显示结果\n",
    "    train_time = time.time() - train_time # 设置训练时间\n",
    "    print('complete - train time: {:.0f}s, best epoch: {:3d}, best loss: {:.6f}, best accuracy: {:.2%}'.format(\n",
    "        train_time, best_epoch, best_loss, best_accu))\n",
    "    \n",
    "    # 写入文件\n",
    "    with open(result_txt, 'a') as file:\n",
    "        file.write('complete - train time: {:.0f}s, best epoch: {:3d}, best loss: {:.6f}, best accuracy: {:.2%}\\n'.format(\n",
    "            train_time, best_epoch, best_loss, best_accu))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "### 模型预测"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
    "import paddle.fluid as fluid\n",
    "from PIL import Image\n",
    "import numpy as np\n",
    "import time\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "image_path = './work/out/img.png' # path of the image to classify\n",
    "model_path = './work/out/ssrnet-best' # path of the best saved model weights\n",
    "\n",
    "# 加载图像\n",
    "def load_image(image_path):\n",
    "    \"\"\"\n",
    "    功能:\n",
    "        读取图像并转换到输入格式\n",
    "    输入:\n",
    "        image_path - 输入图像路径\n",
    "    输出:\n",
    "        image - 输出图像\n",
    "    \"\"\"\n",
    "    # 读取图像\n",
    "    image = Image.open(image_path) # 打开图像文件\n",
    "    \n",
    "    # 转换格式\n",
    "    image = image.resize((32, 32), Image.ANTIALIAS) # 调整图像大小\n",
    "    image = np.array(image, dtype=np.float32) # 转换数据格式，数据类型转换为float32\n",
    "\n",
    "    # 减去均值\n",
    "    mean = np.array([0.4914, 0.4822, 0.4465]).reshape((1, 1, -1)) # cifar数据集通道平均值\n",
    "    stdv = np.array([0.2471, 0.2435, 0.2616]).reshape((1, 1, -1)) # cifar数据集通道标准差\n",
    "    \n",
    "    image = (image/255.0 - mean) / stdv # 对图像进行归一化\n",
    "    image = image.transpose((2, 0, 1)).astype(np.float32) # 数据格式从HWC转换为CHW，数据类型转换为float32\n",
    "    \n",
    "    # 增加维度\n",
    "    image = np.expand_dims(image, axis=0) # 增加数据维度\n",
    "    \n",
    "    return image\n",
    "\n",
    "# Predict on a single image\n",
    "with fluid.dygraph.guard():\n",
    "    # Read and preprocess the image\n",
    "    image = load_image(image_path)\n",
    "    image = fluid.dygraph.to_variable(image)\n",
    "    \n",
    "    # Load the model\n",
    "    model = SSRNet()                               # instantiate the model (SSRNet is defined in an earlier cell)\n",
    "    model_dict, _ = fluid.load_dygraph(model_path) # load the saved weights\n",
    "    model.set_dict(model_dict)                     # apply the weights\n",
    "    model.eval()                                   # switch to evaluation mode\n",
    "    \n",
    "    # Forward pass\n",
    "    infer_time = time.time()              # inference start time\n",
    "    infer = model(image)\n",
    "    infer_time = time.time() - infer_time # elapsed inference time\n",
    "    \n",
    "    # Show the result\n",
    "    vlist = [\"airplane\", \"automobile\", \"bird\", \"cat\", \"deer\", \"dog\", \"frog\", \"horse\", \"ship\", \"truck\"] # CIFAR-10 class names, index-aligned with the model output\n",
    "    print('infer time: {:f}s, infer value: {}'.format(infer_time, vlist[np.argmax(infer.numpy())]) )\n",
    "    \n",
    "    image = Image.open(image_path) # reopen the original image for display\n",
    "    plt.figure(figsize=(3, 3))     # set the figure size\n",
    "    plt.imshow(image)              # draw the image\n",
    "    plt.show()                     # display the figure"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "PaddlePaddle 1.8.4 (Python 3.5)",
   "language": "python",
   "name": "py35-paddle1.2.0"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
