{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 查看FashionMNIST原始数据格式"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:32.363026Z",
     "start_time": "2025-06-26T01:43:29.447990Z"
    }
   },
   "outputs": [],
   "source": [
    "import torch\n",
    "import torchvision\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from torchvision import datasets, transforms\n",
    "from deeplearning_func import EarlyStopping, ModelSaver,train_classification_model,plot_learning_curves\n",
    "from deeplearning_func import evaluate_classification_model as evaluate_model\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 加载数据并处理为tensor"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:32.407799Z",
     "start_time": "2025-06-26T01:43:32.363026Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "训练集大小: 45000\n",
      "验证集大小: 5000\n"
     ]
    }
   ],
   "source": [
    "# 加载CIFAR-10数据集\n",
    "import os\n",
    "import pandas as pd\n",
    "from PIL import Image\n",
    "from torch.utils.data import Dataset\n",
    "\n",
    "# 定义CIFAR-10数据集类\n",
    "class CIFAR10Dataset(Dataset):\n",
    "    def __init__(self, img_dir, labels_df, transform=None):\n",
    "        self.img_dir = img_dir\n",
    "        self.transform = transform\n",
    "        \n",
    "        self.labels_df = labels_df\n",
    "        self.img_names = self.labels_df.iloc[:, 0].values.astype(str)  # 第一列是图片名称，确保为字符串类型\n",
    "        \n",
    "        # 类别名称字典，使用字典可以提高查找速度\n",
    "        self.class_names_dict = {'airplane': 0, 'automobile': 1, 'bird': 2, 'cat': 3, \n",
    "                                 'deer': 4, 'dog': 5, 'frog': 6, 'horse': 7, 'ship': 8, 'truck': 9}\n",
    "        # 将文本标签转换为数字ID\n",
    "        self.labels = [self.class_names_dict[label] for label in self.labels_df.iloc[:, 1].values]\n",
    "        \n",
    "    def __len__(self):\n",
    "        return len(self.labels)\n",
    "    \n",
    "    def __getitem__(self, idx):\n",
    "        img_path = os.path.join(self.img_dir, self.img_names[idx] + '.png') #图片路径\n",
    "        image = Image.open(img_path) #打开图片\n",
    "        label = self.labels[idx]\n",
    "        \n",
    "        if self.transform:\n",
    "            image_tensor = self.transform(image)\n",
    "            \n",
    "        return image_tensor, label\n",
    "\n",
    "# 读取标签文件\n",
    "img_dir = r\"D:\\cifar-10\\train\\train\"\n",
    "labels_file = r\"D:\\cifar-10\\trainLabels.csv\"\n",
    "labels_df = pd.read_csv(labels_file)\n",
    "\n",
    "# 划分数据集\n",
    "train_size = 45000\n",
    "val_size = 5000\n",
    "train_df = labels_df.iloc[:train_size]\n",
    "val_df = labels_df.iloc[train_size:]\n",
    "\n",
    "# 定义训练集数据预处理（包含图像增强）\n",
    "train_transform = transforms.Compose([\n",
    "    transforms.ToTensor(),\n",
    "    transforms.RandomRotation(40), #随机旋转\n",
    "    transforms.RandomHorizontalFlip(),  #随机水平翻转\n",
    "    transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
    "])\n",
    "\n",
    "# 定义验证集数据预处理（不做图像增强）\n",
    "val_transform = transforms.Compose([\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
    "])\n",
    "\n",
    "# 创建训练集和验证集\n",
    "train_dataset = CIFAR10Dataset(img_dir=img_dir, labels_df=train_df, transform=train_transform)\n",
    "val_dataset = CIFAR10Dataset(img_dir=img_dir, labels_df=val_df, transform=val_transform)\n",
    "\n",
    "# 定义类别名称\n",
    "class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n",
    "\n",
    "# 查看数据集基本信息\n",
    "print(f\"训练集大小: {len(train_dataset)}\")\n",
    "print(f\"验证集大小: {len(val_dataset)}\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [],
   "source": [
    "def cal_mean_std(ds):\n",
    "    mean = 0.\n",
    "    std = 0.\n",
    "    for img, _ in ds:\n",
    "        mean += img.mean(dim=(1, 2)) #dim=(1, 2)表示在通道维度上求平均\n",
    "        std += img.std(dim=(1, 2))  #dim=(1, 2)表示在通道维度上求标准差\n",
    "    mean /= len(ds)\n",
    "    std /= len(ds)\n",
    "    return mean, std\n",
    "# cal_mean_std(train_dataset)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 把数据集划分为训练集45000和验证集5000，并给DataLoader"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.144223Z",
     "start_time": "2025-06-26T01:43:33.135368Z"
    }
   },
   "outputs": [],
   "source": [
    "\n",
    "# 创建数据加载器\n",
    "batch_size = 64\n",
    "train_loader = torch.utils.data.DataLoader(\n",
    "    train_dataset,\n",
    "    batch_size=batch_size,\n",
    "    shuffle=True #打乱数据集，每次迭代时，数据集的顺序都会被打乱\n",
    ")\n",
    "\n",
    "val_loader = torch.utils.data.DataLoader(\n",
    "    val_dataset,\n",
    "    batch_size=batch_size,\n",
    "    shuffle=False\n",
    ")\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 搭建模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([20, 100])\n"
     ]
    }
   ],
   "source": [
    "#理解每个接口的方法，单独写例子\n",
    "import torch.nn as nn\n",
    "m=nn.BatchNorm1d(100)\n",
    "x=torch.randn(20,100)\n",
    "print(m(x).shape)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 搭建模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.152657Z",
     "start_time": "2025-06-26T01:43:33.148120Z"
    }
   },
   "outputs": [],
   "source": [
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "\n",
    "class NeuralNetwork(nn.Module):\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        \n",
    "        # 第一组卷积层 - 使用Sequential组织\n",
    "        self.conv_block1 = nn.Sequential(\n",
    "            nn.Conv2d(3, 128, kernel_size=3, padding=1),\n",
    "            nn.BatchNorm2d(128),\n",
    "            nn.ReLU(),\n",
    "            nn.Conv2d(128, 128, kernel_size=3, padding=1),\n",
    "            nn.BatchNorm2d(128),\n",
    "            nn.ReLU(),\n",
    "            nn.MaxPool2d(kernel_size=2, stride=2)\n",
    "        )\n",
    "        \n",
    "        # 第二组卷积层 - 使用Sequential组织\n",
    "        self.conv_block2 = nn.Sequential(\n",
    "            nn.Conv2d(128, 256, kernel_size=3, padding=1),\n",
    "            nn.BatchNorm2d(256),\n",
    "            nn.ReLU(),\n",
    "            nn.Conv2d(256, 256, kernel_size=3, padding=1),\n",
    "            nn.BatchNorm2d(256),\n",
    "            nn.ReLU(),\n",
    "            nn.MaxPool2d(kernel_size=2, stride=2)\n",
    "        )\n",
    "        \n",
    "        # 第三组卷积层 - 使用Sequential组织\n",
    "        self.conv_block3 = nn.Sequential(\n",
    "            nn.Conv2d(256, 512, kernel_size=3, padding=1),\n",
    "            nn.BatchNorm2d(512),\n",
    "            nn.ReLU(),\n",
    "            nn.Conv2d(512, 512, kernel_size=3, padding=1),\n",
    "            nn.BatchNorm2d(512),\n",
    "            nn.ReLU(),\n",
    "            nn.MaxPool2d(kernel_size=2, stride=2)\n",
    "        )\n",
    "        \n",
    "        # 全连接层 - 使用Sequential组织\n",
    "        self.classifier = nn.Sequential(\n",
    "            nn.Linear(512 * 4 * 4, 1024),\n",
    "            nn.ReLU(),\n",
    "            nn.Linear(1024, 10)\n",
    "        )\n",
    "        \n",
    "        # 初始化权重\n",
    "        self.init_weights()\n",
    "        \n",
    "    def init_weights(self):\n",
    "        \"\"\"使用 xavier 均匀分布来初始化卷积层和全连接层的权重\"\"\"\n",
    "        for m in self.modules():\n",
    "            if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear):\n",
    "                nn.init.xavier_uniform_(m.weight)\n",
    "                if m.bias is not None:\n",
    "                    nn.init.zeros_(m.bias)\n",
    "    \n",
    "    def forward(self, x):\n",
    "        # 前向传播使用Sequential定义的块\n",
    "        x = self.conv_block1(x)\n",
    "        x = self.conv_block2(x)\n",
    "        x = self.conv_block3(x)\n",
    "        \n",
    "        # 展平\n",
    "        x = x.view(x.size(0), -1)\n",
    "        \n",
    "        # 分类器\n",
    "        x = self.classifier(x)\n",
    "        \n",
    "        return x\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.185031Z",
     "start_time": "2025-06-26T01:43:33.152657Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "批次图像形状: torch.Size([64, 3, 32, 32])\n",
      "批次标签形状: torch.Size([64])\n",
      "----------------------------------------------------------------------------------------------------\n",
      "torch.Size([64, 10])\n"
     ]
    }
   ],
   "source": [
    "# 实例化模型\n",
    "model = NeuralNetwork()\n",
    "\n",
    "# 从train_loader获取第一个批次的数据\n",
    "dataiter = iter(train_loader)\n",
    "images, labels = next(dataiter)\n",
    "\n",
    "# 查看批次数据的形状\n",
    "print(\"批次图像形状:\", images.shape)\n",
    "print(\"批次标签形状:\", labels.shape)\n",
    "\n",
    "\n",
    "print('-'*100)\n",
    "# 进行前向传播\n",
    "with torch.no_grad():  # 不需要计算梯度\n",
    "    outputs = model(images)\n",
    "    \n",
    "\n",
    "print(outputs.shape)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.203053Z",
     "start_time": "2025-06-26T01:43:33.199532Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "需要求梯度的参数总量: 12979850\n",
      "模型总参数量: 12979850\n",
      "\n",
      "各层参数量明细:\n",
      "conv_block1.0.weight: 3456 参数\n",
      "conv_block1.0.bias: 128 参数\n",
      "conv_block1.1.weight: 128 参数\n",
      "conv_block1.1.bias: 128 参数\n",
      "conv_block1.3.weight: 147456 参数\n",
      "conv_block1.3.bias: 128 参数\n",
      "conv_block1.4.weight: 128 参数\n",
      "conv_block1.4.bias: 128 参数\n",
      "conv_block2.0.weight: 294912 参数\n",
      "conv_block2.0.bias: 256 参数\n",
      "conv_block2.1.weight: 256 参数\n",
      "conv_block2.1.bias: 256 参数\n",
      "conv_block2.3.weight: 589824 参数\n",
      "conv_block2.3.bias: 256 参数\n",
      "conv_block2.4.weight: 256 参数\n",
      "conv_block2.4.bias: 256 参数\n",
      "conv_block3.0.weight: 1179648 参数\n",
      "conv_block3.0.bias: 512 参数\n",
      "conv_block3.1.weight: 512 参数\n",
      "conv_block3.1.bias: 512 参数\n",
      "conv_block3.3.weight: 2359296 参数\n",
      "conv_block3.3.bias: 512 参数\n",
      "conv_block3.4.weight: 512 参数\n",
      "conv_block3.4.bias: 512 参数\n",
      "classifier.0.weight: 8388608 参数\n",
      "classifier.0.bias: 1024 参数\n",
      "classifier.2.weight: 10240 参数\n",
      "classifier.2.bias: 10 参数\n"
     ]
    }
   ],
   "source": [
    "# 计算模型的总参数量\n",
    "# 统计需要求梯度的参数总量\n",
    "total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
    "print(f\"需要求梯度的参数总量: {total_params}\")\n",
    "\n",
    "# 统计所有参数总量\n",
    "all_params = sum(p.numel() for p in model.parameters())\n",
    "print(f\"模型总参数量: {all_params}\")\n",
    "\n",
    "# 查看每层参数量明细\n",
    "print(\"\\n各层参数量明细:\")\n",
    "for name, param in model.named_parameters():\n",
    "    print(f\"{name}: {param.numel()} 参数\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "294912"
      ]
     },
     "execution_count": 45,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "128*3*3*256"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 各层参数量明细:\n",
    "conv1.weight: 288 参数 3*3*1*32\n",
    "conv1.bias: 32 参数\n",
    "conv2.weight: 9216 参数 3*3*32*32\n",
    "conv2.bias: 32 参数  \n",
    "conv3.weight: 18432 参数 3*3*32*64\n",
    "conv3.bias: 64 参数\n",
    "conv4.weight: 36864 参数  3*3*64*64\n",
    "conv4.bias: 64 参数\n",
    "conv5.weight: 73728 参数\n",
    "conv5.bias: 128 参数\n",
    "conv6.weight: 147456 参数\n",
    "conv6.bias: 128 参数\n",
    "fc1.weight: 294912 参数 128*3*3*256\n",
    "fc1.bias: 256 参数\n",
    "fc2.weight: 2560 参数\n",
    "fc2.bias: 10 参数"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.217395Z",
     "start_time": "2025-06-26T01:43:33.203561Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "OrderedDict([('conv_block1.0.weight',\n",
       "              tensor([[[[ 0.0100,  0.0134, -0.0088],\n",
       "                        [ 0.0354,  0.0044, -0.0484],\n",
       "                        [ 0.0299, -0.0614, -0.0195]],\n",
       "              \n",
       "                       [[-0.0590, -0.0123, -0.0088],\n",
       "                        [ 0.0671,  0.0258, -0.0033],\n",
       "                        [ 0.0052, -0.0684, -0.0653]],\n",
       "              \n",
       "                       [[-0.0488,  0.0162,  0.0465],\n",
       "                        [ 0.0113,  0.0559, -0.0701],\n",
       "                        [ 0.0201,  0.0264,  0.0697]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0291,  0.0525, -0.0599],\n",
       "                        [-0.0650, -0.0167,  0.0234],\n",
       "                        [ 0.0034, -0.0126,  0.0265]],\n",
       "              \n",
       "                       [[ 0.0281,  0.0174, -0.0315],\n",
       "                        [ 0.0700,  0.0666, -0.0140],\n",
       "                        [-0.0087,  0.0430, -0.0365]],\n",
       "              \n",
       "                       [[-0.0093, -0.0269, -0.0389],\n",
       "                        [-0.0391,  0.0506, -0.0004],\n",
       "                        [ 0.0397,  0.0207,  0.0452]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0045, -0.0641,  0.0219],\n",
       "                        [ 0.0161,  0.0205, -0.0443],\n",
       "                        [ 0.0407, -0.0706,  0.0128]],\n",
       "              \n",
       "                       [[ 0.0209, -0.0195, -0.0680],\n",
       "                        [-0.0546, -0.0468,  0.0367],\n",
       "                        [ 0.0458, -0.0090, -0.0542]],\n",
       "              \n",
       "                       [[-0.0447,  0.0318,  0.0529],\n",
       "                        [ 0.0062, -0.0330,  0.0382],\n",
       "                        [-0.0388,  0.0594, -0.0414]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0075, -0.0590,  0.0146],\n",
       "                        [ 0.0641, -0.0064, -0.0321],\n",
       "                        [ 0.0061, -0.0186, -0.0333]],\n",
       "              \n",
       "                       [[-0.0594, -0.0293,  0.0160],\n",
       "                        [ 0.0239, -0.0310, -0.0227],\n",
       "                        [ 0.0152,  0.0228, -0.0259]],\n",
       "              \n",
       "                       [[ 0.0095, -0.0004,  0.0178],\n",
       "                        [-0.0366, -0.0643, -0.0596],\n",
       "                        [-0.0472,  0.0483,  0.0209]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0645, -0.0603,  0.0516],\n",
       "                        [ 0.0037, -0.0505,  0.0469],\n",
       "                        [-0.0536,  0.0059, -0.0668]],\n",
       "              \n",
       "                       [[-0.0584, -0.0250, -0.0553],\n",
       "                        [-0.0263,  0.0342, -0.0608],\n",
       "                        [-0.0320,  0.0541, -0.0061]],\n",
       "              \n",
       "                       [[-0.0674,  0.0165,  0.0094],\n",
       "                        [ 0.0012,  0.0510,  0.0510],\n",
       "                        [-0.0583, -0.0329,  0.0350]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0157, -0.0049,  0.0670],\n",
       "                        [-0.0646, -0.0504, -0.0009],\n",
       "                        [ 0.0202,  0.0232,  0.0078]],\n",
       "              \n",
       "                       [[-0.0344,  0.0407,  0.0533],\n",
       "                        [ 0.0071, -0.0220,  0.0070],\n",
       "                        [-0.0232,  0.0594, -0.0195]],\n",
       "              \n",
       "                       [[-0.0470,  0.0253,  0.0337],\n",
       "                        [ 0.0243,  0.0406, -0.0571],\n",
       "                        [ 0.0560,  0.0124,  0.0332]]]])),\n",
       "             ('conv_block1.0.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block1.1.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1.])),\n",
       "             ('conv_block1.1.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block1.1.running_mean',\n",
       "              tensor([-6.6024e-04, -5.6920e-03,  4.6142e-03, -6.9911e-03, -1.8248e-03,\n",
       "                       3.7345e-03, -7.0578e-03,  3.1407e-03, -2.3914e-04, -6.6162e-03,\n",
       "                      -7.4550e-03, -7.5129e-03, -1.2418e-02,  3.6501e-03, -4.8143e-03,\n",
       "                       5.9318e-03, -2.8029e-04, -9.8418e-04, -3.8525e-03,  6.9607e-03,\n",
       "                       7.4117e-03, -1.4157e-03,  5.0879e-04,  6.2803e-03, -3.7047e-03,\n",
       "                      -1.0208e-02,  2.1854e-03, -4.6496e-05,  1.7777e-04, -5.8869e-03,\n",
       "                      -4.9207e-03,  2.0672e-03, -6.0610e-03, -2.0517e-03,  3.4061e-03,\n",
       "                      -1.1331e-03,  1.1610e-02, -1.2572e-02, -3.6766e-04, -1.2119e-03,\n",
       "                       7.4389e-03, -3.8656e-03,  1.6042e-04,  3.7903e-04, -4.7758e-03,\n",
       "                       3.8428e-04, -6.6765e-03,  4.7133e-03,  5.1214e-03,  3.9248e-03,\n",
       "                       1.6705e-03,  5.6731e-03, -2.0254e-04, -1.1140e-02,  1.1117e-02,\n",
       "                       3.6644e-03, -1.4399e-03, -8.3520e-03, -5.2544e-04, -2.5130e-03,\n",
       "                      -4.1278e-03,  1.1256e-03, -5.5425e-03,  1.2390e-03, -1.4521e-04,\n",
       "                      -8.4381e-03,  1.0455e-02, -1.8504e-03,  6.9788e-03, -4.8703e-04,\n",
       "                       4.4061e-03,  5.0958e-03,  1.6061e-02, -8.7649e-03, -4.1179e-03,\n",
       "                      -2.2379e-03, -5.0988e-04, -1.2516e-02, -2.7774e-03, -4.8962e-03,\n",
       "                      -7.5113e-03,  9.4641e-04,  2.0870e-03, -9.9241e-03, -1.9286e-03,\n",
       "                      -7.7556e-03,  7.6926e-03,  9.6492e-03, -4.7911e-03, -4.6392e-03,\n",
       "                      -2.4724e-03,  7.7097e-03,  3.6256e-03,  1.2382e-02, -5.3014e-03,\n",
       "                       6.8200e-03, -6.6281e-03, -1.0035e-02,  1.0330e-03,  2.6372e-03,\n",
       "                       4.2900e-03,  2.2939e-03, -1.3473e-02, -1.0723e-02,  7.7037e-03,\n",
       "                       6.1669e-04, -4.5184e-03, -1.9040e-04,  5.3019e-03, -4.7732e-03,\n",
       "                       5.0882e-03,  1.8459e-03,  1.5699e-03, -1.1318e-03, -7.9053e-03,\n",
       "                       4.3724e-03,  9.1926e-03,  2.4282e-03,  1.3931e-03,  9.6001e-03,\n",
       "                      -8.9455e-03, -6.4838e-03,  1.5452e-02, -9.5347e-03, -1.4339e-05,\n",
       "                       8.0884e-03,  7.4402e-03, -5.7680e-03])),\n",
       "             ('conv_block1.1.running_var',\n",
       "              tensor([0.9025, 0.9054, 0.9052, 0.9183, 0.9037, 0.9030, 0.9101, 0.9043, 0.9018,\n",
       "                      0.9118, 0.9135, 0.9082, 0.9209, 0.9023, 0.9061, 0.9076, 0.9059, 0.9017,\n",
       "                      0.9066, 0.9067, 0.9141, 0.9029, 0.9025, 0.9048, 0.9027, 0.9262, 0.9122,\n",
       "                      0.9023, 0.9021, 0.9096, 0.9033, 0.9013, 0.9047, 0.9029, 0.9036, 0.9048,\n",
       "                      0.9197, 0.9333, 0.9029, 0.9013, 0.9255, 0.9049, 0.9017, 0.9023, 0.9064,\n",
       "                      0.9025, 0.9086, 0.9102, 0.9036, 0.9027, 0.9019, 0.9091, 0.9010, 0.9189,\n",
       "                      0.9207, 0.9044, 0.9030, 0.9126, 0.9028, 0.9069, 0.9044, 0.9027, 0.9039,\n",
       "                      0.9049, 0.9064, 0.9174, 0.9300, 0.9044, 0.9159, 0.9009, 0.9042, 0.9068,\n",
       "                      0.9513, 0.9170, 0.9040, 0.9039, 0.9007, 0.9152, 0.9025, 0.9042, 0.9082,\n",
       "                      0.9046, 0.9026, 0.9157, 0.9043, 0.9192, 0.9090, 0.9154, 0.9032, 0.9056,\n",
       "                      0.9031, 0.9112, 0.9081, 0.9263, 0.9043, 0.9085, 0.9065, 0.9157, 0.9073,\n",
       "                      0.9032, 0.9073, 0.9025, 0.9290, 0.9140, 0.9133, 0.9014, 0.9049, 0.9020,\n",
       "                      0.9063, 0.9029, 0.9045, 0.9011, 0.9040, 0.9011, 0.9159, 0.9071, 0.9135,\n",
       "                      0.9014, 0.9023, 0.9110, 0.9217, 0.9064, 0.9446, 0.9181, 0.9038, 0.9104,\n",
       "                      0.9182, 0.9056])),\n",
       "             ('conv_block1.1.num_batches_tracked', tensor(1)),\n",
       "             ('conv_block1.3.weight',\n",
       "              tensor([[[[ 0.0043,  0.0494, -0.0156],\n",
       "                        [ 0.0279,  0.0276, -0.0176],\n",
       "                        [-0.0275,  0.0039,  0.0038]],\n",
       "              \n",
       "                       [[ 0.0163, -0.0329,  0.0195],\n",
       "                        [-0.0213, -0.0078,  0.0249],\n",
       "                        [-0.0247, -0.0131,  0.0396]],\n",
       "              \n",
       "                       [[-0.0418, -0.0134, -0.0013],\n",
       "                        [-0.0021, -0.0196, -0.0096],\n",
       "                        [-0.0005,  0.0475,  0.0400]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 0.0459, -0.0499,  0.0161],\n",
       "                        [ 0.0260,  0.0381, -0.0321],\n",
       "                        [ 0.0033,  0.0384, -0.0272]],\n",
       "              \n",
       "                       [[ 0.0348, -0.0212, -0.0085],\n",
       "                        [ 0.0167, -0.0478,  0.0063],\n",
       "                        [ 0.0311,  0.0364, -0.0306]],\n",
       "              \n",
       "                       [[ 0.0485, -0.0473, -0.0104],\n",
       "                        [-0.0389,  0.0285, -0.0013],\n",
       "                        [-0.0202,  0.0122,  0.0197]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0484, -0.0396, -0.0468],\n",
       "                        [-0.0011,  0.0446, -0.0208],\n",
       "                        [-0.0267, -0.0472,  0.0115]],\n",
       "              \n",
       "                       [[ 0.0272, -0.0059,  0.0405],\n",
       "                        [ 0.0236, -0.0126, -0.0030],\n",
       "                        [-0.0509, -0.0113,  0.0159]],\n",
       "              \n",
       "                       [[ 0.0346,  0.0222,  0.0335],\n",
       "                        [ 0.0322,  0.0275,  0.0496],\n",
       "                        [-0.0423, -0.0097,  0.0478]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 0.0055, -0.0160, -0.0360],\n",
       "                        [ 0.0275, -0.0416,  0.0064],\n",
       "                        [ 0.0371,  0.0285,  0.0171]],\n",
       "              \n",
       "                       [[-0.0399,  0.0472, -0.0046],\n",
       "                        [ 0.0154, -0.0213,  0.0490],\n",
       "                        [-0.0311,  0.0269, -0.0359]],\n",
       "              \n",
       "                       [[ 0.0481, -0.0078,  0.0036],\n",
       "                        [ 0.0161,  0.0270,  0.0141],\n",
       "                        [-0.0012, -0.0502,  0.0375]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0325, -0.0498,  0.0037],\n",
       "                        [ 0.0303,  0.0194, -0.0012],\n",
       "                        [-0.0255,  0.0360, -0.0189]],\n",
       "              \n",
       "                       [[-0.0161, -0.0447,  0.0009],\n",
       "                        [-0.0112,  0.0484, -0.0120],\n",
       "                        [-0.0300,  0.0463, -0.0177]],\n",
       "              \n",
       "                       [[-0.0254, -0.0338, -0.0299],\n",
       "                        [ 0.0441,  0.0055,  0.0233],\n",
       "                        [ 0.0023,  0.0426,  0.0069]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0051, -0.0002, -0.0269],\n",
       "                        [-0.0430, -0.0364, -0.0112],\n",
       "                        [ 0.0246, -0.0323, -0.0343]],\n",
       "              \n",
       "                       [[-0.0340,  0.0097,  0.0432],\n",
       "                        [-0.0496,  0.0323,  0.0370],\n",
       "                        [ 0.0291,  0.0249, -0.0354]],\n",
       "              \n",
       "                       [[ 0.0191, -0.0106, -0.0306],\n",
       "                        [ 0.0159, -0.0050, -0.0076],\n",
       "                        [-0.0115, -0.0489,  0.0277]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[-0.0428, -0.0478, -0.0221],\n",
       "                        [-0.0186, -0.0464, -0.0345],\n",
       "                        [-0.0036, -0.0389,  0.0069]],\n",
       "              \n",
       "                       [[ 0.0136, -0.0359, -0.0426],\n",
       "                        [ 0.0320,  0.0283, -0.0210],\n",
       "                        [ 0.0427,  0.0070, -0.0438]],\n",
       "              \n",
       "                       [[-0.0398, -0.0340, -0.0136],\n",
       "                        [-0.0334,  0.0209, -0.0329],\n",
       "                        [ 0.0437, -0.0275,  0.0157]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 0.0386,  0.0493,  0.0137],\n",
       "                        [ 0.0449, -0.0073,  0.0495],\n",
       "                        [-0.0085,  0.0488,  0.0234]],\n",
       "              \n",
       "                       [[-0.0341,  0.0148,  0.0307],\n",
       "                        [-0.0226, -0.0241,  0.0190],\n",
       "                        [ 0.0255,  0.0448,  0.0437]],\n",
       "              \n",
       "                       [[ 0.0024,  0.0166,  0.0208],\n",
       "                        [ 0.0013, -0.0011, -0.0471],\n",
       "                        [ 0.0182, -0.0406,  0.0386]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0488,  0.0438,  0.0358],\n",
       "                        [-0.0104, -0.0061,  0.0477],\n",
       "                        [-0.0380, -0.0118, -0.0169]],\n",
       "              \n",
       "                       [[-0.0195, -0.0204,  0.0096],\n",
       "                        [-0.0220,  0.0144, -0.0392],\n",
       "                        [-0.0336,  0.0379,  0.0128]],\n",
       "              \n",
       "                       [[-0.0328,  0.0464,  0.0389],\n",
       "                        [ 0.0266, -0.0109, -0.0305],\n",
       "                        [-0.0268, -0.0256, -0.0286]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 0.0496,  0.0503, -0.0413],\n",
       "                        [-0.0328, -0.0351, -0.0175],\n",
       "                        [ 0.0105, -0.0397,  0.0254]],\n",
       "              \n",
       "                       [[-0.0299, -0.0497,  0.0275],\n",
       "                        [-0.0292,  0.0259,  0.0414],\n",
       "                        [ 0.0267,  0.0208, -0.0487]],\n",
       "              \n",
       "                       [[ 0.0411, -0.0117, -0.0394],\n",
       "                        [ 0.0324, -0.0076,  0.0238],\n",
       "                        [-0.0253, -0.0158,  0.0299]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0417,  0.0390, -0.0335],\n",
       "                        [ 0.0389,  0.0053, -0.0126],\n",
       "                        [ 0.0380,  0.0062,  0.0442]],\n",
       "              \n",
       "                       [[ 0.0450,  0.0217,  0.0119],\n",
       "                        [-0.0321, -0.0420, -0.0409],\n",
       "                        [-0.0414,  0.0012, -0.0056]],\n",
       "              \n",
       "                       [[ 0.0488,  0.0304, -0.0334],\n",
       "                        [-0.0345, -0.0149, -0.0311],\n",
       "                        [-0.0242,  0.0206, -0.0065]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 0.0110,  0.0419, -0.0117],\n",
       "                        [-0.0297,  0.0507,  0.0305],\n",
       "                        [-0.0134,  0.0078, -0.0172]],\n",
       "              \n",
       "                       [[-0.0047,  0.0068, -0.0241],\n",
       "                        [-0.0010, -0.0498, -0.0474],\n",
       "                        [-0.0487,  0.0189,  0.0323]],\n",
       "              \n",
       "                       [[-0.0157, -0.0303, -0.0168],\n",
       "                        [ 0.0333,  0.0262,  0.0196],\n",
       "                        [ 0.0373,  0.0317, -0.0187]]]])),\n",
       "             ('conv_block1.3.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block1.4.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1.])),\n",
       "             ('conv_block1.4.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block1.4.running_mean',\n",
       "              tensor([ 5.8378e-03, -3.6203e-03,  3.2188e-02, -1.3418e-02,  2.2634e-02,\n",
       "                       5.3996e-02,  6.2726e-02,  4.2027e-03,  3.8679e-02, -7.5290e-04,\n",
       "                       2.2017e-02,  8.7979e-03, -1.5375e-02,  3.3029e-02, -6.4632e-02,\n",
       "                       4.8156e-02, -1.5711e-03,  5.9693e-02,  3.2957e-02,  6.3657e-03,\n",
       "                       6.1197e-02,  1.2087e-02,  5.3965e-02, -6.4626e-02, -4.0792e-03,\n",
       "                       7.9594e-02,  3.6500e-02,  3.4445e-03, -2.3069e-02, -2.5628e-02,\n",
       "                      -1.5706e-03, -3.5391e-03,  7.8147e-02, -3.4452e-03,  1.5379e-02,\n",
       "                       6.0610e-02, -6.0018e-02,  5.3752e-03, -2.9181e-02,  3.6821e-03,\n",
       "                       3.7313e-02,  3.6928e-02,  6.3204e-02, -3.3480e-03,  3.2450e-02,\n",
       "                       6.3705e-02, -3.5354e-02,  2.0490e-02,  6.5467e-03, -1.6718e-02,\n",
       "                      -4.3265e-03, -1.5318e-02,  8.9261e-02, -1.9545e-02,  5.0036e-02,\n",
       "                      -1.2051e-02,  1.2728e-02, -1.5072e-02, -4.2227e-02, -2.9383e-02,\n",
       "                       6.7531e-02,  1.2982e-02, -2.9195e-02, -2.5867e-02,  1.6483e-02,\n",
       "                      -4.7165e-02, -4.6726e-02, -4.8063e-02,  5.5657e-02, -5.8472e-02,\n",
       "                      -1.8130e-02,  6.5477e-02,  7.2996e-03,  4.0282e-02,  3.0727e-02,\n",
       "                       2.7194e-02, -1.9856e-02, -3.3089e-02, -4.4142e-03, -2.6999e-02,\n",
       "                       8.4199e-03,  5.2889e-03,  3.5003e-02, -6.9924e-02, -7.5995e-02,\n",
       "                      -5.0991e-02, -1.4911e-02, -3.0011e-02, -2.8808e-02, -2.0153e-02,\n",
       "                      -1.2054e-02,  4.7177e-02, -3.5911e-02,  3.8258e-02, -2.4434e-02,\n",
       "                      -1.4923e-02, -1.9341e-02,  3.9408e-03, -6.5411e-06, -3.1216e-02,\n",
       "                      -1.5020e-02, -6.0691e-02,  6.9565e-02, -5.1988e-02,  4.9913e-02,\n",
       "                      -4.4771e-02, -1.6656e-03, -3.3124e-02, -7.9679e-02, -2.9941e-02,\n",
       "                       4.4312e-02,  1.5990e-02, -6.3359e-02,  3.5833e-02,  1.8240e-02,\n",
       "                       2.2540e-02,  1.1831e-02, -1.6800e-02,  5.9178e-03, -4.3958e-02,\n",
       "                      -1.2121e-02,  2.4051e-03, -5.5372e-02, -1.7208e-02, -5.1028e-02,\n",
       "                      -1.1446e-02,  2.3675e-02, -3.7793e-02])),\n",
       "             ('conv_block1.4.running_var',\n",
       "              tensor([0.9229, 0.9229, 0.9139, 0.9634, 0.9332, 0.9331, 0.9299, 0.9160, 1.0007,\n",
       "                      0.9234, 0.9340, 0.9454, 0.9136, 0.9193, 0.9293, 0.9212, 0.9213, 0.9224,\n",
       "                      0.9210, 0.9279, 0.9527, 0.9438, 0.9333, 0.9546, 1.0297, 0.9262, 0.9488,\n",
       "                      0.9238, 0.9251, 0.9246, 0.9146, 0.9247, 0.9276, 0.9254, 0.9271, 0.9459,\n",
       "                      0.9535, 0.9357, 0.9239, 0.9222, 0.9279, 0.9304, 0.9382, 0.9552, 0.9223,\n",
       "                      0.9290, 0.9265, 0.9189, 0.9268, 0.9423, 0.9235, 0.9173, 0.9350, 0.9204,\n",
       "                      0.9267, 0.9176, 1.0012, 0.9200, 0.9451, 0.9259, 0.9311, 0.9285, 0.9218,\n",
       "                      0.9186, 0.9388, 0.9687, 0.9197, 0.9410, 0.9237, 0.9481, 0.9235, 0.9425,\n",
       "                      0.9756, 0.9170, 0.9237, 0.9188, 0.9387, 0.9171, 0.9202, 0.9207, 0.9243,\n",
       "                      0.9896, 0.9134, 0.9343, 0.9537, 0.9226, 0.9318, 0.9138, 0.9189, 0.9374,\n",
       "                      0.9468, 0.9243, 0.9287, 0.9172, 0.9156, 0.9148, 0.9309, 0.9255, 0.9189,\n",
       "                      0.9159, 0.9394, 0.9539, 0.9780, 0.9184, 0.9263, 0.9320, 0.9549, 0.9406,\n",
       "                      0.9298, 0.9289, 0.9234, 0.9295, 0.9328, 0.9202, 0.9370, 0.9215, 0.9231,\n",
       "                      0.9213, 0.9566, 0.9814, 0.9263, 0.9355, 0.9324, 0.9210, 0.9389, 0.9281,\n",
       "                      0.9344, 0.9538])),\n",
       "             ('conv_block1.4.num_batches_tracked', tensor(1)),\n",
       "             ('conv_block2.0.weight',\n",
       "              tensor([[[[ 0.0281,  0.0206,  0.0240],\n",
       "                        [-0.0307,  0.0344,  0.0192],\n",
       "                        [-0.0317,  0.0299, -0.0342]],\n",
       "              \n",
       "                       [[-0.0393, -0.0118, -0.0133],\n",
       "                        [ 0.0157,  0.0157,  0.0080],\n",
       "                        [-0.0291, -0.0382, -0.0334]],\n",
       "              \n",
       "                       [[-0.0211,  0.0141,  0.0248],\n",
       "                        [-0.0142,  0.0091, -0.0185],\n",
       "                        [-0.0230,  0.0108,  0.0218]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 0.0361,  0.0326,  0.0349],\n",
       "                        [ 0.0252,  0.0132,  0.0076],\n",
       "                        [-0.0178,  0.0351,  0.0079]],\n",
       "              \n",
       "                       [[-0.0265, -0.0406,  0.0301],\n",
       "                        [-0.0381,  0.0260, -0.0168],\n",
       "                        [ 0.0388,  0.0411, -0.0230]],\n",
       "              \n",
       "                       [[ 0.0037, -0.0156,  0.0377],\n",
       "                        [ 0.0226, -0.0294, -0.0049],\n",
       "                        [ 0.0335,  0.0292,  0.0341]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0075, -0.0060, -0.0304],\n",
       "                        [-0.0040,  0.0237, -0.0407],\n",
       "                        [ 0.0188, -0.0111,  0.0225]],\n",
       "              \n",
       "                       [[-0.0115, -0.0291,  0.0193],\n",
       "                        [ 0.0022, -0.0232, -0.0080],\n",
       "                        [-0.0183, -0.0282, -0.0003]],\n",
       "              \n",
       "                       [[ 0.0108, -0.0068,  0.0390],\n",
       "                        [ 0.0362,  0.0405, -0.0234],\n",
       "                        [-0.0151,  0.0079,  0.0168]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 0.0030,  0.0239,  0.0352],\n",
       "                        [-0.0342, -0.0381, -0.0192],\n",
       "                        [-0.0145,  0.0175, -0.0094]],\n",
       "              \n",
       "                       [[ 0.0262,  0.0192,  0.0035],\n",
       "                        [-0.0330, -0.0171,  0.0202],\n",
       "                        [ 0.0197, -0.0269, -0.0031]],\n",
       "              \n",
       "                       [[ 0.0078, -0.0223,  0.0103],\n",
       "                        [ 0.0199,  0.0286,  0.0231],\n",
       "                        [-0.0125,  0.0386, -0.0267]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0285, -0.0354, -0.0242],\n",
       "                        [ 0.0399, -0.0401,  0.0028],\n",
       "                        [ 0.0235,  0.0232,  0.0349]],\n",
       "              \n",
       "                       [[ 0.0259,  0.0034,  0.0382],\n",
       "                        [ 0.0290,  0.0092, -0.0364],\n",
       "                        [ 0.0314,  0.0370,  0.0132]],\n",
       "              \n",
       "                       [[ 0.0258, -0.0175, -0.0111],\n",
       "                        [ 0.0139,  0.0120, -0.0131],\n",
       "                        [-0.0053,  0.0147,  0.0383]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0291, -0.0003, -0.0076],\n",
       "                        [ 0.0322,  0.0308, -0.0367],\n",
       "                        [ 0.0407, -0.0073,  0.0382]],\n",
       "              \n",
       "                       [[-0.0146,  0.0129,  0.0375],\n",
       "                        [-0.0340,  0.0178,  0.0385],\n",
       "                        [ 0.0381, -0.0178, -0.0072]],\n",
       "              \n",
       "                       [[-0.0022, -0.0160, -0.0269],\n",
       "                        [ 0.0304,  0.0232, -0.0041],\n",
       "                        [ 0.0350, -0.0280, -0.0113]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0197,  0.0341,  0.0381],\n",
       "                        [ 0.0161, -0.0021,  0.0006],\n",
       "                        [-0.0042,  0.0299,  0.0178]],\n",
       "              \n",
       "                       [[ 0.0313,  0.0415,  0.0069],\n",
       "                        [ 0.0157,  0.0147,  0.0144],\n",
       "                        [ 0.0317,  0.0170, -0.0344]],\n",
       "              \n",
       "                       [[ 0.0406,  0.0376, -0.0263],\n",
       "                        [-0.0369,  0.0199,  0.0396],\n",
       "                        [-0.0381,  0.0415, -0.0313]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0031, -0.0320, -0.0329],\n",
       "                        [-0.0143, -0.0302,  0.0069],\n",
       "                        [ 0.0406, -0.0223,  0.0055]],\n",
       "              \n",
       "                       [[ 0.0387,  0.0009,  0.0031],\n",
       "                        [-0.0014,  0.0226, -0.0025],\n",
       "                        [-0.0024,  0.0174,  0.0182]],\n",
       "              \n",
       "                       [[ 0.0047, -0.0317,  0.0368],\n",
       "                        [-0.0191, -0.0106, -0.0351],\n",
       "                        [ 0.0167,  0.0354, -0.0159]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0269, -0.0404,  0.0018],\n",
       "                        [ 0.0344, -0.0238,  0.0365],\n",
       "                        [ 0.0218,  0.0145, -0.0394]],\n",
       "              \n",
       "                       [[-0.0415, -0.0297,  0.0228],\n",
       "                        [-0.0205,  0.0334,  0.0330],\n",
       "                        [ 0.0182, -0.0231, -0.0247]],\n",
       "              \n",
       "                       [[-0.0107, -0.0101, -0.0318],\n",
       "                        [-0.0363,  0.0314,  0.0119],\n",
       "                        [ 0.0365, -0.0048, -0.0052]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0354, -0.0352, -0.0341],\n",
       "                        [ 0.0335,  0.0013,  0.0340],\n",
       "                        [ 0.0175,  0.0237,  0.0135]],\n",
       "              \n",
       "                       [[ 0.0260,  0.0011,  0.0079],\n",
       "                        [ 0.0395, -0.0103,  0.0200],\n",
       "                        [-0.0104,  0.0254,  0.0090]],\n",
       "              \n",
       "                       [[ 0.0317, -0.0204,  0.0045],\n",
       "                        [-0.0272, -0.0170,  0.0169],\n",
       "                        [ 0.0031,  0.0357, -0.0205]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0322,  0.0150,  0.0338],\n",
       "                        [ 0.0295,  0.0065,  0.0241],\n",
       "                        [-0.0230,  0.0078,  0.0275]],\n",
       "              \n",
       "                       [[ 0.0139,  0.0241, -0.0387],\n",
       "                        [-0.0319, -0.0409,  0.0174],\n",
       "                        [-0.0163, -0.0205, -0.0060]],\n",
       "              \n",
       "                       [[-0.0299, -0.0231,  0.0368],\n",
       "                        [ 0.0200, -0.0329,  0.0387],\n",
       "                        [ 0.0062, -0.0313,  0.0219]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 0.0168, -0.0389, -0.0127],\n",
       "                        [ 0.0416, -0.0086,  0.0091],\n",
       "                        [-0.0043,  0.0233,  0.0068]],\n",
       "              \n",
       "                       [[-0.0183,  0.0371,  0.0158],\n",
       "                        [ 0.0263,  0.0334, -0.0119],\n",
       "                        [ 0.0246, -0.0006,  0.0022]],\n",
       "              \n",
       "                       [[ 0.0044, -0.0373,  0.0346],\n",
       "                        [-0.0199,  0.0030,  0.0067],\n",
       "                        [-0.0387,  0.0355, -0.0158]]]])),\n",
       "             ('conv_block2.0.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block2.1.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1.])),\n",
       "             ('conv_block2.1.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block2.1.running_mean',\n",
       "              tensor([ 0.0709,  0.0340, -0.0112,  0.0117,  0.0073,  0.0488,  0.0308,  0.0091,\n",
       "                      -0.0969,  0.0406,  0.0139, -0.0275, -0.0433,  0.0156, -0.0874,  0.0624,\n",
       "                      -0.0239,  0.0927, -0.0810, -0.0508, -0.0480, -0.1120,  0.0138, -0.0651,\n",
       "                      -0.0170,  0.0024, -0.0866,  0.0127,  0.0821,  0.0061, -0.0115, -0.0690,\n",
       "                       0.0183,  0.0332,  0.0103, -0.0203, -0.0229, -0.0250, -0.0205,  0.0619,\n",
       "                      -0.0994,  0.0299, -0.0316, -0.0058, -0.0432, -0.0564,  0.0609, -0.0123,\n",
       "                      -0.0121, -0.0006, -0.0142,  0.0323, -0.0990,  0.0355,  0.0307,  0.0033,\n",
       "                       0.0636, -0.0219,  0.0188,  0.0056, -0.0098, -0.0924, -0.0673, -0.0811,\n",
       "                      -0.0536,  0.0090, -0.0389,  0.0206,  0.0027,  0.0221,  0.0115, -0.0189,\n",
       "                       0.0988,  0.0494,  0.0536,  0.0304, -0.0192,  0.0104, -0.0139,  0.0419,\n",
       "                      -0.0217, -0.0330, -0.0661, -0.0314,  0.0357, -0.0008,  0.0065, -0.0211,\n",
       "                      -0.0088,  0.0495,  0.0438,  0.0185,  0.0135,  0.0109,  0.0050,  0.0870,\n",
       "                       0.0332,  0.0699,  0.0641,  0.0385, -0.0605,  0.0047,  0.0614, -0.0204,\n",
       "                       0.0335, -0.0458, -0.0391, -0.0254, -0.0709, -0.0336, -0.0301,  0.0071,\n",
       "                       0.0195,  0.0132,  0.0213,  0.0511, -0.0292,  0.0142,  0.0131, -0.1197,\n",
       "                      -0.0117, -0.0676, -0.0483,  0.0521,  0.0541, -0.0343,  0.0309,  0.0094,\n",
       "                      -0.0305, -0.0858,  0.0097,  0.0169, -0.0071, -0.0182, -0.0212, -0.0019,\n",
       "                      -0.0469, -0.0292, -0.0018,  0.0522,  0.0051, -0.0092, -0.0007,  0.0170,\n",
       "                       0.0901, -0.0722, -0.0091,  0.0383,  0.0083, -0.0097, -0.0150,  0.0202,\n",
       "                      -0.0544, -0.0104,  0.0159,  0.0693,  0.0310, -0.0144,  0.0559, -0.0368,\n",
       "                       0.0548,  0.0117, -0.0870, -0.0384, -0.0316,  0.1047,  0.0520, -0.0653,\n",
       "                      -0.0398, -0.0176,  0.0280, -0.0169, -0.0468,  0.1098, -0.0104,  0.0134,\n",
       "                      -0.0660, -0.0184,  0.0089, -0.0313, -0.0162, -0.0058,  0.0349,  0.0064,\n",
       "                      -0.0382, -0.0088, -0.0690,  0.0090,  0.0431,  0.0211, -0.0114,  0.0762,\n",
       "                       0.0688,  0.0345,  0.0195,  0.0833,  0.0240,  0.0307, -0.0023,  0.0172,\n",
       "                      -0.0717, -0.0490,  0.0074, -0.1337, -0.0823, -0.0866, -0.0671,  0.0787,\n",
       "                      -0.0751,  0.1059,  0.0362,  0.0018,  0.0447, -0.0345, -0.0570,  0.0100,\n",
       "                      -0.0059,  0.0191,  0.0680, -0.0598, -0.0700,  0.0537, -0.0057, -0.0813,\n",
       "                      -0.0435,  0.0350,  0.0007,  0.0390, -0.0101,  0.0141, -0.0285,  0.0511,\n",
       "                       0.0454,  0.0238, -0.0304, -0.0067, -0.0356,  0.0141,  0.0088, -0.0225,\n",
       "                      -0.0353, -0.0464, -0.0063,  0.0388,  0.0609, -0.0309,  0.0353,  0.0849,\n",
       "                       0.0985,  0.0347,  0.1013, -0.0097, -0.0490, -0.0048, -0.0016,  0.0529])),\n",
       "             ('conv_block2.1.running_var',\n",
       "              tensor([0.9455, 0.9217, 0.9261, 0.9323, 0.9291, 0.9383, 0.9431, 0.9457, 0.9373,\n",
       "                      0.9222, 0.9370, 0.9212, 0.9370, 0.9330, 0.9542, 0.9280, 0.9398, 0.9334,\n",
       "                      0.9385, 0.9504, 0.9315, 0.9376, 0.9209, 0.9350, 0.9533, 0.9255, 0.9505,\n",
       "                      0.9390, 0.9502, 0.9412, 0.9294, 0.9354, 0.9338, 0.9252, 0.9241, 0.9283,\n",
       "                      0.9235, 0.9360, 0.9350, 0.9332, 0.9754, 0.9358, 0.9269, 0.9252, 0.9427,\n",
       "                      0.9464, 0.9258, 0.9289, 0.9298, 0.9372, 0.9328, 0.9416, 0.9417, 0.9436,\n",
       "                      0.9349, 0.9525, 0.9260, 0.9398, 0.9258, 0.9440, 0.9207, 0.9390, 0.9289,\n",
       "                      0.9259, 0.9462, 0.9353, 0.9310, 0.9349, 0.9330, 0.9464, 0.9296, 0.9272,\n",
       "                      0.9394, 0.9421, 0.9292, 0.9200, 0.9304, 0.9238, 0.9303, 0.9239, 0.9362,\n",
       "                      0.9510, 0.9248, 0.9423, 0.9282, 0.9302, 0.9514, 0.9268, 0.9242, 0.9365,\n",
       "                      0.9252, 0.9229, 0.9327, 0.9215, 0.9252, 0.9626, 0.9367, 0.9445, 0.9448,\n",
       "                      0.9382, 0.9487, 0.9223, 0.9414, 0.9442, 0.9288, 0.9402, 0.9334, 0.9371,\n",
       "                      0.9383, 0.9338, 0.9376, 0.9270, 0.9616, 0.9268, 0.9231, 0.9189, 0.9346,\n",
       "                      0.9322, 0.9372, 0.9569, 0.9396, 0.9331, 0.9328, 0.9367, 0.9336, 0.9336,\n",
       "                      0.9247, 0.9299, 0.9348, 0.9443, 0.9467, 0.9193, 0.9187, 0.9369, 0.9338,\n",
       "                      0.9435, 0.9396, 0.9399, 0.9305, 0.9603, 0.9359, 0.9257, 0.9356, 0.9417,\n",
       "                      0.9374, 0.9798, 0.9181, 0.9318, 0.9324, 0.9273, 0.9289, 0.9311, 0.9274,\n",
       "                      0.9310, 0.9248, 0.9492, 0.9283, 0.9358, 0.9405, 0.9273, 0.9222, 0.9253,\n",
       "                      0.9360, 0.9314, 0.9278, 0.9526, 0.9394, 0.9240, 0.9373, 0.9316, 0.9321,\n",
       "                      0.9380, 0.9410, 0.9363, 0.9243, 0.9297, 0.9320, 0.9427, 0.9353, 0.9326,\n",
       "                      0.9318, 0.9255, 0.9548, 0.9368, 0.9302, 0.9291, 0.9357, 0.9243, 0.9509,\n",
       "                      0.9420, 0.9235, 0.9646, 0.9370, 0.9339, 0.9248, 0.9301, 0.9315, 0.9314,\n",
       "                      0.9241, 0.9337, 0.9304, 0.9582, 0.9279, 0.9505, 0.9385, 0.9362, 0.9450,\n",
       "                      0.9409, 0.9363, 0.9446, 0.9277, 0.9279, 0.9441, 0.9528, 0.9302, 0.9432,\n",
       "                      0.9343, 0.9208, 0.9258, 0.9268, 0.9526, 0.9194, 0.9317, 0.9456, 0.9268,\n",
       "                      0.9332, 0.9251, 0.9267, 0.9380, 0.9311, 0.9421, 0.9486, 0.9413, 0.9296,\n",
       "                      0.9254, 0.9282, 0.9327, 0.9345, 0.9207, 0.9323, 0.9309, 0.9473, 0.9197,\n",
       "                      0.9249, 0.9241, 0.9251, 0.9241, 0.9390, 0.9350, 0.9314, 0.9519, 0.9217,\n",
       "                      0.9567, 0.9335, 0.9200, 0.9393])),\n",
       "             ('conv_block2.1.num_batches_tracked', tensor(1)),\n",
       "             ('conv_block2.3.weight',\n",
       "              tensor([[[[-1.3476e-02,  3.2762e-02,  2.3146e-02],\n",
       "                        [ 1.8149e-03, -2.5378e-02,  7.2162e-03],\n",
       "                        [-1.1387e-02, -7.6452e-03,  1.5362e-02]],\n",
       "              \n",
       "                       [[ 7.6342e-03,  1.3365e-02,  2.7159e-02],\n",
       "                        [-1.5279e-02, -2.7468e-02, -4.3963e-03],\n",
       "                        [ 2.3887e-02,  2.7435e-02,  2.7124e-02]],\n",
       "              \n",
       "                       [[-8.4117e-03, -1.4346e-05, -3.0876e-02],\n",
       "                        [ 2.9414e-02, -8.3014e-04,  1.8804e-02],\n",
       "                        [ 1.7492e-02, -2.4422e-02,  3.8324e-03]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-2.5125e-02, -1.6177e-02,  6.0266e-03],\n",
       "                        [-1.3887e-02, -3.2120e-02,  2.3117e-02],\n",
       "                        [ 8.1515e-03, -3.3351e-02, -1.1726e-02]],\n",
       "              \n",
       "                       [[ 2.5447e-02,  2.9808e-02,  2.1358e-02],\n",
       "                        [ 2.6595e-02, -3.3629e-02,  3.4713e-02],\n",
       "                        [ 3.3556e-03,  1.8568e-02, -3.2803e-02]],\n",
       "              \n",
       "                       [[ 2.2485e-02,  1.8682e-02, -1.4604e-02],\n",
       "                        [ 1.9243e-02,  1.2742e-02,  2.1484e-02],\n",
       "                        [-1.2364e-02,  1.5210e-02, -1.7552e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[-3.2045e-02,  2.6217e-02, -7.2108e-03],\n",
       "                        [ 3.5297e-02,  1.3973e-02, -2.4724e-02],\n",
       "                        [-2.4537e-02,  7.3408e-03, -8.2884e-03]],\n",
       "              \n",
       "                       [[ 3.0083e-02,  1.8732e-03,  3.1898e-02],\n",
       "                        [ 2.7961e-02,  3.1203e-03, -1.5670e-03],\n",
       "                        [ 2.5867e-02, -4.5061e-03,  7.1220e-03]],\n",
       "              \n",
       "                       [[ 3.0357e-02,  3.4068e-02,  1.3163e-02],\n",
       "                        [-2.3890e-02, -1.8038e-02, -4.6936e-03],\n",
       "                        [ 3.5370e-02,  2.8280e-02, -3.4122e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-1.1476e-02, -2.8373e-02, -4.1923e-03],\n",
       "                        [-8.7100e-03,  2.9875e-03, -1.0124e-02],\n",
       "                        [ 2.1050e-02,  2.3787e-02, -2.8113e-02]],\n",
       "              \n",
       "                       [[-1.0338e-02,  2.5321e-02, -3.0387e-02],\n",
       "                        [ 5.5822e-03,  4.9150e-04, -3.0821e-02],\n",
       "                        [ 9.8862e-03, -3.5577e-04, -9.7572e-03]],\n",
       "              \n",
       "                       [[ 2.5492e-03, -3.4722e-02,  2.0265e-03],\n",
       "                        [-3.6447e-03,  2.9818e-02, -3.4002e-02],\n",
       "                        [-3.4785e-02,  1.1536e-02, -3.3395e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[-3.3599e-02, -3.3655e-02,  1.5663e-02],\n",
       "                        [-8.1674e-03, -1.8873e-03, -2.5668e-02],\n",
       "                        [-1.3678e-02, -1.9975e-02,  2.2904e-03]],\n",
       "              \n",
       "                       [[ 3.6377e-04, -1.9635e-02,  5.8922e-03],\n",
       "                        [-7.1113e-03,  2.8380e-02, -1.7708e-02],\n",
       "                        [-1.4514e-02,  1.6422e-02,  1.4431e-02]],\n",
       "              \n",
       "                       [[ 1.8335e-02,  1.0352e-03,  1.9078e-02],\n",
       "                        [-3.0399e-02,  2.8988e-02,  4.5687e-03],\n",
       "                        [-6.8448e-03,  2.5318e-02, -5.2009e-03]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 4.2840e-03,  1.4525e-02, -1.4584e-02],\n",
       "                        [-7.6435e-03, -2.3071e-02,  2.9092e-02],\n",
       "                        [-5.6254e-03, -1.8992e-02,  1.5766e-02]],\n",
       "              \n",
       "                       [[ 3.2696e-02,  2.2685e-02, -7.5891e-03],\n",
       "                        [ 2.7665e-02, -6.9403e-03,  3.4710e-02],\n",
       "                        [-1.9214e-02,  7.6023e-03, -9.9213e-03]],\n",
       "              \n",
       "                       [[ 1.9443e-02,  1.5343e-02, -1.9100e-02],\n",
       "                        [ 2.6575e-02,  1.5043e-02, -2.3675e-02],\n",
       "                        [-2.3013e-02,  3.2348e-02,  1.9562e-02]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[-3.3910e-03,  3.1233e-03,  2.9632e-02],\n",
       "                        [ 1.0993e-02, -1.9116e-02,  7.5274e-03],\n",
       "                        [-4.4940e-03, -3.0799e-02,  3.6053e-02]],\n",
       "              \n",
       "                       [[ 2.6605e-02, -9.1890e-04, -1.0004e-02],\n",
       "                        [ 1.4456e-02, -4.8193e-03, -7.0340e-04],\n",
       "                        [-3.4715e-02, -1.5934e-02, -2.5202e-02]],\n",
       "              \n",
       "                       [[-1.8890e-02,  2.7602e-02,  2.3726e-02],\n",
       "                        [ 1.1806e-02,  1.1321e-02, -2.4606e-02],\n",
       "                        [ 2.1047e-02,  2.0637e-02, -1.9309e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-1.7623e-03, -2.5345e-02, -1.4263e-02],\n",
       "                        [-6.7732e-03,  4.0666e-03,  3.9138e-03],\n",
       "                        [ 1.1761e-02,  1.7789e-03, -2.6276e-02]],\n",
       "              \n",
       "                       [[-1.3271e-02,  2.4830e-02,  2.8028e-03],\n",
       "                        [ 2.7833e-02, -1.8999e-02, -4.3584e-04],\n",
       "                        [ 3.9559e-03, -1.8504e-02,  5.4623e-03]],\n",
       "              \n",
       "                       [[-5.7854e-03, -3.7261e-03, -2.7199e-02],\n",
       "                        [-2.2623e-02,  3.4794e-03, -3.3863e-02],\n",
       "                        [-3.0690e-02,  2.4359e-02, -5.5269e-03]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 2.3824e-02, -1.6936e-02,  8.9230e-03],\n",
       "                        [-2.6119e-02, -1.3430e-03,  3.4022e-02],\n",
       "                        [ 1.2260e-02, -1.0595e-02,  2.5501e-02]],\n",
       "              \n",
       "                       [[ 2.8385e-02,  3.3820e-02, -3.5307e-02],\n",
       "                        [ 3.3994e-02,  3.4520e-02,  2.6656e-02],\n",
       "                        [ 1.7022e-02,  1.1622e-02,  1.4093e-02]],\n",
       "              \n",
       "                       [[ 3.4987e-02, -2.8142e-02,  2.2011e-02],\n",
       "                        [-8.6129e-03, -2.9027e-02, -3.0426e-02],\n",
       "                        [-1.4034e-03,  2.6989e-02,  3.3285e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 6.0137e-03, -3.0167e-02, -7.5262e-03],\n",
       "                        [ 1.5403e-02, -5.2913e-04,  1.2804e-03],\n",
       "                        [ 3.0349e-02,  2.7595e-02,  2.7182e-02]],\n",
       "              \n",
       "                       [[-1.7893e-02, -1.1880e-02,  1.5000e-02],\n",
       "                        [ 1.7267e-02,  2.4830e-03, -3.5190e-02],\n",
       "                        [ 1.1540e-02,  3.4762e-02, -1.2437e-03]],\n",
       "              \n",
       "                       [[ 2.9651e-02, -2.5666e-02,  9.9470e-03],\n",
       "                        [ 3.5326e-02, -2.1926e-02,  2.1159e-02],\n",
       "                        [-2.4251e-03,  8.7477e-03,  8.2077e-03]]],\n",
       "              \n",
       "              \n",
       "                      [[[-9.5520e-03,  5.7561e-03, -6.6860e-04],\n",
       "                        [-2.7592e-02, -1.3982e-02, -2.5116e-02],\n",
       "                        [-2.2377e-02, -2.9013e-02,  2.6572e-02]],\n",
       "              \n",
       "                       [[-2.9759e-02,  1.2149e-03, -2.7698e-02],\n",
       "                        [ 4.4649e-03,  3.2244e-02, -2.5556e-02],\n",
       "                        [ 3.3168e-02,  2.2921e-02,  2.3472e-02]],\n",
       "              \n",
       "                       [[ 2.7306e-03,  9.0198e-03,  2.4940e-02],\n",
       "                        [-5.1622e-03, -3.4016e-02, -3.0874e-02],\n",
       "                        [-2.6167e-02, -1.2693e-02, -2.7316e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 3.5570e-04, -8.5251e-03, -1.9320e-02],\n",
       "                        [ 2.9306e-02, -2.9005e-04,  5.1374e-03],\n",
       "                        [ 1.6815e-02,  4.3450e-03, -3.2047e-02]],\n",
       "              \n",
       "                       [[ 6.0685e-03, -1.0860e-02, -2.6730e-02],\n",
       "                        [-8.1540e-03,  2.5175e-02,  3.2022e-02],\n",
       "                        [ 1.5785e-02,  2.6991e-02,  5.2887e-03]],\n",
       "              \n",
       "                       [[-9.7751e-03,  8.4281e-03,  1.7883e-02],\n",
       "                        [-2.0249e-03, -3.5612e-02,  2.2090e-02],\n",
       "                        [ 1.5171e-02, -3.2856e-02, -3.5191e-02]]]])),\n",
       "             ('conv_block2.3.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block2.4.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1.])),\n",
       "             ('conv_block2.4.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block2.4.running_mean',\n",
       "              tensor([-1.7175e-02, -4.6074e-02, -2.2242e-02, -2.1699e-02,  4.9785e-03,\n",
       "                       2.8320e-02,  2.1582e-02, -1.7998e-02,  3.3753e-02, -3.8195e-02,\n",
       "                      -7.9926e-02,  2.1437e-02,  2.5823e-02, -8.0780e-03, -2.8546e-02,\n",
       "                      -2.1611e-02,  6.7566e-02,  1.3400e-02, -5.6710e-03, -2.6519e-02,\n",
       "                       4.0278e-02,  2.1741e-05,  1.7882e-02, -7.1068e-03,  6.3395e-02,\n",
       "                      -2.6407e-02,  9.8806e-02,  1.9782e-02, -1.4172e-02, -4.9223e-02,\n",
       "                       2.7019e-02,  8.5262e-02, -1.0816e-02,  1.1337e-02,  4.8233e-03,\n",
       "                       7.5224e-02, -1.5755e-02,  1.6576e-02,  5.2025e-02,  2.6069e-02,\n",
       "                       9.4506e-03, -2.3995e-02, -3.1413e-02, -4.3870e-03,  2.9290e-02,\n",
       "                       3.1264e-02,  4.3714e-03,  3.9198e-02,  8.0788e-03,  3.3054e-02,\n",
       "                       9.9097e-03, -1.9107e-02,  3.4804e-02, -5.3591e-02, -2.3221e-02,\n",
       "                       4.9744e-02, -5.5952e-02,  4.1033e-03,  4.2725e-02, -7.0718e-03,\n",
       "                       1.0223e-02,  1.5892e-02,  2.6406e-02,  2.7792e-02,  1.6527e-02,\n",
       "                       6.3514e-02,  7.3322e-03,  1.0995e-02, -3.1110e-02,  1.2439e-02,\n",
       "                       6.9529e-02,  4.1469e-02,  2.0726e-02, -9.3186e-03,  1.3953e-02,\n",
       "                       6.9306e-03,  1.0687e-02, -3.5271e-02,  4.5408e-02,  2.4419e-02,\n",
       "                       6.2624e-02, -4.5735e-02, -2.4406e-03,  6.9878e-02, -3.4328e-02,\n",
       "                       5.4695e-02,  9.0896e-02,  3.4175e-02,  2.5258e-02,  3.5587e-02,\n",
       "                      -2.9308e-02, -2.0595e-02,  1.5111e-02,  4.2407e-02, -2.0941e-02,\n",
       "                       3.3364e-02, -2.5480e-02,  6.3575e-03, -1.2135e-02, -5.9804e-04,\n",
       "                      -7.5396e-02, -2.7864e-02,  4.4543e-02, -1.9073e-02, -7.7290e-03,\n",
       "                      -6.1541e-03, -2.8253e-02, -1.1107e-02, -6.2322e-03, -1.8044e-03,\n",
       "                       1.8175e-02,  4.6337e-02, -3.4663e-03,  3.6150e-03,  3.1240e-02,\n",
       "                      -2.1223e-02,  2.1722e-02,  6.1408e-02,  4.7988e-02, -1.8218e-02,\n",
       "                       2.9291e-02,  3.9711e-02, -7.3660e-03,  5.4285e-02, -5.7405e-02,\n",
       "                      -9.6584e-03,  4.2512e-02,  2.5655e-02, -8.7852e-03, -1.4996e-02,\n",
       "                       4.8846e-02,  3.5010e-02, -3.2860e-02,  3.3483e-02, -2.8137e-02,\n",
       "                      -2.0730e-02,  2.8613e-02, -3.4733e-02, -6.7155e-02, -2.8164e-02,\n",
       "                      -4.9563e-02, -1.7516e-02, -6.6177e-02,  6.6757e-02,  1.2967e-02,\n",
       "                      -3.4971e-03,  1.8334e-02, -1.6767e-03,  1.4076e-02,  3.9561e-02,\n",
       "                      -2.6992e-02, -2.9187e-02, -3.5775e-02, -1.4699e-02,  6.0917e-03,\n",
       "                      -2.6558e-02,  7.8618e-02, -1.6804e-02,  2.0693e-02,  3.9096e-02,\n",
       "                      -5.2655e-02, -3.3080e-02,  1.7261e-02,  7.3815e-02,  3.1339e-02,\n",
       "                       1.6986e-02, -8.5684e-03, -5.0929e-03,  2.4734e-02, -1.1787e-01,\n",
       "                      -4.9383e-02, -6.9700e-02,  2.4346e-02,  2.0785e-02,  1.1026e-03,\n",
       "                      -9.8779e-03,  2.0669e-02,  6.1604e-02, -4.1829e-02,  2.0760e-02,\n",
       "                      -2.2500e-02, -1.8072e-02, -6.0401e-02,  3.0826e-02, -1.7466e-02,\n",
       "                      -5.6874e-03, -1.2268e-02, -1.1870e-02, -8.6538e-05, -5.2062e-02,\n",
       "                      -6.3668e-02, -1.8783e-02,  5.8881e-03, -6.4736e-02, -4.0542e-03,\n",
       "                      -5.2266e-02,  1.9502e-02,  5.0007e-02, -6.6582e-02, -1.7130e-02,\n",
       "                      -1.2867e-02,  9.4893e-02, -6.2451e-02,  2.6824e-02,  1.5344e-02,\n",
       "                      -1.2749e-03,  1.6924e-02,  6.1445e-02,  1.1316e-02,  2.4574e-02,\n",
       "                       2.3634e-02, -1.3081e-02, -2.9949e-02, -2.5414e-02,  1.0481e-02,\n",
       "                       4.3846e-02,  5.0219e-02,  5.3216e-02, -2.3769e-02, -6.2199e-02,\n",
       "                       9.9960e-03, -1.6493e-02, -4.6611e-02,  2.5717e-02, -3.0459e-02,\n",
       "                       4.3152e-03,  3.0713e-02,  4.2877e-02,  1.5288e-02,  6.8412e-02,\n",
       "                       6.3999e-03,  2.5722e-02, -3.4639e-02, -1.3192e-02,  4.0239e-02,\n",
       "                       2.3375e-02, -6.0497e-02,  6.0954e-02, -6.3605e-03,  2.1749e-02,\n",
       "                      -5.3944e-02,  1.3043e-02, -4.7392e-02,  8.3183e-02,  4.9092e-02,\n",
       "                       2.1554e-02, -1.2650e-02, -9.2873e-03, -1.7495e-03,  1.1571e-02,\n",
       "                       1.2591e-03,  4.9339e-03, -1.0501e-02,  8.5255e-03, -1.9455e-02,\n",
       "                       3.6948e-03])),\n",
       "             ('conv_block2.4.running_var',\n",
       "              tensor([0.9268, 0.9228, 0.9325, 0.9286, 0.9216, 0.9277, 0.9303, 0.9315, 0.9300,\n",
       "                      0.9244, 0.9352, 0.9294, 0.9312, 0.9313, 0.9326, 0.9283, 0.9321, 0.9325,\n",
       "                      0.9388, 0.9418, 0.9315, 0.9302, 0.9259, 0.9258, 0.9286, 0.9278, 0.9361,\n",
       "                      0.9399, 0.9330, 0.9253, 0.9233, 0.9294, 0.9348, 0.9295, 0.9332, 0.9338,\n",
       "                      0.9364, 0.9251, 0.9318, 0.9300, 0.9280, 0.9316, 0.9357, 0.9287, 0.9263,\n",
       "                      0.9327, 0.9342, 0.9301, 0.9287, 0.9301, 0.9301, 0.9297, 0.9310, 0.9281,\n",
       "                      0.9300, 0.9306, 0.9248, 0.9342, 0.9283, 0.9320, 0.9351, 0.9306, 0.9354,\n",
       "                      0.9258, 0.9343, 0.9465, 0.9256, 0.9283, 0.9261, 0.9289, 0.9338, 0.9336,\n",
       "                      0.9257, 0.9296, 0.9375, 0.9384, 0.9307, 0.9242, 0.9346, 0.9253, 0.9321,\n",
       "                      0.9213, 0.9278, 0.9315, 0.9337, 0.9305, 0.9297, 0.9294, 0.9245, 0.9227,\n",
       "                      0.9322, 0.9368, 0.9294, 0.9260, 0.9354, 0.9344, 0.9349, 0.9332, 0.9345,\n",
       "                      0.9364, 0.9299, 0.9303, 0.9273, 0.9315, 0.9325, 0.9276, 0.9347, 0.9380,\n",
       "                      0.9349, 0.9349, 0.9238, 0.9368, 0.9264, 0.9251, 0.9275, 0.9356, 0.9250,\n",
       "                      0.9407, 0.9350, 0.9240, 0.9291, 0.9340, 0.9283, 0.9336, 0.9385, 0.9352,\n",
       "                      0.9345, 0.9392, 0.9250, 0.9300, 0.9330, 0.9318, 0.9310, 0.9389, 0.9378,\n",
       "                      0.9276, 0.9369, 0.9309, 0.9555, 0.9332, 0.9269, 0.9233, 0.9393, 0.9295,\n",
       "                      0.9284, 0.9320, 0.9337, 0.9341, 0.9299, 0.9363, 0.9298, 0.9275, 0.9293,\n",
       "                      0.9360, 0.9257, 0.9257, 0.9254, 0.9379, 0.9301, 0.9237, 0.9309, 0.9313,\n",
       "                      0.9291, 0.9462, 0.9331, 0.9328, 0.9229, 0.9254, 0.9301, 0.9390, 0.9353,\n",
       "                      0.9428, 0.9223, 0.9239, 0.9395, 0.9250, 0.9242, 0.9304, 0.9324, 0.9389,\n",
       "                      0.9248, 0.9317, 0.9436, 0.9357, 0.9384, 0.9336, 0.9307, 0.9238, 0.9297,\n",
       "                      0.9406, 0.9319, 0.9272, 0.9318, 0.9311, 0.9293, 0.9286, 0.9303, 0.9327,\n",
       "                      0.9341, 0.9384, 0.9328, 0.9292, 0.9381, 0.9288, 0.9338, 0.9277, 0.9267,\n",
       "                      0.9359, 0.9253, 0.9290, 0.9318, 0.9278, 0.9356, 0.9397, 0.9283, 0.9207,\n",
       "                      0.9298, 0.9351, 0.9233, 0.9253, 0.9223, 0.9382, 0.9319, 0.9308, 0.9258,\n",
       "                      0.9334, 0.9242, 0.9298, 0.9548, 0.9532, 0.9300, 0.9249, 0.9381, 0.9277,\n",
       "                      0.9399, 0.9271, 0.9281, 0.9474, 0.9323, 0.9364, 0.9553, 0.9348, 0.9314,\n",
       "                      0.9563, 0.9284, 0.9305, 0.9401, 0.9292, 0.9394, 0.9318, 0.9301, 0.9326,\n",
       "                      0.9316, 0.9280, 0.9306, 0.9229])),\n",
       "             ('conv_block2.4.num_batches_tracked', tensor(1)),\n",
       "             ('conv_block3.0.weight',\n",
       "              tensor([[[[-0.0212, -0.0198, -0.0064],\n",
       "                        [-0.0037,  0.0155,  0.0213],\n",
       "                        [ 0.0222, -0.0233, -0.0138]],\n",
       "              \n",
       "                       [[-0.0044,  0.0258,  0.0280],\n",
       "                        [-0.0267, -0.0011,  0.0279],\n",
       "                        [-0.0190, -0.0183,  0.0049]],\n",
       "              \n",
       "                       [[ 0.0240, -0.0199, -0.0282],\n",
       "                        [ 0.0292, -0.0120, -0.0046],\n",
       "                        [ 0.0222,  0.0029,  0.0247]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0254, -0.0211,  0.0133],\n",
       "                        [ 0.0107,  0.0129,  0.0022],\n",
       "                        [-0.0065,  0.0107, -0.0172]],\n",
       "              \n",
       "                       [[-0.0098, -0.0033,  0.0150],\n",
       "                        [ 0.0105,  0.0148,  0.0056],\n",
       "                        [ 0.0152,  0.0291,  0.0209]],\n",
       "              \n",
       "                       [[ 0.0226,  0.0212,  0.0242],\n",
       "                        [-0.0235, -0.0136,  0.0220],\n",
       "                        [ 0.0121, -0.0027, -0.0156]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0074,  0.0162, -0.0244],\n",
       "                        [ 0.0093, -0.0083, -0.0215],\n",
       "                        [ 0.0112,  0.0232,  0.0169]],\n",
       "              \n",
       "                       [[-0.0096, -0.0291, -0.0075],\n",
       "                        [-0.0276, -0.0197, -0.0277],\n",
       "                        [-0.0007,  0.0018, -0.0053]],\n",
       "              \n",
       "                       [[ 0.0132,  0.0229,  0.0031],\n",
       "                        [ 0.0021, -0.0239, -0.0086],\n",
       "                        [-0.0166,  0.0097,  0.0003]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0253, -0.0178,  0.0213],\n",
       "                        [ 0.0277, -0.0122, -0.0260],\n",
       "                        [-0.0255,  0.0189, -0.0215]],\n",
       "              \n",
       "                       [[ 0.0025, -0.0244,  0.0227],\n",
       "                        [ 0.0149, -0.0096, -0.0206],\n",
       "                        [ 0.0238,  0.0278,  0.0019]],\n",
       "              \n",
       "                       [[-0.0147,  0.0171, -0.0185],\n",
       "                        [ 0.0131,  0.0086,  0.0198],\n",
       "                        [ 0.0146, -0.0277, -0.0227]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0088, -0.0035,  0.0083],\n",
       "                        [-0.0141,  0.0071,  0.0076],\n",
       "                        [-0.0004, -0.0291, -0.0006]],\n",
       "              \n",
       "                       [[-0.0213, -0.0141,  0.0189],\n",
       "                        [-0.0061,  0.0093, -0.0055],\n",
       "                        [ 0.0138,  0.0128,  0.0064]],\n",
       "              \n",
       "                       [[ 0.0230,  0.0012, -0.0031],\n",
       "                        [ 0.0195,  0.0137, -0.0068],\n",
       "                        [ 0.0248,  0.0114, -0.0288]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0127, -0.0006, -0.0197],\n",
       "                        [ 0.0151,  0.0083, -0.0262],\n",
       "                        [ 0.0060, -0.0232,  0.0270]],\n",
       "              \n",
       "                       [[ 0.0240, -0.0043, -0.0189],\n",
       "                        [-0.0223,  0.0228, -0.0118],\n",
       "                        [ 0.0269, -0.0023,  0.0093]],\n",
       "              \n",
       "                       [[-0.0029, -0.0139,  0.0090],\n",
       "                        [-0.0199,  0.0049, -0.0123],\n",
       "                        [ 0.0276,  0.0268, -0.0109]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[-0.0117,  0.0228,  0.0267],\n",
       "                        [-0.0264, -0.0062,  0.0081],\n",
       "                        [ 0.0011,  0.0112, -0.0290]],\n",
       "              \n",
       "                       [[ 0.0019, -0.0034,  0.0189],\n",
       "                        [-0.0009,  0.0169, -0.0258],\n",
       "                        [ 0.0201, -0.0099,  0.0041]],\n",
       "              \n",
       "                       [[-0.0257, -0.0080, -0.0145],\n",
       "                        [-0.0199,  0.0230,  0.0266],\n",
       "                        [-0.0110, -0.0181, -0.0049]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0068, -0.0029, -0.0264],\n",
       "                        [-0.0044,  0.0256,  0.0145],\n",
       "                        [ 0.0290,  0.0032, -0.0114]],\n",
       "              \n",
       "                       [[-0.0010,  0.0102,  0.0052],\n",
       "                        [-0.0006,  0.0231,  0.0187],\n",
       "                        [-0.0090,  0.0293,  0.0045]],\n",
       "              \n",
       "                       [[-0.0040, -0.0081,  0.0201],\n",
       "                        [-0.0103,  0.0266,  0.0179],\n",
       "                        [ 0.0237,  0.0081,  0.0087]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0170,  0.0065,  0.0080],\n",
       "                        [ 0.0129, -0.0165,  0.0004],\n",
       "                        [ 0.0224, -0.0230, -0.0027]],\n",
       "              \n",
       "                       [[-0.0173, -0.0185, -0.0248],\n",
       "                        [-0.0207,  0.0168,  0.0293],\n",
       "                        [-0.0244, -0.0156,  0.0025]],\n",
       "              \n",
       "                       [[ 0.0051, -0.0137, -0.0261],\n",
       "                        [-0.0283, -0.0189, -0.0048],\n",
       "                        [-0.0161,  0.0075,  0.0131]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0058,  0.0195,  0.0248],\n",
       "                        [ 0.0188,  0.0190,  0.0077],\n",
       "                        [-0.0032,  0.0097, -0.0254]],\n",
       "              \n",
       "                       [[ 0.0056, -0.0160,  0.0094],\n",
       "                        [-0.0234, -0.0182, -0.0201],\n",
       "                        [-0.0022, -0.0029,  0.0065]],\n",
       "              \n",
       "                       [[ 0.0084,  0.0209,  0.0214],\n",
       "                        [ 0.0235, -0.0165,  0.0182],\n",
       "                        [ 0.0006, -0.0222, -0.0063]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0125,  0.0196, -0.0023],\n",
       "                        [ 0.0290,  0.0125,  0.0232],\n",
       "                        [ 0.0208, -0.0156, -0.0143]],\n",
       "              \n",
       "                       [[-0.0242,  0.0065, -0.0109],\n",
       "                        [ 0.0134, -0.0200,  0.0141],\n",
       "                        [ 0.0150,  0.0120,  0.0190]],\n",
       "              \n",
       "                       [[-0.0114,  0.0254,  0.0256],\n",
       "                        [ 0.0154, -0.0158, -0.0260],\n",
       "                        [-0.0038, -0.0191, -0.0093]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 0.0157, -0.0043,  0.0006],\n",
       "                        [ 0.0262,  0.0193, -0.0215],\n",
       "                        [-0.0034, -0.0058,  0.0214]],\n",
       "              \n",
       "                       [[ 0.0061, -0.0247,  0.0040],\n",
       "                        [ 0.0182, -0.0264,  0.0199],\n",
       "                        [-0.0037,  0.0072, -0.0234]],\n",
       "              \n",
       "                       [[ 0.0167,  0.0150, -0.0043],\n",
       "                        [ 0.0228,  0.0075,  0.0291],\n",
       "                        [-0.0198,  0.0038,  0.0082]]]])),\n",
       "             ('conv_block3.0.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block3.1.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('conv_block3.1.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block3.1.running_mean',\n",
       "              tensor([ 0.0993, -0.0216, -0.0108, -0.0245, -0.0580, -0.0675, -0.0175,  0.0412,\n",
       "                      -0.0457,  0.0518,  0.0390,  0.1013,  0.0555, -0.0896, -0.1166, -0.0100,\n",
       "                       0.0197, -0.1025,  0.0382, -0.0252, -0.0425,  0.0018,  0.0505,  0.0503,\n",
       "                      -0.0154,  0.1228, -0.0024, -0.0916, -0.0869, -0.0077, -0.0043, -0.0533,\n",
       "                       0.0796,  0.0120,  0.0445, -0.0384, -0.0122, -0.0736, -0.0574,  0.1374,\n",
       "                       0.0133,  0.0141,  0.0556, -0.0178,  0.0145, -0.0280, -0.0283,  0.0253,\n",
       "                      -0.0779,  0.0089, -0.0958,  0.0543, -0.0241,  0.0050,  0.0379, -0.0196,\n",
       "                       0.0135,  0.0311, -0.0051, -0.0532,  0.0281,  0.0036,  0.0278, -0.0388,\n",
       "                       0.0779, -0.0644, -0.0864,  0.0390, -0.1180, -0.0578,  0.0334,  0.0346,\n",
       "                      -0.0576,  0.0993, -0.0537,  0.0608,  0.0806,  0.0528,  0.0614,  0.0280,\n",
       "                      -0.0375, -0.0187,  0.0290, -0.0925, -0.0239, -0.0491, -0.0131,  0.0539,\n",
       "                       0.1234, -0.0009, -0.0286,  0.0265, -0.0622, -0.0124,  0.0705,  0.0227,\n",
       "                       0.0445,  0.0356, -0.0021, -0.1406, -0.0589, -0.0487, -0.0127, -0.0612,\n",
       "                      -0.0394,  0.0369, -0.0076,  0.0825, -0.0064,  0.0189,  0.0711, -0.0402,\n",
       "                      -0.0053,  0.0799,  0.0634, -0.0580, -0.0680, -0.0742,  0.0853, -0.0707,\n",
       "                      -0.0759,  0.0400,  0.0635, -0.0168,  0.0363,  0.0512,  0.0168, -0.0175,\n",
       "                       0.0386, -0.0240,  0.1064,  0.0238,  0.0144,  0.0384,  0.0475,  0.0072,\n",
       "                       0.0365,  0.0061,  0.0310,  0.0045, -0.0098, -0.1162, -0.0133, -0.0051,\n",
       "                      -0.0153,  0.0194, -0.1561,  0.0294, -0.0403, -0.0528,  0.0207,  0.0130,\n",
       "                       0.0693,  0.0068, -0.0666,  0.0234, -0.0525, -0.1115,  0.0507,  0.0212,\n",
       "                      -0.0500, -0.0802,  0.0155,  0.0842, -0.0060,  0.0020,  0.1134,  0.0203,\n",
       "                      -0.0528, -0.1302, -0.0499,  0.0518, -0.0318,  0.0178,  0.0463,  0.0042,\n",
       "                       0.0336, -0.0244,  0.0499, -0.0226, -0.0382, -0.0397, -0.0034,  0.0771,\n",
       "                       0.0491, -0.0316,  0.0237, -0.0118, -0.0528,  0.0526,  0.0351, -0.0051,\n",
       "                       0.0580,  0.1536, -0.0112, -0.0158,  0.0369,  0.0765,  0.1380,  0.0507,\n",
       "                       0.0552, -0.1233,  0.0024, -0.1293,  0.0716,  0.0371, -0.0287, -0.0034,\n",
       "                       0.0167, -0.0851, -0.0131,  0.0485, -0.0216, -0.1135, -0.0148, -0.0682,\n",
       "                      -0.0270,  0.0276, -0.0517,  0.0146, -0.0035,  0.0492, -0.0884,  0.0647,\n",
       "                      -0.0183, -0.0324,  0.0514,  0.1261, -0.0130,  0.0064, -0.0242, -0.0337,\n",
       "                       0.0069,  0.0214,  0.0381,  0.0499, -0.0103,  0.0444, -0.0039,  0.1179,\n",
       "                       0.0325,  0.0500, -0.0272,  0.0678, -0.0369, -0.0532,  0.0488, -0.0406,\n",
       "                      -0.0525, -0.0404,  0.0680, -0.0582, -0.0225, -0.0051, -0.0499, -0.0431,\n",
       "                      -0.0054, -0.0128, -0.0087, -0.0348, -0.0327,  0.0314, -0.0096, -0.0443,\n",
       "                       0.0462, -0.0302,  0.0392,  0.0618, -0.0791, -0.0558,  0.0915,  0.0519,\n",
       "                      -0.0066, -0.0022, -0.0246, -0.0344,  0.0143,  0.1068,  0.0068, -0.1010,\n",
       "                       0.0201, -0.0781,  0.0254, -0.0596,  0.0894, -0.0458, -0.0358, -0.0106,\n",
       "                      -0.0057,  0.0433,  0.0044, -0.0193, -0.0247, -0.0177, -0.0102,  0.0216,\n",
       "                       0.0677, -0.0613,  0.0367,  0.0820,  0.0162, -0.1006,  0.0553,  0.0671,\n",
       "                       0.0116,  0.1321,  0.0154,  0.0221,  0.0376, -0.0380,  0.0043,  0.0268,\n",
       "                      -0.0087, -0.0079, -0.0007,  0.0963,  0.0804, -0.0035, -0.0628,  0.1210,\n",
       "                       0.1138, -0.1357, -0.0162, -0.0444,  0.0174, -0.0363, -0.0079,  0.0425,\n",
       "                      -0.0221,  0.0380, -0.0604, -0.0607, -0.0036, -0.0227,  0.0097,  0.0146,\n",
       "                      -0.0406,  0.0459,  0.0258,  0.0046, -0.0251, -0.0242, -0.1022,  0.0090,\n",
       "                       0.0257,  0.0541, -0.0628,  0.0226,  0.0826, -0.0383, -0.0614, -0.0431,\n",
       "                      -0.0266, -0.0032,  0.0347,  0.0043,  0.0114,  0.0138,  0.0401,  0.0672,\n",
       "                       0.1235,  0.0580,  0.1165,  0.0291, -0.0434,  0.0492,  0.0282,  0.0693,\n",
       "                      -0.0563,  0.0042, -0.0954,  0.0463,  0.0678, -0.0192, -0.0896, -0.0660,\n",
       "                      -0.0109, -0.0792, -0.0643,  0.0281,  0.0850,  0.0442, -0.0020,  0.0963,\n",
       "                      -0.0393,  0.0511,  0.0093, -0.0295, -0.1127,  0.0270,  0.0077,  0.0507,\n",
       "                      -0.0165,  0.0708,  0.0650, -0.0612,  0.0610,  0.0534,  0.0028, -0.0375,\n",
       "                      -0.0106,  0.0380,  0.0744, -0.0470, -0.0203, -0.0104,  0.0048,  0.0613,\n",
       "                       0.0387,  0.0564,  0.0060, -0.0173,  0.0246,  0.0091, -0.0717, -0.0364,\n",
       "                      -0.0487,  0.0044, -0.0653, -0.0621, -0.0560,  0.0900, -0.0335,  0.0073,\n",
       "                       0.0052, -0.0350, -0.0428,  0.0112, -0.0972, -0.1018,  0.1066,  0.0978,\n",
       "                      -0.0008,  0.0349, -0.0449, -0.0165,  0.0167,  0.0648, -0.0048, -0.0442,\n",
       "                       0.0142, -0.0323, -0.0078, -0.0361,  0.0685,  0.0375,  0.0297,  0.0239,\n",
       "                      -0.0377, -0.0177,  0.0108, -0.0605,  0.0940,  0.0277,  0.0426,  0.0695,\n",
       "                       0.0159, -0.0805,  0.0192, -0.0148,  0.1062,  0.0126,  0.0107, -0.0237,\n",
       "                      -0.0286,  0.0258, -0.0019,  0.0386, -0.0636,  0.0305, -0.0048, -0.0092,\n",
       "                       0.1446, -0.0005, -0.0262,  0.0167, -0.0042,  0.0179, -0.0170,  0.0280,\n",
       "                      -0.0717,  0.1036,  0.0205,  0.0710, -0.0868,  0.0781,  0.0414,  0.1413,\n",
       "                      -0.0622, -0.0043,  0.0623, -0.0134, -0.0254,  0.0552,  0.0299,  0.0536,\n",
       "                      -0.0889, -0.0147, -0.0645,  0.0535,  0.0555, -0.0580, -0.0404,  0.0483,\n",
       "                       0.0243,  0.0202, -0.0708, -0.0672, -0.0652,  0.0759,  0.0549, -0.0033])),\n",
       "             ('conv_block3.1.running_var',\n",
       "              tensor([0.9492, 0.9296, 0.9280, 0.9298, 0.9385, 0.9366, 0.9319, 0.9387, 0.9485,\n",
       "                      0.9437, 0.9289, 0.9517, 0.9333, 0.9441, 0.9599, 0.9461, 0.9283, 0.9424,\n",
       "                      0.9308, 0.9304, 0.9350, 0.9326, 0.9474, 0.9411, 0.9284, 0.9513, 0.9239,\n",
       "                      0.9457, 0.9341, 0.9392, 0.9463, 0.9375, 0.9430, 0.9392, 0.9334, 0.9531,\n",
       "                      0.9515, 0.9491, 0.9427, 0.9751, 0.9429, 0.9314, 0.9318, 0.9289, 0.9304,\n",
       "                      0.9312, 0.9315, 0.9339, 0.9384, 0.9344, 0.9558, 0.9341, 0.9288, 0.9307,\n",
       "                      0.9434, 0.9447, 0.9377, 0.9322, 0.9357, 0.9334, 0.9299, 0.9380, 0.9316,\n",
       "                      0.9418, 0.9395, 0.9356, 0.9356, 0.9357, 0.9560, 0.9335, 0.9584, 0.9522,\n",
       "                      0.9406, 0.9269, 0.9332, 0.9253, 0.9559, 0.9402, 0.9324, 0.9305, 0.9479,\n",
       "                      0.9297, 0.9299, 0.9400, 0.9464, 0.9292, 0.9300, 0.9406, 0.9611, 0.9325,\n",
       "                      0.9398, 0.9264, 0.9301, 0.9353, 0.9401, 0.9264, 0.9364, 0.9428, 0.9372,\n",
       "                      0.9461, 0.9304, 0.9380, 0.9311, 0.9311, 0.9409, 0.9439, 0.9360, 0.9515,\n",
       "                      0.9382, 0.9482, 0.9295, 0.9409, 0.9352, 0.9400, 0.9500, 0.9317, 0.9352,\n",
       "                      0.9361, 0.9471, 0.9329, 0.9378, 0.9273, 0.9520, 0.9407, 0.9391, 0.9341,\n",
       "                      0.9289, 0.9350, 0.9328, 0.9328, 0.9629, 0.9327, 0.9355, 0.9322, 0.9372,\n",
       "                      0.9630, 0.9509, 0.9535, 0.9275, 0.9387, 0.9311, 0.9435, 0.9265, 0.9306,\n",
       "                      0.9417, 0.9329, 0.9511, 0.9268, 0.9433, 0.9329, 0.9344, 0.9330, 0.9384,\n",
       "                      0.9329, 0.9393, 0.9298, 0.9426, 0.9700, 0.9411, 0.9410, 0.9310, 0.9396,\n",
       "                      0.9260, 0.9343, 0.9366, 0.9353, 0.9469, 0.9320, 0.9305, 0.9420, 0.9377,\n",
       "                      0.9401, 0.9402, 0.9362, 0.9292, 0.9348, 0.9351, 0.9467, 0.9489, 0.9385,\n",
       "                      0.9317, 0.9335, 0.9272, 0.9294, 0.9418, 0.9305, 0.9319, 0.9619, 0.9273,\n",
       "                      0.9233, 0.9345, 0.9409, 0.9317, 0.9629, 0.9357, 0.9444, 0.9480, 0.9523,\n",
       "                      0.9492, 0.9310, 0.9356, 0.9662, 0.9299, 0.9617, 0.9372, 0.9326, 0.9227,\n",
       "                      0.9394, 0.9456, 0.9241, 0.9336, 0.9323, 0.9286, 0.9396, 0.9317, 0.9360,\n",
       "                      0.9269, 0.9294, 0.9323, 0.9299, 0.9325, 0.9331, 0.9510, 0.9364, 0.9297,\n",
       "                      0.9251, 0.9611, 0.9521, 0.9376, 0.9326, 0.9274, 0.9311, 0.9445, 0.9427,\n",
       "                      0.9287, 0.9474, 0.9389, 0.9356, 0.9392, 0.9428, 0.9328, 0.9340, 0.9402,\n",
       "                      0.9319, 0.9545, 0.9520, 0.9289, 0.9418, 0.9326, 0.9275, 0.9369, 0.9495,\n",
       "                      0.9292, 0.9327, 0.9527, 0.9422, 0.9351, 0.9280, 0.9294, 0.9478, 0.9299,\n",
       "                      0.9387, 0.9327, 0.9438, 0.9487, 0.9296, 0.9370, 0.9380, 0.9307, 0.9356,\n",
       "                      0.9557, 0.9295, 0.9432, 0.9321, 0.9423, 0.9326, 0.9498, 0.9485, 0.9347,\n",
       "                      0.9389, 0.9245, 0.9405, 0.9323, 0.9534, 0.9335, 0.9464, 0.9363, 0.9302,\n",
       "                      0.9346, 0.9481, 0.9278, 0.9357, 0.9418, 0.9434, 0.9381, 0.9307, 0.9281,\n",
       "                      0.9337, 0.9402, 0.9453, 0.9360, 0.9507, 0.9394, 0.9393, 0.9373, 0.9516,\n",
       "                      0.9386, 0.9253, 0.9476, 0.9320, 0.9451, 0.9316, 0.9446, 0.9388, 0.9323,\n",
       "                      0.9551, 0.9356, 0.9287, 0.9335, 0.9469, 0.9521, 0.9536, 0.9340, 0.9534,\n",
       "                      0.9386, 0.9419, 0.9349, 0.9491, 0.9295, 0.9324, 0.9313, 0.9338, 0.9356,\n",
       "                      0.9287, 0.9326, 0.9350, 0.9314, 0.9361, 0.9295, 0.9309, 0.9396, 0.9348,\n",
       "                      0.9466, 0.9334, 0.9238, 0.9350, 0.9321, 0.9299, 0.9690, 0.9287, 0.9374,\n",
       "                      0.9311, 0.9332, 0.9412, 0.9318, 0.9370, 0.9346, 0.9273, 0.9394, 0.9504,\n",
       "                      0.9706, 0.9343, 0.9469, 0.9339, 0.9468, 0.9377, 0.9348, 0.9414, 0.9413,\n",
       "                      0.9249, 0.9441, 0.9363, 0.9407, 0.9459, 0.9410, 0.9289, 0.9268, 0.9829,\n",
       "                      0.9510, 0.9467, 0.9330, 0.9442, 0.9369, 0.9375, 0.9301, 0.9376, 0.9279,\n",
       "                      0.9346, 0.9378, 0.9278, 0.9321, 0.9321, 0.9460, 0.9280, 0.9428, 0.9410,\n",
       "                      0.9449, 0.9456, 0.9265, 0.9507, 0.9382, 0.9266, 0.9470, 0.9369, 0.9253,\n",
       "                      0.9257, 0.9261, 0.9336, 0.9523, 0.9384, 0.9276, 0.9312, 0.9313, 0.9294,\n",
       "                      0.9571, 0.9534, 0.9408, 0.9328, 0.9495, 0.9296, 0.9413, 0.9468, 0.9286,\n",
       "                      0.9346, 0.9419, 0.9351, 0.9374, 0.9231, 0.9496, 0.9357, 0.9438, 0.9322,\n",
       "                      0.9389, 0.9292, 0.9504, 0.9282, 0.9299, 0.9368, 0.9453, 0.9302, 0.9302,\n",
       "                      0.9306, 0.9285, 0.9430, 0.9442, 0.9431, 0.9442, 0.9375, 0.9267, 0.9326,\n",
       "                      0.9427, 0.9333, 0.9334, 0.9338, 0.9395, 0.9345, 0.9265, 0.9383, 0.9371,\n",
       "                      0.9315, 0.9492, 0.9333, 0.9388, 0.9310, 0.9359, 0.9350, 0.9331, 0.9555,\n",
       "                      0.9288, 0.9374, 0.9345, 0.9452, 0.9487, 0.9275, 0.9325, 0.9359, 0.9267,\n",
       "                      0.9278, 0.9306, 0.9350, 0.9371, 0.9392, 0.9310, 0.9330, 0.9547, 0.9344,\n",
       "                      0.9356, 0.9366, 0.9495, 0.9308, 0.9359, 0.9337, 0.9543, 0.9401, 0.9280,\n",
       "                      0.9316, 0.9378, 0.9323, 0.9632, 0.9478, 0.9447, 0.9307, 0.9344, 0.9319,\n",
       "                      0.9334, 0.9632, 0.9369, 0.9422, 0.9552, 0.9381, 0.9546, 0.9327])),\n",
       "             ('conv_block3.1.num_batches_tracked', tensor(1)),\n",
       "             ('conv_block3.3.weight',\n",
       "              tensor([[[[-0.0137,  0.0132,  0.0225],\n",
       "                        [ 0.0200,  0.0222, -0.0117],\n",
       "                        [-0.0012,  0.0055,  0.0158]],\n",
       "              \n",
       "                       [[-0.0109,  0.0045, -0.0109],\n",
       "                        [-0.0165, -0.0020, -0.0121],\n",
       "                        [ 0.0170,  0.0066,  0.0141]],\n",
       "              \n",
       "                       [[-0.0061,  0.0153, -0.0126],\n",
       "                        [ 0.0089,  0.0137, -0.0211],\n",
       "                        [ 0.0104, -0.0169, -0.0236]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0183, -0.0154, -0.0160],\n",
       "                        [ 0.0090, -0.0174,  0.0059],\n",
       "                        [-0.0139, -0.0053, -0.0073]],\n",
       "              \n",
       "                       [[-0.0108, -0.0177, -0.0102],\n",
       "                        [ 0.0183,  0.0205, -0.0138],\n",
       "                        [ 0.0033,  0.0180, -0.0070]],\n",
       "              \n",
       "                       [[ 0.0195, -0.0042, -0.0186],\n",
       "                        [-0.0027, -0.0173, -0.0062],\n",
       "                        [ 0.0173,  0.0052,  0.0104]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0041,  0.0094,  0.0150],\n",
       "                        [ 0.0096, -0.0145,  0.0110],\n",
       "                        [-0.0246, -0.0157,  0.0145]],\n",
       "              \n",
       "                       [[-0.0239,  0.0040, -0.0206],\n",
       "                        [-0.0084, -0.0138, -0.0038],\n",
       "                        [ 0.0144, -0.0209,  0.0060]],\n",
       "              \n",
       "                       [[-0.0253, -0.0134, -0.0051],\n",
       "                        [ 0.0038,  0.0129, -0.0013],\n",
       "                        [ 0.0037,  0.0219, -0.0183]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0140, -0.0213, -0.0220],\n",
       "                        [-0.0120, -0.0231,  0.0134],\n",
       "                        [-0.0101,  0.0024,  0.0169]],\n",
       "              \n",
       "                       [[ 0.0251,  0.0146,  0.0117],\n",
       "                        [-0.0037, -0.0127,  0.0095],\n",
       "                        [ 0.0019, -0.0108, -0.0069]],\n",
       "              \n",
       "                       [[ 0.0045, -0.0098, -0.0192],\n",
       "                        [-0.0080, -0.0182,  0.0179],\n",
       "                        [ 0.0246,  0.0111,  0.0222]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0080, -0.0035, -0.0164],\n",
       "                        [-0.0074, -0.0049, -0.0174],\n",
       "                        [-0.0131,  0.0160,  0.0169]],\n",
       "              \n",
       "                       [[ 0.0055, -0.0064, -0.0195],\n",
       "                        [ 0.0021, -0.0180, -0.0083],\n",
       "                        [ 0.0168,  0.0164,  0.0249]],\n",
       "              \n",
       "                       [[ 0.0209, -0.0159,  0.0168],\n",
       "                        [ 0.0035,  0.0139, -0.0138],\n",
       "                        [ 0.0045, -0.0199,  0.0153]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0132, -0.0157,  0.0115],\n",
       "                        [ 0.0208,  0.0129, -0.0102],\n",
       "                        [-0.0039, -0.0250,  0.0164]],\n",
       "              \n",
       "                       [[-0.0144, -0.0063, -0.0006],\n",
       "                        [-0.0121, -0.0008, -0.0160],\n",
       "                        [-0.0090, -0.0109, -0.0049]],\n",
       "              \n",
       "                       [[-0.0013, -0.0243,  0.0083],\n",
       "                        [ 0.0208,  0.0024, -0.0243],\n",
       "                        [-0.0069, -0.0133, -0.0058]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0228,  0.0028, -0.0014],\n",
       "                        [-0.0246,  0.0009,  0.0036],\n",
       "                        [ 0.0237, -0.0253,  0.0102]],\n",
       "              \n",
       "                       [[-0.0055, -0.0199,  0.0233],\n",
       "                        [ 0.0161,  0.0239, -0.0169],\n",
       "                        [ 0.0077, -0.0065, -0.0054]],\n",
       "              \n",
       "                       [[-0.0250,  0.0129, -0.0057],\n",
       "                        [-0.0009, -0.0200, -0.0194],\n",
       "                        [-0.0138, -0.0195, -0.0090]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0163,  0.0198,  0.0183],\n",
       "                        [-0.0245, -0.0236,  0.0078],\n",
       "                        [-0.0040, -0.0130,  0.0125]],\n",
       "              \n",
       "                       [[-0.0060,  0.0106, -0.0030],\n",
       "                        [ 0.0220,  0.0057, -0.0111],\n",
       "                        [ 0.0153,  0.0004, -0.0054]],\n",
       "              \n",
       "                       [[ 0.0160, -0.0123,  0.0037],\n",
       "                        [ 0.0023, -0.0001, -0.0104],\n",
       "                        [-0.0133,  0.0002,  0.0157]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0110,  0.0070,  0.0034],\n",
       "                        [ 0.0167,  0.0225,  0.0024],\n",
       "                        [-0.0143, -0.0181, -0.0041]],\n",
       "              \n",
       "                       [[-0.0228, -0.0053, -0.0034],\n",
       "                        [ 0.0002, -0.0191,  0.0134],\n",
       "                        [-0.0022, -0.0115, -0.0145]],\n",
       "              \n",
       "                       [[ 0.0218,  0.0081, -0.0064],\n",
       "                        [ 0.0226,  0.0160, -0.0101],\n",
       "                        [-0.0020, -0.0150, -0.0156]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0024, -0.0114, -0.0011],\n",
       "                        [-0.0076,  0.0254, -0.0237],\n",
       "                        [ 0.0011,  0.0055, -0.0087]],\n",
       "              \n",
       "                       [[ 0.0056,  0.0197, -0.0245],\n",
       "                        [-0.0181,  0.0181, -0.0035],\n",
       "                        [-0.0192,  0.0046,  0.0155]],\n",
       "              \n",
       "                       [[ 0.0237, -0.0032, -0.0093],\n",
       "                        [-0.0221,  0.0134, -0.0115],\n",
       "                        [-0.0084,  0.0120,  0.0048]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0162,  0.0129, -0.0177],\n",
       "                        [-0.0027, -0.0090, -0.0252],\n",
       "                        [ 0.0145,  0.0241,  0.0009]],\n",
       "              \n",
       "                       [[-0.0233, -0.0083,  0.0132],\n",
       "                        [ 0.0029,  0.0179,  0.0134],\n",
       "                        [ 0.0235, -0.0245, -0.0003]],\n",
       "              \n",
       "                       [[ 0.0231, -0.0010, -0.0183],\n",
       "                        [ 0.0124,  0.0032,  0.0157],\n",
       "                        [-0.0002,  0.0070, -0.0219]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0070,  0.0039,  0.0155],\n",
       "                        [-0.0121, -0.0109,  0.0253],\n",
       "                        [-0.0205, -0.0038, -0.0060]],\n",
       "              \n",
       "                       [[-0.0107, -0.0055, -0.0243],\n",
       "                        [ 0.0254,  0.0223,  0.0123],\n",
       "                        [ 0.0137,  0.0082,  0.0188]],\n",
       "              \n",
       "                       [[ 0.0048, -0.0160, -0.0050],\n",
       "                        [ 0.0025, -0.0102,  0.0007],\n",
       "                        [ 0.0072, -0.0219, -0.0218]]]])),\n",
       "             ('conv_block3.3.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block3.4.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('conv_block3.4.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block3.4.running_mean',\n",
       "              tensor([-2.3875e-02, -3.0523e-02,  4.5126e-02,  8.9714e-02, -2.6987e-02,\n",
       "                      -3.3401e-03,  2.0733e-02,  6.4066e-02,  4.7695e-03,  9.8755e-03,\n",
       "                      -1.1454e-02,  3.7472e-03,  3.1429e-02, -2.1299e-02, -3.3351e-02,\n",
       "                       3.0655e-02,  1.1632e-02, -1.8267e-02,  1.6447e-02, -1.7125e-02,\n",
       "                       6.2993e-02, -2.8167e-02,  4.5084e-02,  2.5531e-02, -3.8510e-02,\n",
       "                      -2.2215e-02,  4.5874e-03,  2.9098e-02, -1.1416e-02, -1.8978e-02,\n",
       "                      -5.5371e-02, -1.7632e-02, -5.7808e-03,  3.6106e-02,  1.9910e-02,\n",
       "                      -3.0111e-02,  3.8523e-02,  9.6076e-03,  1.0526e-02,  3.2149e-02,\n",
       "                       7.1071e-02, -3.0495e-02, -7.6694e-03,  3.3521e-02, -2.5379e-02,\n",
       "                      -1.4492e-02,  3.3234e-02,  1.5535e-02,  2.5268e-02, -2.6470e-02,\n",
       "                      -5.3355e-02,  2.1657e-02, -4.4822e-02, -7.3711e-02,  3.7505e-02,\n",
       "                      -2.2555e-02,  4.2520e-02, -3.3533e-02, -2.4939e-02,  2.7633e-02,\n",
       "                      -4.2978e-02,  3.9648e-02,  1.7570e-02, -3.2982e-02,  1.7053e-02,\n",
       "                       3.0672e-02,  2.2520e-02, -1.7797e-03, -3.1689e-02,  3.3091e-02,\n",
       "                      -3.7042e-02,  8.3803e-03, -1.6645e-02,  3.8603e-03,  7.6079e-03,\n",
       "                      -1.2182e-02,  2.0131e-03, -3.8781e-02,  3.3301e-04,  3.1804e-02,\n",
       "                       6.9736e-02, -1.6436e-02, -4.7127e-02, -4.2121e-02, -5.0068e-02,\n",
       "                       1.7897e-02, -4.3977e-02,  8.1663e-03, -2.4190e-02,  2.9342e-02,\n",
       "                      -3.0459e-02, -6.3497e-02, -8.6429e-03, -6.9080e-03,  4.8605e-02,\n",
       "                      -1.2495e-02, -1.7871e-02,  2.1774e-02, -4.3665e-02,  3.7205e-02,\n",
       "                      -1.3627e-02, -1.7828e-02, -5.8215e-03,  3.2658e-02,  2.0076e-02,\n",
       "                       1.2462e-02, -9.6589e-04,  1.2812e-02, -9.0744e-05,  1.5154e-02,\n",
       "                      -4.8546e-02, -6.4144e-02,  1.8401e-02,  3.1260e-02,  1.5750e-02,\n",
       "                      -1.6038e-02, -2.6802e-02, -1.8609e-03, -2.6476e-02,  1.0942e-02,\n",
       "                      -2.8943e-02, -3.9669e-03, -4.9008e-02,  2.8527e-02, -1.6612e-02,\n",
       "                       4.4557e-02,  7.0561e-02,  1.3113e-03, -1.5747e-02,  5.3693e-02,\n",
       "                       1.0168e-02,  1.4764e-02,  4.2281e-03,  7.5637e-02, -4.0351e-03,\n",
       "                       3.8143e-02, -2.4765e-03, -1.3374e-02, -2.2690e-02, -1.5804e-02,\n",
       "                       1.9745e-02, -2.5326e-02,  5.9942e-02, -2.0562e-02, -1.7529e-02,\n",
       "                       3.3588e-02,  2.5572e-03, -2.6970e-02, -3.1235e-02, -6.4512e-02,\n",
       "                      -2.3128e-02,  5.2281e-03, -2.9303e-02, -1.7032e-02,  3.9624e-02,\n",
       "                       1.1364e-02,  3.8620e-02,  5.0706e-02, -1.3201e-02,  3.0225e-02,\n",
       "                       4.5954e-02,  1.7199e-02, -3.4758e-02,  1.3850e-02, -2.7110e-02,\n",
       "                       1.7540e-02, -3.8225e-02, -2.0614e-02,  2.0077e-04,  2.9581e-02,\n",
       "                       3.5835e-02, -5.0934e-03, -3.1847e-02,  1.8345e-02,  2.4128e-02,\n",
       "                       4.1674e-02, -5.0989e-03,  3.1123e-02,  2.3559e-02, -6.0765e-02,\n",
       "                      -3.3632e-02,  3.5536e-02,  5.4572e-02, -2.8253e-02, -1.0932e-02,\n",
       "                      -2.8508e-02,  3.9475e-02,  5.1071e-02, -2.1060e-02, -5.1042e-02,\n",
       "                       2.4544e-02, -4.8405e-02,  9.1533e-02, -1.2230e-02, -6.4543e-02,\n",
       "                       7.9319e-02,  1.8511e-02,  1.9276e-02,  2.9872e-02,  2.9851e-02,\n",
       "                       5.4786e-03,  5.5538e-02,  4.7373e-02,  4.9535e-02, -2.2990e-02,\n",
       "                       1.8618e-02, -1.4890e-02,  9.6606e-03, -3.1503e-02,  3.2423e-02,\n",
       "                       1.4238e-02,  1.9803e-02,  6.8669e-03, -2.8612e-02, -5.3987e-03,\n",
       "                       4.4457e-02,  2.5176e-02, -1.8032e-02,  4.7841e-02,  3.0444e-02,\n",
       "                       3.0689e-02,  2.8108e-02, -2.6788e-02, -7.1927e-02,  2.8065e-02,\n",
       "                      -3.0886e-02,  2.5438e-02, -5.0396e-02,  2.7762e-02,  7.9561e-03,\n",
       "                       2.5673e-02, -4.9705e-02,  2.3278e-02,  1.1184e-02,  2.0725e-02,\n",
       "                       6.5781e-02,  2.0661e-03,  2.2975e-02,  3.5342e-02, -3.1844e-02,\n",
       "                      -1.4087e-02,  1.6073e-02, -8.9274e-03, -3.6145e-02, -7.6539e-03,\n",
       "                      -5.5632e-02, -1.6863e-02,  1.8005e-02, -3.4866e-03, -3.3770e-03,\n",
       "                      -9.0975e-03,  7.9565e-02,  1.6913e-02,  2.0134e-02,  2.1725e-02,\n",
       "                       4.2646e-03,  4.5716e-02, -2.6018e-02, -4.1006e-02,  1.1584e-02,\n",
       "                      -2.7128e-02,  4.8308e-03,  6.5513e-02,  5.5388e-02,  2.0069e-02,\n",
       "                      -4.7564e-02,  2.1507e-02,  2.7863e-02, -2.9222e-02,  8.0782e-03,\n",
       "                      -5.7953e-02, -5.6061e-02,  1.5054e-02, -4.1545e-03,  2.0133e-02,\n",
       "                      -6.3369e-02, -4.0622e-03,  3.1216e-02,  5.6039e-02,  3.3282e-02,\n",
       "                       1.5958e-02,  1.2411e-01,  4.9896e-02, -2.3296e-02,  2.5142e-02,\n",
       "                      -3.2291e-02,  7.0046e-03, -2.0534e-03, -5.4379e-03, -2.0800e-02,\n",
       "                      -3.1562e-02, -2.3536e-03, -1.3809e-02,  3.4286e-02,  1.3339e-02,\n",
       "                      -5.9584e-02, -6.1031e-02, -4.1022e-02, -1.6721e-02, -1.0943e-02,\n",
       "                       1.4041e-02,  1.5372e-02,  2.8177e-02,  3.1539e-02, -3.3254e-02,\n",
       "                      -5.8952e-03, -1.0879e-02,  4.2804e-02, -4.9217e-03,  7.8356e-04,\n",
       "                       1.7437e-02, -1.0358e-02,  4.7028e-02, -4.4061e-02,  1.0665e-02,\n",
       "                       4.3272e-02,  1.5441e-02,  1.0603e-02,  1.3395e-02, -3.8564e-02,\n",
       "                       2.9887e-02,  7.3384e-02, -5.3357e-02, -3.9387e-02,  8.2394e-03,\n",
       "                      -2.8211e-02, -9.3337e-03, -1.4228e-02,  1.5260e-03, -1.7428e-02,\n",
       "                       4.8332e-03, -3.1381e-02, -7.7544e-02,  3.1494e-02, -5.4165e-02,\n",
       "                       3.0041e-02,  2.0965e-03, -1.6213e-02,  2.2056e-02,  4.8712e-03,\n",
       "                      -2.6772e-02,  2.0860e-02,  6.6504e-02,  1.6520e-03, -4.4006e-03,\n",
       "                       5.6923e-03,  2.0171e-02,  7.8247e-02,  8.3135e-03, -4.1938e-02,\n",
       "                      -2.9130e-02, -1.0065e-03,  8.5710e-03,  4.2263e-02,  2.0311e-02,\n",
       "                       2.3673e-02,  1.6195e-02, -2.0723e-02,  4.7588e-02,  2.1567e-02,\n",
       "                      -6.7056e-02,  4.3041e-02, -2.2745e-03,  2.5500e-03,  1.4991e-03,\n",
       "                      -1.1927e-02, -1.5417e-02,  1.3544e-02, -5.3195e-02, -2.2160e-02,\n",
       "                       8.8368e-03, -9.3340e-03,  5.4311e-02,  1.7055e-02,  1.9499e-02,\n",
       "                      -2.4502e-02,  3.8181e-02, -7.1696e-03, -8.0037e-03, -2.4822e-03,\n",
       "                       2.3689e-02,  4.5655e-02,  6.2655e-03,  3.3317e-02, -2.9564e-02,\n",
       "                      -8.2146e-03, -4.2891e-03, -1.7102e-02, -5.0434e-02,  1.7059e-02,\n",
       "                      -2.4334e-02, -6.7982e-03, -4.4762e-02, -2.0473e-02,  1.7221e-02,\n",
       "                       9.8948e-03, -2.2034e-02,  2.1479e-02,  2.1785e-02,  1.0731e-02,\n",
       "                      -3.9326e-02, -6.8563e-03, -9.8351e-03,  5.5244e-02, -2.5810e-02,\n",
       "                       3.4727e-03,  1.5792e-02,  4.3762e-02,  6.4948e-03, -6.3404e-03,\n",
       "                       8.6849e-02,  7.6933e-03, -2.1644e-02, -2.4844e-02, -5.9238e-02,\n",
       "                       1.8758e-02,  7.9022e-03,  1.2829e-02,  5.0435e-03, -3.7289e-02,\n",
       "                      -1.6347e-02, -4.3247e-03, -2.2143e-02, -6.7104e-03, -5.3336e-02,\n",
       "                      -8.7783e-03, -2.4326e-02,  6.9842e-02, -1.9190e-02,  7.1895e-02,\n",
       "                       3.4594e-02,  6.8224e-03,  2.1937e-02,  7.5706e-03,  1.1778e-02,\n",
       "                      -2.3739e-02, -9.5285e-03, -1.8809e-02,  3.2313e-02, -1.8140e-02,\n",
       "                      -7.3936e-03,  2.1436e-02, -2.3217e-02,  3.6531e-02,  3.1490e-02,\n",
       "                       1.6411e-02,  5.8390e-02, -1.8166e-02, -1.3370e-02, -1.5808e-02,\n",
       "                       1.1214e-02,  1.2127e-02,  6.1279e-02,  7.4019e-03, -4.9323e-02,\n",
       "                       3.5217e-02,  2.5837e-02, -2.1330e-03,  1.8957e-02, -3.6103e-02,\n",
       "                      -2.9567e-02, -4.2879e-03, -3.0293e-02, -9.7586e-02, -2.4525e-02,\n",
       "                       1.5722e-02, -8.4801e-03,  6.5999e-02, -2.2040e-02, -3.3001e-02,\n",
       "                       2.3161e-02,  4.6012e-02,  4.9498e-02,  2.7548e-02, -1.4209e-02,\n",
       "                      -5.1780e-03,  8.4537e-02, -3.9781e-02,  3.7125e-02, -2.9006e-02,\n",
       "                      -3.0282e-02, -4.1038e-02, -5.6201e-02, -2.6195e-02, -1.5601e-02,\n",
       "                       5.8658e-03,  4.8884e-03,  2.2494e-02, -6.3062e-03,  2.2998e-02,\n",
       "                       3.8308e-02, -2.3774e-02, -2.4307e-02, -4.1628e-02,  1.6219e-02,\n",
       "                      -7.8223e-03, -4.9808e-02,  4.0197e-02,  1.6895e-02,  2.3269e-02,\n",
       "                       2.6661e-02, -1.8115e-02,  5.3343e-02, -1.7148e-02, -3.3053e-02,\n",
       "                       4.6277e-03,  9.3695e-04, -5.3358e-02,  5.8643e-03, -7.9407e-04,\n",
       "                       2.6981e-02, -2.3470e-02])),\n",
       "             ('conv_block3.4.running_var',\n",
       "              tensor([0.9312, 0.9362, 0.9370, 0.9342, 0.9266, 0.9338, 0.9258, 0.9277, 0.9319,\n",
       "                      0.9229, 0.9387, 0.9266, 0.9304, 0.9314, 0.9324, 0.9326, 0.9294, 0.9274,\n",
       "                      0.9335, 0.9272, 0.9253, 0.9308, 0.9325, 0.9316, 0.9308, 0.9243, 0.9279,\n",
       "                      0.9273, 0.9313, 0.9333, 0.9320, 0.9327, 0.9287, 0.9295, 0.9338, 0.9401,\n",
       "                      0.9352, 0.9267, 0.9252, 0.9248, 0.9379, 0.9293, 0.9299, 0.9330, 0.9255,\n",
       "                      0.9280, 0.9305, 0.9292, 0.9245, 0.9297, 0.9287, 0.9273, 0.9300, 0.9413,\n",
       "                      0.9291, 0.9387, 0.9442, 0.9295, 0.9283, 0.9288, 0.9287, 0.9351, 0.9337,\n",
       "                      0.9283, 0.9273, 0.9314, 0.9282, 0.9271, 0.9280, 0.9336, 0.9360, 0.9308,\n",
       "                      0.9226, 0.9243, 0.9345, 0.9360, 0.9269, 0.9334, 0.9365, 0.9268, 0.9260,\n",
       "                      0.9265, 0.9411, 0.9320, 0.9307, 0.9322, 0.9373, 0.9289, 0.9285, 0.9300,\n",
       "                      0.9263, 0.9360, 0.9349, 0.9298, 0.9291, 0.9265, 0.9295, 0.9288, 0.9332,\n",
       "                      0.9231, 0.9274, 0.9292, 0.9330, 0.9340, 0.9290, 0.9242, 0.9266, 0.9313,\n",
       "                      0.9345, 0.9340, 0.9290, 0.9278, 0.9267, 0.9289, 0.9261, 0.9291, 0.9240,\n",
       "                      0.9250, 0.9263, 0.9296, 0.9264, 0.9366, 0.9261, 0.9450, 0.9295, 0.9336,\n",
       "                      0.9343, 0.9267, 0.9339, 0.9262, 0.9243, 0.9342, 0.9301, 0.9335, 0.9328,\n",
       "                      0.9342, 0.9348, 0.9370, 0.9326, 0.9263, 0.9366, 0.9324, 0.9263, 0.9314,\n",
       "                      0.9276, 0.9314, 0.9252, 0.9242, 0.9273, 0.9377, 0.9367, 0.9230, 0.9428,\n",
       "                      0.9318, 0.9410, 0.9352, 0.9356, 0.9328, 0.9214, 0.9299, 0.9296, 0.9372,\n",
       "                      0.9286, 0.9312, 0.9313, 0.9258, 0.9327, 0.9307, 0.9329, 0.9251, 0.9280,\n",
       "                      0.9388, 0.9302, 0.9402, 0.9256, 0.9278, 0.9281, 0.9399, 0.9298, 0.9292,\n",
       "                      0.9305, 0.9303, 0.9314, 0.9295, 0.9253, 0.9360, 0.9376, 0.9264, 0.9302,\n",
       "                      0.9259, 0.9306, 0.9337, 0.9270, 0.9275, 0.9307, 0.9305, 0.9225, 0.9371,\n",
       "                      0.9358, 0.9337, 0.9304, 0.9359, 0.9319, 0.9321, 0.9331, 0.9318, 0.9318,\n",
       "                      0.9296, 0.9298, 0.9328, 0.9283, 0.9330, 0.9262, 0.9287, 0.9292, 0.9314,\n",
       "                      0.9268, 0.9308, 0.9252, 0.9362, 0.9360, 0.9275, 0.9328, 0.9232, 0.9336,\n",
       "                      0.9319, 0.9263, 0.9279, 0.9352, 0.9317, 0.9265, 0.9447, 0.9299, 0.9260,\n",
       "                      0.9336, 0.9360, 0.9274, 0.9283, 0.9303, 0.9324, 0.9340, 0.9312, 0.9302,\n",
       "                      0.9247, 0.9351, 0.9252, 0.9283, 0.9323, 0.9298, 0.9307, 0.9284, 0.9354,\n",
       "                      0.9268, 0.9324, 0.9280, 0.9276, 0.9296, 0.9333, 0.9406, 0.9278, 0.9338,\n",
       "                      0.9336, 0.9410, 0.9296, 0.9315, 0.9407, 0.9268, 0.9291, 0.9320, 0.9271,\n",
       "                      0.9444, 0.9289, 0.9373, 0.9241, 0.9321, 0.9333, 0.9291, 0.9330, 0.9288,\n",
       "                      0.9245, 0.9280, 0.9355, 0.9434, 0.9278, 0.9267, 0.9351, 0.9274, 0.9395,\n",
       "                      0.9262, 0.9292, 0.9230, 0.9324, 0.9282, 0.9247, 0.9275, 0.9255, 0.9317,\n",
       "                      0.9373, 0.9274, 0.9324, 0.9319, 0.9249, 0.9272, 0.9368, 0.9335, 0.9297,\n",
       "                      0.9333, 0.9345, 0.9255, 0.9244, 0.9346, 0.9344, 0.9309, 0.9260, 0.9258,\n",
       "                      0.9234, 0.9321, 0.9268, 0.9294, 0.9304, 0.9269, 0.9465, 0.9273, 0.9336,\n",
       "                      0.9300, 0.9280, 0.9277, 0.9308, 0.9269, 0.9359, 0.9365, 0.9249, 0.9255,\n",
       "                      0.9294, 0.9322, 0.9371, 0.9346, 0.9236, 0.9324, 0.9271, 0.9358, 0.9277,\n",
       "                      0.9345, 0.9235, 0.9430, 0.9366, 0.9278, 0.9305, 0.9285, 0.9362, 0.9325,\n",
       "                      0.9291, 0.9251, 0.9318, 0.9275, 0.9390, 0.9375, 0.9359, 0.9288, 0.9302,\n",
       "                      0.9289, 0.9372, 0.9263, 0.9315, 0.9294, 0.9281, 0.9437, 0.9274, 0.9318,\n",
       "                      0.9276, 0.9291, 0.9256, 0.9267, 0.9328, 0.9280, 0.9266, 0.9328, 0.9339,\n",
       "                      0.9331, 0.9278, 0.9279, 0.9360, 0.9292, 0.9279, 0.9294, 0.9260, 0.9296,\n",
       "                      0.9289, 0.9324, 0.9296, 0.9335, 0.9295, 0.9376, 0.9251, 0.9287, 0.9306,\n",
       "                      0.9296, 0.9468, 0.9316, 0.9385, 0.9302, 0.9365, 0.9281, 0.9298, 0.9294,\n",
       "                      0.9280, 0.9278, 0.9278, 0.9345, 0.9293, 0.9376, 0.9273, 0.9321, 0.9302,\n",
       "                      0.9256, 0.9377, 0.9291, 0.9314, 0.9299, 0.9262, 0.9286, 0.9337, 0.9300,\n",
       "                      0.9331, 0.9289, 0.9343, 0.9298, 0.9461, 0.9260, 0.9322, 0.9542, 0.9241,\n",
       "                      0.9291, 0.9270, 0.9398, 0.9309, 0.9275, 0.9362, 0.9353, 0.9299, 0.9327,\n",
       "                      0.9381, 0.9298, 0.9349, 0.9282, 0.9236, 0.9298, 0.9282, 0.9360, 0.9311,\n",
       "                      0.9273, 0.9365, 0.9275, 0.9291, 0.9410, 0.9259, 0.9280, 0.9292, 0.9267,\n",
       "                      0.9299, 0.9282, 0.9314, 0.9266, 0.9331, 0.9324, 0.9285, 0.9239, 0.9301,\n",
       "                      0.9335, 0.9385, 0.9285, 0.9366, 0.9306, 0.9409, 0.9253, 0.9369, 0.9377,\n",
       "                      0.9341, 0.9351, 0.9260, 0.9277, 0.9340, 0.9292, 0.9254, 0.9306, 0.9256,\n",
       "                      0.9284, 0.9283, 0.9328, 0.9301, 0.9332, 0.9260, 0.9302, 0.9377, 0.9338,\n",
       "                      0.9295, 0.9309, 0.9320, 0.9322, 0.9315, 0.9239, 0.9323, 0.9376, 0.9278,\n",
       "                      0.9252, 0.9266, 0.9247, 0.9263, 0.9340, 0.9275, 0.9315, 0.9315])),\n",
       "             ('conv_block3.4.num_batches_tracked', tensor(1)),\n",
       "             ('classifier.0.weight',\n",
       "              tensor([[-0.0208,  0.0128,  0.0097,  ..., -0.0023,  0.0048,  0.0220],\n",
       "                      [ 0.0067,  0.0009, -0.0211,  ..., -0.0060,  0.0130,  0.0025],\n",
       "                      [ 0.0231, -0.0004, -0.0104,  ...,  0.0182, -0.0081, -0.0097],\n",
       "                      ...,\n",
       "                      [-0.0189, -0.0083, -0.0026,  ..., -0.0223, -0.0126,  0.0068],\n",
       "                      [ 0.0176,  0.0233,  0.0115,  ...,  0.0071, -0.0005,  0.0024],\n",
       "                      [ 0.0244,  0.0121, -0.0083,  ...,  0.0014,  0.0200,  0.0083]])),\n",
       "             ('classifier.0.bias', tensor([0., 0., 0.,  ..., 0., 0., 0.])),\n",
       "             ('classifier.2.weight',\n",
       "              tensor([[ 0.0395,  0.0130,  0.0588,  ..., -0.0261,  0.0328,  0.0533],\n",
       "                      [ 0.0094,  0.0446, -0.0012,  ...,  0.0668,  0.0150,  0.0636],\n",
       "                      [-0.0205,  0.0589,  0.0300,  ..., -0.0588,  0.0067, -0.0466],\n",
       "                      ...,\n",
       "                      [ 0.0347,  0.0731, -0.0437,  ...,  0.0591, -0.0213, -0.0381],\n",
       "                      [-0.0049, -0.0106,  0.0032,  ...,  0.0054, -0.0606, -0.0543],\n",
       "                      [ 0.0543,  0.0059,  0.0628,  ..., -0.0064, -0.0630,  0.0731]])),\n",
       "             ('classifier.2.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]))])"
      ]
     },
     "execution_count": 46,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.state_dict()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 设置交叉熵损失函数，SGD优化器"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:40.023837Z",
     "start_time": "2025-06-26T01:43:40.019952Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "损失函数: CrossEntropyLoss()\n"
     ]
    }
   ],
   "source": [
    "model = NeuralNetwork()\n",
    "# 定义损失函数和优化器\n",
     "loss_fn = nn.CrossEntropyLoss()  # 交叉熵损失函数，适用于多分类问题，内部会做log-softmax，并直接接受0-9的类别索引标签（无需one-hot编码）\n",
    "\n",
    "print(\"损失函数:\", loss_fn)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:40.035848Z",
     "start_time": "2025-06-26T01:43:40.032419Z"
    }
   },
   "outputs": [],
   "source": [
    "model = NeuralNetwork()\n",
    "\n",
     "optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9)  # SGD优化器，学习率为0.001，动量为0.9"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.732814Z",
     "start_time": "2025-06-26T01:43:40.035848Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "使用设备: cpu\n",
      "训练开始，共35200步\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "2bcb4a8b6dae4abeaf1083a8b574622c",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "  0%|          | 0/35200 [00:00<?, ?it/s]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001B[1;31m---------------------------------------------------------------------------\u001B[0m",
      "\u001B[1;31mKeyboardInterrupt\u001B[0m                         Traceback (most recent call last)",
      "Cell \u001B[1;32mIn[49], line 8\u001B[0m\n\u001B[0;32m      4\u001B[0m early_stopping\u001B[38;5;241m=\u001B[39mEarlyStopping(patience\u001B[38;5;241m=\u001B[39m\u001B[38;5;241m5\u001B[39m, delta\u001B[38;5;241m=\u001B[39m\u001B[38;5;241m0.001\u001B[39m)\n\u001B[0;32m      5\u001B[0m model_saver\u001B[38;5;241m=\u001B[39mModelSaver(save_dir\u001B[38;5;241m=\u001B[39m\u001B[38;5;124m'\u001B[39m\u001B[38;5;124mmodel_weights\u001B[39m\u001B[38;5;124m'\u001B[39m, save_best_only\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mTrue\u001B[39;00m)\n\u001B[1;32m----> 8\u001B[0m model, history \u001B[38;5;241m=\u001B[39m \u001B[43mtrain_classification_model\u001B[49m\u001B[43m(\u001B[49m\u001B[43mmodel\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mtrain_loader\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mval_loader\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mloss_fn\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43moptimizer\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mdevice\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mnum_epochs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;241;43m50\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mearly_stopping\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mearly_stopping\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mmodel_saver\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mmodel_saver\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mtensorboard_logger\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mNone\u001B[39;49;00m\u001B[43m)\u001B[49m\n",
      "File \u001B[1;32md:\\BaiduSyncdisk\\pytorch\\chapter_6_AI\\wangdao_deeplearning_train.py:193\u001B[0m, in \u001B[0;36mtrain_classification_model\u001B[1;34m(model, train_loader, val_loader, criterion, optimizer, device, num_epochs, tensorboard_logger, model_saver, early_stopping, eval_step)\u001B[0m\n\u001B[0;32m    190\u001B[0m loss \u001B[38;5;241m=\u001B[39m criterion(logits, labels)\n\u001B[0;32m    192\u001B[0m \u001B[38;5;66;03m# 梯度回传，计算梯度\u001B[39;00m\n\u001B[1;32m--> 193\u001B[0m \u001B[43mloss\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mbackward\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[0;32m    195\u001B[0m \u001B[38;5;66;03m# 更新模型参数\u001B[39;00m\n\u001B[0;32m    196\u001B[0m optimizer\u001B[38;5;241m.\u001B[39mstep()\n",
      "File \u001B[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\torch\\_tensor.py:581\u001B[0m, in \u001B[0;36mTensor.backward\u001B[1;34m(self, gradient, retain_graph, create_graph, inputs)\u001B[0m\n\u001B[0;32m    571\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m has_torch_function_unary(\u001B[38;5;28mself\u001B[39m):\n\u001B[0;32m    572\u001B[0m     \u001B[38;5;28;01mreturn\u001B[39;00m handle_torch_function(\n\u001B[0;32m    573\u001B[0m         Tensor\u001B[38;5;241m.\u001B[39mbackward,\n\u001B[0;32m    574\u001B[0m         (\u001B[38;5;28mself\u001B[39m,),\n\u001B[1;32m   (...)\u001B[0m\n\u001B[0;32m    579\u001B[0m         inputs\u001B[38;5;241m=\u001B[39minputs,\n\u001B[0;32m    580\u001B[0m     )\n\u001B[1;32m--> 581\u001B[0m \u001B[43mtorch\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mautograd\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mbackward\u001B[49m\u001B[43m(\u001B[49m\n\u001B[0;32m    582\u001B[0m \u001B[43m    \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mgradient\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mretain_graph\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcreate_graph\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43minputs\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43minputs\u001B[49m\n\u001B[0;32m    583\u001B[0m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\torch\\autograd\\__init__.py:347\u001B[0m, in \u001B[0;36mbackward\u001B[1;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001B[0m\n\u001B[0;32m    342\u001B[0m     retain_graph \u001B[38;5;241m=\u001B[39m create_graph\n\u001B[0;32m    344\u001B[0m \u001B[38;5;66;03m# The reason we repeat the same comment below is that\u001B[39;00m\n\u001B[0;32m    345\u001B[0m \u001B[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001B[39;00m\n\u001B[0;32m    346\u001B[0m \u001B[38;5;66;03m# calls in the traceback and some print out the last line\u001B[39;00m\n\u001B[1;32m--> 347\u001B[0m \u001B[43m_engine_run_backward\u001B[49m\u001B[43m(\u001B[49m\n\u001B[0;32m    348\u001B[0m \u001B[43m    \u001B[49m\u001B[43mtensors\u001B[49m\u001B[43m,\u001B[49m\n\u001B[0;32m    349\u001B[0m \u001B[43m    \u001B[49m\u001B[43mgrad_tensors_\u001B[49m\u001B[43m,\u001B[49m\n\u001B[0;32m    350\u001B[0m \u001B[43m    \u001B[49m\u001B[43mretain_graph\u001B[49m\u001B[43m,\u001B[49m\n\u001B[0;32m    351\u001B[0m \u001B[43m    \u001B[49m\u001B[43mcreate_graph\u001B[49m\u001B[43m,\u001B[49m\n\u001B[0;32m    352\u001B[0m \u001B[43m    \u001B[49m\u001B[43minputs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[0;32m    353\u001B[0m \u001B[43m    \u001B[49m\u001B[43mallow_unreachable\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[0;32m    354\u001B[0m \u001B[43m    \u001B[49m\u001B[43maccumulate_grad\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[38;5;28;43;01mTrue\u001B[39;49;00m\u001B[43m,\u001B[49m\n\u001B[0;32m    355\u001B[0m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[1;32m~\\AppData\\Roaming\\Python\\Python312\\site-packages\\torch\\autograd\\graph.py:825\u001B[0m, in \u001B[0;36m_engine_run_backward\u001B[1;34m(t_outputs, *args, **kwargs)\u001B[0m\n\u001B[0;32m    823\u001B[0m     unregister_hooks \u001B[38;5;241m=\u001B[39m _register_logging_hooks_on_whole_graph(t_outputs)\n\u001B[0;32m    824\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[1;32m--> 825\u001B[0m     \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mVariable\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43m_execution_engine\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mrun_backward\u001B[49m\u001B[43m(\u001B[49m\u001B[43m  \u001B[49m\u001B[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001B[39;49;00m\n\u001B[0;32m    826\u001B[0m \u001B[43m        \u001B[49m\u001B[43mt_outputs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\n\u001B[0;32m    827\u001B[0m \u001B[43m    \u001B[49m\u001B[43m)\u001B[49m  \u001B[38;5;66;03m# Calls into the C++ engine to run the backward pass\u001B[39;00m\n\u001B[0;32m    828\u001B[0m \u001B[38;5;28;01mfinally\u001B[39;00m:\n\u001B[0;32m    829\u001B[0m     \u001B[38;5;28;01mif\u001B[39;00m attach_logging_hooks:\n",
      "\u001B[1;31mKeyboardInterrupt\u001B[0m: "
     ]
    }
   ],
   "source": [
    "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
    "print(f\"使用设备: {device}\")\n",
     "model = model.to(device) #将模型移动到计算设备（有GPU则用GPU，否则留在CPU）\n",
    "early_stopping=EarlyStopping(patience=5, delta=0.001)\n",
    "model_saver=ModelSaver(save_dir='model_weights', save_best_only=True)\n",
    "\n",
    "\n",
    "model, history = train_classification_model(model, train_loader, val_loader, loss_fn, optimizer, device, num_epochs=50, early_stopping=early_stopping, model_saver=model_saver, tensorboard_logger=None)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.737721Z",
     "start_time": "2025-06-26T01:45:37.732814Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'loss': 0.06384491175413132, 'acc': 98.4375, 'step': 20901},\n",
       " {'loss': 0.20276488363742828, 'acc': 89.0625, 'step': 20902},\n",
       " {'loss': 0.20786789059638977, 'acc': 92.1875, 'step': 20903},\n",
       " {'loss': 0.19361482560634613, 'acc': 90.625, 'step': 20904},\n",
       " {'loss': 0.11045418679714203, 'acc': 93.75, 'step': 20905},\n",
       " {'loss': 0.14794345200061798, 'acc': 95.3125, 'step': 20906},\n",
       " {'loss': 0.26282602548599243, 'acc': 87.5, 'step': 20907},\n",
       " {'loss': 0.132650226354599, 'acc': 95.3125, 'step': 20908},\n",
       " {'loss': 0.053449846804142, 'acc': 96.875, 'step': 20909},\n",
       " {'loss': 0.1254071295261383, 'acc': 96.875, 'step': 20910},\n",
       " {'loss': 0.12051813304424286, 'acc': 95.3125, 'step': 20911},\n",
       " {'loss': 0.1870228499174118, 'acc': 93.75, 'step': 20912},\n",
       " {'loss': 0.09360282123088837, 'acc': 96.875, 'step': 20913},\n",
       " {'loss': 0.038920432329177856, 'acc': 100.0, 'step': 20914},\n",
       " {'loss': 0.11737897992134094, 'acc': 93.75, 'step': 20915},\n",
       " {'loss': 0.12432114779949188, 'acc': 96.875, 'step': 20916},\n",
       " {'loss': 0.10819239169359207, 'acc': 95.3125, 'step': 20917},\n",
       " {'loss': 0.11210165172815323, 'acc': 93.75, 'step': 20918},\n",
       " {'loss': 0.053383588790893555, 'acc': 98.4375, 'step': 20919},\n",
       " {'loss': 0.09564866870641708, 'acc': 98.4375, 'step': 20920},\n",
       " {'loss': 0.1821669191122055, 'acc': 95.3125, 'step': 20921},\n",
       " {'loss': 0.11640985310077667, 'acc': 93.75, 'step': 20922},\n",
       " {'loss': 0.052147772163152695, 'acc': 100.0, 'step': 20923},\n",
       " {'loss': 0.16404616832733154, 'acc': 90.625, 'step': 20924},\n",
       " {'loss': 0.10825297236442566, 'acc': 96.875, 'step': 20925},\n",
       " {'loss': 0.2164526730775833, 'acc': 93.75, 'step': 20926},\n",
       " {'loss': 0.28561729192733765, 'acc': 89.0625, 'step': 20927},\n",
       " {'loss': 0.04579192027449608, 'acc': 100.0, 'step': 20928},\n",
       " {'loss': 0.09582198411226273, 'acc': 96.875, 'step': 20929},\n",
       " {'loss': 0.12679031491279602, 'acc': 95.3125, 'step': 20930},\n",
       " {'loss': 0.2336992621421814, 'acc': 87.5, 'step': 20931},\n",
       " {'loss': 0.10615304857492447, 'acc': 95.3125, 'step': 20932},\n",
       " {'loss': 0.10398928821086884, 'acc': 95.3125, 'step': 20933},\n",
       " {'loss': 0.13045868277549744, 'acc': 93.75, 'step': 20934},\n",
       " {'loss': 0.2080601304769516, 'acc': 89.0625, 'step': 20935},\n",
       " {'loss': 0.20070905983448029, 'acc': 93.75, 'step': 20936},\n",
       " {'loss': 0.06665907055139542, 'acc': 98.4375, 'step': 20937},\n",
       " {'loss': 0.10915737599134445, 'acc': 96.875, 'step': 20938},\n",
       " {'loss': 0.15471863746643066, 'acc': 93.75, 'step': 20939},\n",
       " {'loss': 0.17789672315120697, 'acc': 90.625, 'step': 20940},\n",
       " {'loss': 0.1540445238351822, 'acc': 93.75, 'step': 20941},\n",
       " {'loss': 0.1487114578485489, 'acc': 95.3125, 'step': 20942},\n",
       " {'loss': 0.0906917005777359, 'acc': 96.875, 'step': 20943},\n",
       " {'loss': 0.18069107830524445, 'acc': 96.875, 'step': 20944},\n",
       " {'loss': 0.06297708302736282, 'acc': 96.875, 'step': 20945},\n",
       " {'loss': 0.0945296511054039, 'acc': 96.875, 'step': 20946},\n",
       " {'loss': 0.1064353957772255, 'acc': 95.3125, 'step': 20947},\n",
       " {'loss': 0.10106613487005234, 'acc': 95.3125, 'step': 20948},\n",
       " {'loss': 0.1978270709514618, 'acc': 90.625, 'step': 20949},\n",
       " {'loss': 0.07122526317834854, 'acc': 96.875, 'step': 20950},\n",
       " {'loss': 0.24536970257759094, 'acc': 90.625, 'step': 20951},\n",
       " {'loss': 0.13602563738822937, 'acc': 93.75, 'step': 20952},\n",
       " {'loss': 0.10332592576742172, 'acc': 95.3125, 'step': 20953},\n",
       " {'loss': 0.15197597444057465, 'acc': 95.3125, 'step': 20954},\n",
       " {'loss': 0.10614384710788727, 'acc': 96.875, 'step': 20955},\n",
       " {'loss': 0.0938277542591095, 'acc': 95.3125, 'step': 20956},\n",
       " {'loss': 0.17759718000888824, 'acc': 92.1875, 'step': 20957},\n",
       " {'loss': 0.15237922966480255, 'acc': 92.1875, 'step': 20958},\n",
       " {'loss': 0.27344074845314026, 'acc': 89.0625, 'step': 20959},\n",
       " {'loss': 0.0806373581290245, 'acc': 96.875, 'step': 20960},\n",
       " {'loss': 0.16364435851573944, 'acc': 92.1875, 'step': 20961},\n",
       " {'loss': 0.11030598729848862, 'acc': 98.4375, 'step': 20962},\n",
       " {'loss': 0.2015817016363144, 'acc': 95.3125, 'step': 20963},\n",
       " {'loss': 0.11726050823926926, 'acc': 93.75, 'step': 20964},\n",
       " {'loss': 0.08437590301036835, 'acc': 96.875, 'step': 20965},\n",
       " {'loss': 0.27162399888038635, 'acc': 87.5, 'step': 20966},\n",
       " {'loss': 0.20128296315670013, 'acc': 92.1875, 'step': 20967},\n",
       " {'loss': 0.15134297311306, 'acc': 96.875, 'step': 20968},\n",
       " {'loss': 0.19934432208538055, 'acc': 92.1875, 'step': 20969},\n",
       " {'loss': 0.1675749123096466, 'acc': 92.1875, 'step': 20970},\n",
       " {'loss': 0.05743875354528427, 'acc': 96.875, 'step': 20971},\n",
       " {'loss': 0.07248720526695251, 'acc': 98.4375, 'step': 20972},\n",
       " {'loss': 0.13896992802619934, 'acc': 93.75, 'step': 20973},\n",
       " {'loss': 0.12481243908405304, 'acc': 95.3125, 'step': 20974},\n",
       " {'loss': 0.0460551381111145, 'acc': 100.0, 'step': 20975},\n",
       " {'loss': 0.2739729881286621, 'acc': 92.1875, 'step': 20976},\n",
       " {'loss': 0.1718432903289795, 'acc': 93.75, 'step': 20977},\n",
       " {'loss': 0.061360590159893036, 'acc': 98.4375, 'step': 20978},\n",
       " {'loss': 0.06863144785165787, 'acc': 96.875, 'step': 20979},\n",
       " {'loss': 0.10120609402656555, 'acc': 96.875, 'step': 20980},\n",
       " {'loss': 0.1153089702129364, 'acc': 96.875, 'step': 20981},\n",
       " {'loss': 0.15314456820487976, 'acc': 93.75, 'step': 20982},\n",
       " {'loss': 0.10311352461576462, 'acc': 96.875, 'step': 20983},\n",
       " {'loss': 0.18238304555416107, 'acc': 93.75, 'step': 20984},\n",
       " {'loss': 0.12446410953998566, 'acc': 92.1875, 'step': 20985},\n",
       " {'loss': 0.08146905899047852, 'acc': 96.875, 'step': 20986},\n",
       " {'loss': 0.09682399034500122, 'acc': 96.875, 'step': 20987},\n",
       " {'loss': 0.1253242790699005, 'acc': 93.75, 'step': 20988},\n",
       " {'loss': 0.09639178216457367, 'acc': 98.4375, 'step': 20989},\n",
       " {'loss': 0.15836486220359802, 'acc': 92.1875, 'step': 20990},\n",
       " {'loss': 0.15511967241764069, 'acc': 96.875, 'step': 20991},\n",
       " {'loss': 0.08068569749593735, 'acc': 96.875, 'step': 20992},\n",
       " {'loss': 0.13185115158557892, 'acc': 95.3125, 'step': 20993},\n",
       " {'loss': 0.12577024102210999, 'acc': 93.75, 'step': 20994},\n",
       " {'loss': 0.2853798568248749, 'acc': 85.9375, 'step': 20995},\n",
       " {'loss': 0.15519757568836212, 'acc': 93.75, 'step': 20996},\n",
       " {'loss': 0.20634007453918457, 'acc': 90.625, 'step': 20997},\n",
       " {'loss': 0.061440326273441315, 'acc': 98.4375, 'step': 20998},\n",
       " {'loss': 0.09722928702831268, 'acc': 96.875, 'step': 20999}]"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Show the most recent 100 training records.\n",
    "# Note: the previous slice [-100:-1] dropped the newest entry; [-100:] includes it.\n",
    "history['train'][-100:]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.741226Z",
     "start_time": "2025-06-26T01:45:37.737721Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'loss': 2.3031500419616697, 'acc': 3.9, 'step': 0},\n",
       " {'loss': 0.64743413772583, 'acc': 76.32, 'step': 500},\n",
       " {'loss': 0.5234741040229798, 'acc': 80.38, 'step': 1000},\n",
       " {'loss': 0.45916834726333616, 'acc': 83.68, 'step': 1500},\n",
       " {'loss': 0.4298902058839798, 'acc': 84.62, 'step': 2000},\n",
       " {'loss': 0.4169245558023453, 'acc': 85.62, 'step': 2500},\n",
       " {'loss': 0.36880439991950986, 'acc': 86.74, 'step': 3000},\n",
       " {'loss': 0.3662023733615875, 'acc': 86.4, 'step': 3500},\n",
       " {'loss': 0.350418140232563, 'acc': 87.12, 'step': 4000},\n",
       " {'loss': 0.3435691477537155, 'acc': 87.56, 'step': 4500},\n",
       " {'loss': 0.3275507384061813, 'acc': 88.1, 'step': 5000},\n",
       " {'loss': 0.31591101334095, 'acc': 88.42, 'step': 5500},\n",
       " {'loss': 0.31030238451957703, 'acc': 88.64, 'step': 6000},\n",
       " {'loss': 0.3010372543692589, 'acc': 89.12, 'step': 6500},\n",
       " {'loss': 0.29796574910879137, 'acc': 89.08, 'step': 7000},\n",
       " {'loss': 0.2885057336330414, 'acc': 89.56, 'step': 7500},\n",
       " {'loss': 0.3216348115205765, 'acc': 87.84, 'step': 8000},\n",
       " {'loss': 0.288613707613945, 'acc': 89.66, 'step': 8500},\n",
       " {'loss': 0.27080982087254524, 'acc': 89.82, 'step': 9000},\n",
       " {'loss': 0.291040456867218, 'acc': 89.7, 'step': 9500},\n",
       " {'loss': 0.26751453694701194, 'acc': 90.3, 'step': 10000},\n",
       " {'loss': 0.2709929524064064, 'acc': 90.24, 'step': 10500},\n",
       " {'loss': 0.2903787411928177, 'acc': 89.72, 'step': 11000},\n",
       " {'loss': 0.2626781419634819, 'acc': 90.48, 'step': 11500},\n",
       " {'loss': 0.26104585822820664, 'acc': 90.44, 'step': 12000},\n",
       " {'loss': 0.25581388503313063, 'acc': 90.86, 'step': 12500},\n",
       " {'loss': 0.2597031578719616, 'acc': 90.76, 'step': 13000},\n",
       " {'loss': 0.2572694596529007, 'acc': 90.54, 'step': 13500},\n",
       " {'loss': 0.25097098011821506, 'acc': 90.8, 'step': 14000},\n",
       " {'loss': 0.2816928417801857, 'acc': 90.16, 'step': 14500},\n",
       " {'loss': 0.25512566217035054, 'acc': 90.98, 'step': 15000},\n",
       " {'loss': 0.2807390443563461, 'acc': 90.1, 'step': 15500},\n",
       " {'loss': 0.25120172172784805, 'acc': 91.4, 'step': 16000},\n",
       " {'loss': 0.25184848205447197, 'acc': 91.22, 'step': 16500},\n",
       " {'loss': 0.24769503179490565, 'acc': 91.5, 'step': 17000},\n",
       " {'loss': 0.2745075279131532, 'acc': 90.4, 'step': 17500},\n",
       " {'loss': 0.2425320431679487, 'acc': 91.22, 'step': 18000},\n",
       " {'loss': 0.24461129485219718, 'acc': 91.7, 'step': 18500},\n",
       " {'loss': 0.24318830487430096, 'acc': 91.24, 'step': 19000},\n",
       " {'loss': 0.26301120128929617, 'acc': 91.54, 'step': 19500},\n",
       " {'loss': 0.23785226551294328, 'acc': 91.68, 'step': 20000},\n",
       " {'loss': 0.2427010852009058, 'acc': 91.48, 'step': 20500}]"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Show the most recent validation records.\n",
    "# Note: the previous slice [-1000:-1] dropped the newest entry; [-1000:] includes it.\n",
    "history['val'][-1000:]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 绘制损失曲线和准确率曲线"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.816716Z",
     "start_time": "2025-06-26T01:45:37.744941Z"
    }
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzoAAAHACAYAAABqJx3iAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAphFJREFUeJzs3Qd4U+X3B/BvZkuhpcy27L2hbGSIoOwhiOJCBVTcA/m5/y5cuDduBReiqOAAkb333nuPsqFAV9b/Oe9NSls6kjZt0tzv53nuk53cvB03J+e85zW4XC4XiIiIiIiIQogx0DtARERERETkbwx0iIiIiIgo5DDQISIiIiKikMNAh4iIiIiIQg4DHSIiIiIiCjkMdIiIiIiIKOQw0CEiIiIiopDDQIeIiIiIiEKOGcWA0+nEkSNHEBkZCYPBEOjdISLSDVlT+vz586hUqRKMRn435sHjEhFR8B+bikWgIweTqlWrBno3iIh06+DBg6hSpUqgdyNo8LhERBT8x6ZiEejIN2aeNxMVFeXz4202G2bMmIEePXrAYrEUwh6GFo6XbzhevuF4Fa8xS0xMVB/oPf+HScPjUtHjmPmG4+UbjlfxGi9vj03FItDxlAXIwSS/B5SIiAj1WP7y5o3j5RuOl284XsVzzFielRmPS0WPY+YbjpdvOF7Fc7zyOjax4JqIiIiIiEIOAx0iIiIiIgo5DHSIiIiIiCjkFIs5OkQUvO0d7XY7HA6HT3W9ZrMZKSkpPj1OzwpzzEwmk3puzsEhIqJQw0CHiPIlLS0NR48eRVJSks/BUWxsrOpWxQ/XwTFmMqE0Li4OVqsVxcGCBQvw9ttvY/Xq1ep3cPLkyRg4cGCm8XrxxRfx1Vdf4ezZs+jYsSM+++wz1K1bN/0+p0+fxsMPP4y///5brcFw/fXX48MPP0SpUqUC9K6IiMjfGOgQUb4WS9y7d6/KBshiXfIB2dsP4PLYCxcuqA+UXIASAR0zCQgkYD1x4oT6eUogUBx+JhcvXkR8fDzuvPNODBo06LLb33rrLXz00Uf47rvvULNmTTz//PPo2bMntmzZgvDwcHWfIUOGqCBp5syZKmM2fPhw3HPPPZgwYUIA3hERERUGBjpE5DP5cCwfvqWHvWQDfCGPk8fLB87i8KE6GBTmmJUoUUK1Bt2/f3/6awS73r17qy2n4O2DDz7Ac889hwEDBqjrvv/+e8TExGDKlCm4+eabsXXrVkyfPh0rV65E69at1X0+/vhj9OnTB++8844K3omIqPhjoENE+cZAJTSE0s9RMlMJCQno1q1b+nWlS5dGu3btsHTpUhXoyGl0dHR6kCPk/jIOy5cvx3XXXXfZ86ampqot42J1QrJBsvnK85j8PFavOGa+4Xj5huNVvMbL29dloENERCFDghwhGZyM5LLnNjmtWLFiptulIUPZsmXT75PVmDFjMHr06Muul5XBfc1qZiSlc+QbjplvOF6+4XgVj/Hydn4wAx0iIqI8PPPMMxg1alSmjI6Ubvbo0UOtDJ6fbyPlA0L37t25CruXOGa+4Xj5huNVvMbLk1XPCwMdIqJ8qlGjBkaOHKm2gpo3bx66du2KM2fOqLIqyh/pTieOHTumOsl5yOXmzZun3+f48eOZHidt0qUTm+fxWYWFhaktKznAF+QgX9DH6xHHzDccL99wvIrHeHn7mqFTmE1E5IUuXbr4JTARMpldOnVR8JAuaxKszJ49O9M3fzL3pn379uqynErbaWlP7TFnzhzV9EHm8hARUWhgRoeIKEvXLlmUU+Zs5KVChQpFsk+UmbTa3rVrV6YGBOvWrVNzbKpVq6YC2VdffVW1y/a0l5ZOap61dho2bIhevXphxIgR+Pzzz1UJxkMPPaQaFbDjGhFR6Aj5jM7aVYvxxzv34sTmWYHeFaKQDg6S0uxeb8lpDp/un9smr+2tYcOGYf78+WphSFn3R7bx48er03///RetWrVS5UmLFi3C
7t27VXtimcQu69e0adMGs2bNuqx0TVoZe8jzfP3116prl0xQlw/af/31V77H9ffff0fjxo1VC+hmzZrhvffey3T7p59+ql5DWkLLft5www3pt/32229o2rSpemy5cuVUVzFZfyYUrFq1Ci1atFCbkLkzcv6FF15Ql5988km1GKhk2+TnJoGRtJPO2Dr7p59+QoMGDXDNNdeottKdOnXCl19+GbD3RETkb1uPJuK6Txej1wcLctyGjVuBXcfPI1SFfEbHeGYfbkr9HZtRJ9C7QhSykm0ONHrhv4C89paXeyLC6t2/MglwduzYgSZNmuDll19W123evFmdPv3002oNlVq1aqFMmTI4ePCg+gD82muvqeBH1mLp378/tm/frrIGOZHOXLJg5dtvv63WZpGFKWWNGsk2+ELKqm688Ua89NJLGDx4sCqtevzxx1G+fHkVsMmH/UceeQQ//PADOnTooOaXLFy4UD1WFsK85ZZb1H5I0HX+/Hl1my9BYbCXH+b2XiTglJ+v52ecHfl5cHFQIgpVqXYHHp24FjuOXcj1ftsSzmPF3tMYM6gpBjSvjFAT+oGORfsGzwx7oHeFiAJM1lOxWq0q2+KZdL5t2zZ1Kh+KpXtMxg/C8fHx6ZdfeeUVTJ48WWVopMwpJxKESJAhXn/9dXz00UdYsWKFKpXyhWRvJNsgZVcyd0T2V0q0JICS1zhw4ABKliyJfv36ITIyEtWrV0/PcEigI5PrBw0apK4Xkt0hIiJ9GDtnlwpyypW04p0b42HJZr00p8uFz+btxtI9p/DoxHVYvvc0XujXCOEWE0JFyAc6JqsW6FiRFuhdIQpZJSwmlVnxhnxoP594HpFRkX5ZqFJe2x8yLh4ppNxJsilTp05NDxySk5NVgJEbKTHzkEBEWg9n7fDlja1bt6rSuYwkcyNZKZlDJEGZBDGSgZIgSjZPyZwEaBIkSXDTs2dP1QJZytokU0VERKFt85Fz+HTebnX+5QFN0LV+5nXDMupYpzw+mLUDn8zdhQnLD2D9wbP4dEhLVC9XEqHAqJdAx+LiSrdEhUVKhaR8zNuthNXk0/1z2+S1/UGCkoykTEwyOJKVkbIvmewugUNaWppPLS9l/yS48zfJ4qxZswY///yzaqMs81MkwJFuYiaTSa1vIPOOGjVqpEro6tevrzJCREQUumwOJ56YtAF2pwu9m8Sib7NLbfazYzIa8L8e9TFuWBuUibBg85FE9PtoEaZvOopQEPKBjtkT6LB0jYgku2u1qoxIXhYvXqxKxCRLIgGOlI7t27cPRUU6g8k+ZLRkyRLUq1dPBTJCOsNJkwGZi7Nhwwa1fzKXxxNgdezYUc0ZWrt2rXrfErgRUdEKlblxVDx8Pm83thxNRHSERWVzvNWlfkVMfeRKtKpeBudT7bjvxzV4+e8tSLP7/4u6oqSDQCdCnYaBGR0i0jqlyZoqEhScPHkyx2yLdDP7448/VCZn/fr1uPXWWwslM5OT//3vf2otGJkbJA0UJHMzduxYlWkS//zzj5r/I/snzQ6kWYLsn2Ru5P1JJkoaFkipnbyPEydOqOCJiIrOlLWH0fSlGfhnw5FA70qxsOPYebR4eQZG/601idHLe2796iy8N2N7gZ9re8J5fDRnpzr/Uv/GqBB5+SLHuakUXQIT77kCI66sqS5/u3gv7vlhFZzO4hush3ygYwkroZ0y0CEid0maZESkpEvWwclpzo00A5A5LTIvRrqtyVyXli1bFtl+ymv9+uuvmDhxopr3I4GLZGckyySio6NVAHP11VerAEbWg5FgSNpRy7ygBQsWqK5xkgF67rnn8O6776J3795Ftv9EBHy9aA8upNrx7B8bcSwxJdC7E/TGzt2FM0k2jFu8D3O3+z63sbi+55MXUvHt4n2qU1p+2aVk7bf1sDlc6NawIgY0z9+aYBaTEf/XtxG+vL0Vwi1GzNt+AhNW5D43NZiFfDMCS1h4ekan+MajROQv8sF/6dKlma7zBA9Z
Mz+eMjCPBx98MNPlrKVs2ZWoyJyZ/LZMvv7669UmmZrExEQVwHjIui/z5s3L9rkk8JF1Y4gocA6cSsKmw4nqfGKKHf83eSO+uqO13+YVhpqEcymYuuHSvBAJDv97rDOiwjPPewzV9ywB8aKdJ3FNw5h8PdfXi/Ziw6FziAw347Xrmhb496xH41g82bMBXv5nC8ZM24quDSqicrSWPChOQj6jY3VndMwGJ+w2dl4jIiKiwjfNPZm7TsVSsJgMmLX1OP5azxK2nHy/dJ+aQN+yWjSql4vA0XMp6gO2Ht6zx9SN+WsAsOv4Bbw3c4c6/3y/RoiJurQ4ckEM61ADrauXwcU0B57+fUOxnG9m1EtGR6SlJgd0X4hIv+677z6UKlUq201uI6LQ8q/7Q6t8WHz46rrq/It/bcaJ86kB3rPgk5zmSC+Puveq2njzeq1N/88rDqosR6i/57s7aXNiZm455vPkf4fThSd/W68e17leBQxuVcVv+2g0GvDWDc0QZjZi4c6TmLTqEIob3WR0hC2V9bFEFBiyIKk0Dshuk9uIKHQcOpOE9YfOwWgAejaOxf1daqNRXBTOJtnwwp+bEArvLzHFf3Of/1h7SI1NtbIR6NYwBlfUKoc72muLHT/1+wZV1hVqMr7np3o3UI0DzqfYsXi3b4HduMV7sebAWZQKM2PMoIKXrGVVq0IpjOpeT51/ZeoWVW5XnIR8oGO2WGFzaa1YmdEhokCpWLEi6tSpk+0mtxFR6Ph3Y4I6bVuzrPoAKxO83x7cDGajAf9uSsC0fJYoBQPp7HX1O/Nx3djFKitRUNLR69tFe9OzX7Kui3iqVwM1J+Tw2WS8NX0bQknW9yy/H7LmjZiWYZ5SXqSJwTvubm3P9NHGqzDcfWUtxFeNVoGYzDUrTiVsIR/oiDR3zwV7GgMdIiIiKpr5OX2aXlqssXGl0iqzI56fsgmnLxbPecNfL9yDNIcTu09cxLt+aIm8YOcJ9VySkRjc+lLZVckwc3oJ2/dL92PZnlMIFZ73HBlmxo1tqqrrejfRfldmbDmmFv30xk/LDiDF5kSzKqVxa9tqhba/JqMB79zQDFaTEbO3HceUdYdRXOgj0DFY1SlL14iIiKgwHTmbjLUHzkIqiHo11r6l93jo6jqoF1MKpy6m4aW/it9aMZJB+HPdpYYK3yzei9X7zxToOb9xZzZualMVkVk6rHWqWx63tK2aXsLmjwxSMPC8ZwlyJMDzZP/Kl7LiXLINS3fnHdRJK+oflu1X5+/qVLPQu/nVjYnEo920uWYv/bWl2Mw100WgY4P2h+OwMdAhIiKiwiOlaaJN9bKomKX7VZjZhLdviFdzd6QD24zN2n2Lix+X7VfZHCljGtSiMqSCSSbCp9gc+V4sUya5y3hICVd2nunTEHGlw7H/VFJ6mVZxltN7lqyJzOcS3pQ2/r3+qAo8Y6PCM2UOC9M9nWuhcaUoFYy9+PdW9fMPdvoIdAxaoMPSNSIiIiqKbmu9m2bO5nhIkDCicy11/rkpm3AuqXgsaC4ZBAl0PBmEF/o3UvOPpATrw9k78/WcMpFe9GgUi6plI7K9j6yj8/qgpur8tyqDdBrFmSyGmtN79gQs/21OUAuA5kTmyHjm+NzRobqa41MULDLX7IZ4Ndds5tbjWHcq+NeECvkFQ4VNStdcgCOteKTZiIiIAkHmBvyz4Qi61q+I6Ait7Ls4kE5QS/ecRP9mlWAuog99Oe3HKncpl2fORXYe61ZPtRLec+KiWpDx3Rvj8/Vay/eeQt+mcUXynv9adwQnL6Sp7IpMnJcPva8ObIJ7f1iNLxfsUdc1qxLt9fPJHKU/1mhzPe50t1fOifw+Xt+yCn5fcwiPT9qAG1tr5Wy+kvWMrm1eCRUjfVtnRgKLfzYcxaEzOX9hXjLMhAHxlVE6wpLHez6U43tuV7Msypa0qvst33saHeuUz/Z5lu05jS1HExFuMRbq3JzsNKoUhQe71lHB7aS9RpRfsBcmk9b0
y1eVosMxoHllFCZdBDp2d0aHpWtEVFA1atTAyJEj1ZYXqZmePHkyBg4cWCT7RlRQXy3cg7emb1drcbw92PcP34Hy0IQ1KsDYlnAez/RuGLD9mO5uQtCqehnEls75w3S4RUrYmuGGz5eqD+8ju9XNMaORk+f/3KSCJfnAW9jvWWUQ3JmIO9prXcKElFr1j6+Ev9cfwROTNuCvhzuq8jxvTFi+H6l2J5pWLo02Ncrkef8X+jVSk/j3nryINwvQhU0W5fztvg7p3d28IQHZ/yatz/N+X8zfg7FDWqJ51ewDvp9XHMj1PUvA2rNxjFo/SPYzp0BHMltCgr9AfCHxYNc66nd9+7ELeGdm/rJ5nsCOgY6/MjrSzo+la0RERLl+ay/m7TihPtwW9gRnf1h38Gx6FuWrBXvQp0mcKg8LhGnu+TmeVsG5aVW9LOKrRKv9X3PgjE+BjrQnXrH3dJG9Z8kgbD2aiBIWU3pzAI+X+jfCkl0nsf3YeYyduzt9zZXcyOKW0klN3Nmphle/Z5Ip+eqO1pi44oBaJDO/86ekUYSUzEnLZG8cT0zB6L+1xhFX1i2v5sRkRzIwB04nYfDnS/B/fRpiaIfM70ve83dL9uX5niUTKIHOf5sS8MqAJpcFZPtOXsSsrcfU+eEdc8+EFRar2YiPb47HSxMXIq5yFRgNxnyv0VPYdBHoOIzuQMfO0jUiIqLs7DlxQWVEhHRU2nn8AurFRCLYeeYqyOrt8m35E7+tx98Pd/I6s+Avx8+nYOU+Lfjo7eXk8BbVtEBHPnz78s323lMX1YRwIZ/5C/s9e7qEXd+q8mUZhHKlwjB6QGM8NGEtPp27S2UkpJV2bmSy/fHzqagYGYa+TSt5vR+SKckpW+KNFtXK4NnJG1VTA1mYtEb5krneX4J9mUeVmGJXWZhxw9rkWCYoC6g+9dsGFUy99PcWrNx3Bm9c3zS9k5y377l97XKIjrCoznwSzMrljMYv2aeaAHSpXwF1KhZ+oJCTmuVL4qZaTvTp0wQWS87leoGmi2YEDndGx8XSNaLCIf910y56v9mSfLt/bpsPbV++/PJLVKpUCU5n5kmeAwYMwJ133ondu3er8zExMShVqhTatGmDWbNm+W2YNm7ciKuvvholSpRAuXLlcM899+DChQvpt8+bNw9t27ZFyZIlER0djY4dO2L//v3pj73mmmsQGRmJqKgotGrVCqtWrfLbvhF5uoV5LNrp2wrtgXD0XHJ6h6pvhrZR7Xl3HLuAT+bsKvJ9kW/g5d+RfBD3duHGltW08iXJ6PhijTuD1SA2stDfs2QQZm/LPYMg84Sklbbd6VIlbLmtAyPBgydwuqN9dZUdKCqSjepYp5xae+bJ3zeozFhu/t5wVK1rI3N71IKvucyFkqYJnw5piRf7N1L3l9Kz/h8vwpYjiT69ZykL7NEoJtvuaxLc/rrqoDp/Z4CyOcWNLjI6dk9Gx8aMDlGhkMDlde++lZN/734tsHj2CGDN/Vs5j8GDB+Phhx/G3LlzVdAgTp8+jenTp2PatGkq6OjTpw9ee+01hIWF4fvvv0f//v2xfft2VKtWsAmfFy9eRM+ePdG+fXusXLkSx48fx913342HHnoI48ePh91uV3N5RowYgZ9//hlpaWlYsWJFenmDBEUS3Hz22Wdq4ue6deuC+ls0Kn48H6rqViylsjlLdp/Mc5J4oEn5k3y4vqJWWbXmyssDmuCBn9bg03m71fyRJpVzzyz407SNCekf+r0lGR0hH4alRbPM3fHG2oNn1elV9SqokrXCfM+eDELX+hVQO4dSI/k/9fLAxli295SaM/TF/N146GptzZWspMxw4+FzKgN3a7vqKEqyn28MaoaeHyxQ2ZIfl+9Xc46yc+pCavpaRzInpUFslFfPL8GgBLuS4dp3KgkDP12MIe2q+fSeJSP466pDmL45AS9d2zi9fO3XlQeRlOZQf6NSRkd500VGx2lyZ3RYukaka2XKlEHv3r0xYcKE9Ot+
++03lC9fHl27dkV8fDzuvfdeNGnSBHXr1sUrr7yC2rVr46+//irwa8trpqSkqOBJnl8yO5988gl++OEHHDt2DImJiTh37hz69eunXrNhw4YYOnRoeoB1+PBhFZw1aNBA7ZsEbbK/RP6w/9RFbD6SqD5QPd+vUfq8jNxa3AZaUpodE5YfyPTttrTn7dM0Vs3heOK33DML/iTrmUgHNNHLi/k5HpL5kRbNEqxtOnzO68dJqZsnUJL3LHOC5D0/6eV7lrVcnvljowpmc5Mpg5BH0CudzCSbIaQj1z3fr8K9P1y+SXmXGNSysuowVtRkLtRTvRqo82/8uw0HTydle78X/tqsup9J1uyBLnV8LpH75+FOKjiUuTmeltLevueOtcsjKtysSkg9C7LK36IEnZ6fRXGYPxcMdJHRcRrDtDN2lq4RFQpLhJZZ8YKUjSWeP4+oyEgYjUb/vLYPhgwZorImn376qcra/PTTT7j55pvVvkhG56WXXsLUqVNx9OhRlWVJTk7GgQPah6mC2Lp1qwpMpCzNQ0rTZDwkY9S5c2cMGzZMZX26d++Obt264cYbb0RcnPbt8AMPPKCyOrK/cpsEOhIQEfkzG9G+Vjl0qlNezRE4m2TD+kPnVAexYCSdsOSDeLWyEbimoVbqI0Zf20StLC+T5z+btxuPXJN9ZsGfZN0TqYJqVqW0T00F5MNqi6rRqjxKgpfWNcrm+ZiLqXZsT0hM/0AtJJO1bI+WTcnrPf+++hD+b8pGVb41ceUBPHpNXTx8dd1su5B5Mgj1Ykqp34u8DGxeGVM3HMWsrcfVe8qJvFSgJtKL26+orkrLJKvz9B8b8ONd7TLdLh3F5H3ImLwzOD5f5XVlSlpVOeUXC/aoOUEGH8rN5PW6N4pVHfkk09q2Zlk1nofPJqNMhAXXtSjcTmWhRBeBjis9o5MW6F0hCk3yzZKX5WOQ+TEWh3Z/fwQ6PpJSNKmXlmBG5uAsXLgQ77//vrrt8ccfx8yZM/HOO++gTp06ai7NDTfcoMrIisK4cePwyCOPqFK6X375Bc8995zaH5m38/TTT6tA6N9//1Xbiy++iIkTJ+K6664rkn2j0PbvpkuLXBqNBnSoXU4FP4t3nQzKQEfmVnha7A7vWCPTh3TJkEi5z6MT1+HjOTvRo3GMV2VHBfGvO1DMzwr1LauXUR9ivZ2ns/7QWRVUSTYoxt0BLOt7lhK2WuUydweT0rgX/9yMX9wZmhrlIlRp1Qezdqqswfs3NUf5Uu4vhrNmEDp6l0GQ+8jzyHyv3DJLdSqUCmijC/kdf+v6Zuj14QIs3nUKE1cexA0ttJ/dmaQ0PDdFK1m776paBSoFlNe5v0ttdG9UEclpTtTN7j07HUDSKSCifKZjYt9mWqAjf5vSWtvTdGNIu+q5lzjK8x3bDBxcDhxYCiRsAsrWBKq0Aaq2BSq1BMLyaGIgtYrnE4DTu4GLJ4DkM0DyWe00RTs1JZ1B5xNHYEr8HoiKBUrFApFyGpPhNA4wB3Y9Ll1ldAzM6BDpXnh4OAYNGqQyI7t27UL9+vXRsmVLddvixYtVMOEJHiTDs2+fdqAvKClFk7k4MlfHk9WR15NMkuyDR4sWLdT2zDPPqPk8UvImgY6oV6+eKl177LHHcMstt6jAiIEOFZSU7mw4dE59yy4fkEWH2uXTA52iyIj4av7OE2qxzcgwMwZns3jktWptF8ksHFOT4yc/0KHQFtWU8qale0553VY6K8noZCxHy4vnfs3d83syv+cjKpsiXdh+ubtNpo56Mo9HuupJvCILlsq8kz/XHcb/Td6EhTtPou9HC/HxLS1V9kB4MghSajXQhwyCdBnL74KeRUk6rj3eoz5enboVr03dio61tID+tWnbVSmidDTz6ndfgoKNvwHrfwYsJYCSFbStVEWgZHl1vk7JioDVAWxbDJzZC5zZB5yW073A2QOyoj1Qvh5w9fNAw/7qy0NZQ0d+v48lpqqAU+Y2SZOD29tnmeOTlgQc
XgUcWKZtB1cAaVr3xHQntwM7pmvnDSYgprEW9FRpC0RX1fZFgppT7u30HsB2EbmRvyY1Yrv35HwnSTTENgUqtwYqtwKqtAbK1tK+HC0iugh0XGb3NxTyi0REuiflazIXZvPmzbjtttvSr5e5L3/88YfK+sg3k88///xlHdoK8pqShZF5N1Ied+LECdUY4fbbb1dd3vbu3au6wl177bWqM5yUs+3cuRN33HGHKp974oknVHAj5WqHDh1SDQ2uv/56v+wb6Zsnm9OuZrn0b/Q9ZUqSZZC5MBHW4Pq44Pl2+6Y2VVEq7PJ9k7/f165rghV7T6lJ4F8u3OPzPAtvzdicoObHNK4UherlvMxsZ9C0SmmVkUpITFFd5OJKl/Bufk6WNsvae26KFXvnq8D1m8X7IeGGZFeenbIFF1LtqkPbhze3SF+IclDLKipjIUHQruMXcMtXy/BEz/q458paGTII1bxuklDcSPmclIatOXAWr05eicY4iz93m1XQLwu65tmuWwKEqaOA3XMKvjMndwC/3q4FBN1eQljNzujWKAaT1x7G69O2qrv0a1YpPYuHI2uB1eO1ICvtUvdOxRqpBTLVrgBim2mBy6EVwMGVQOIhIGGDtq38Ouf9kbVxoqsBkZWAEtFAiTJAuPu0RDTs1kisXr8FrRrVhDnpJHAhQcsCXTimbeePAY5U4PBqbfOQ5/AEPVXbAXW0xkCFJbj+cxUWd+maQQaciHRPGgGULVtWBRO33npr+vXvvfeeajPdoUMH1aDgqaeeUk0C/CEiIgL//fcfHn30UVUyJ5clUJHX9Ny+bds2fPfddzh16pSam/Pggw+q5ghSOifd4STbJI0LZN8kKzV69Gi/7Bvpm2d+jkzi96heLkKVRsk3+rIeiHT3ChY7j11QGQj5MCqLMuZEPhBKYwVpSiDlWdKyt07FyEJbJDQ/ZWtCgkiZ8C7NINbsP4u+zXIOdKTsdq27xM0zPyen9/zR3N2IL2PEyqXa5H/J1Hx8S4tLH5TdpITszwc74v8mb8SUdUfUBP1ZW45dyiBc4YfOaFL2dHQdcHiN9gE96bRWviwlVHJqldNSl66TDEhUJa30SUqgTOacy7QSjwBn92uZkbMHgYiyQL1eWqYiDyYD8OGVDiyb9CX6HlqKCEMq2lkb4nC929Gics+cH+iwAcs+BeaOAezJgCkM6PiIVrJ14YRW7pVxk+tEmepaGVmZmplPwyKBpWO1TYKC7/oDta/BTbUexuS1UM0qxN1tygMrv9ECHAlUPGScqncAqrXXggfJ2BizBmkPaCfnDl8KeuT0wnFtH8rWBsrVvnQaXT3XsjOXzYaE/SXgat4HyK4DqGS6JHMl7+fQKi3rdHSDVvq2e7a2xTVnoOMPLpP2R21koENEqm7aiCNHLm+eUKNGDcyZk/mbOQk2MvKllE0+lGTUtGnTy57fQ7I6kydPzvY2q9WKb775Rq2f45cGDkRuEsjIgpVSSdIzQ9mVZAdkvRFpcSvla8EU6Hy3TFtbSsrs8pr4f0OrKvhnw1HM33FCzV8ZN7yN6g7mL2eT0rBk18kCBTqe9XQk0JEgpm+znJ/n4OlktZCk1WREk8pReb7nlSe0/xcPdKmNUd3r5Vi+VzLMrObWtK1ZDi/9vVkFOaJ/s0qomCUwUh/y984Hks5oAYjRApjcm+e8dLlN2KgFNUfWaBmF/JLMgme+hwQ/YVHAuYNaYJN4WFaDv/wx0x7XMhkN+mpbTJPM5VISeG2cpAKGqsc2oWqGYbnCuBXY9Szw4adAm7uAlkOBkhkW7ZQP7n89ChzbqF2ucSXQ/0MtOCiIq58D2owAFrwNrB6nAoErds/Gp2Ed8KutE24vvR6Nf16oLefg+RK/0QCg1TCgekfvy8FKVwZKXwc0LuSyZ9kfFUDVBJreoF0nc+WPbbqU5ZGgqpDpItDxRKQGlq4RERGl+9e9dk6bGmUvCwCkvEkCnWBaOPSCDZiyTttnb9b4kYBtzKCm6PXB
AhVI9PlwET66pbmag+QPMo9Fvm2XjIysFJ9f0ib6h2X709fHycnag1oA0qhSVI5lVYa0C3i/2UHMPfgzohxn0KR6DOJSooFpVsAcrn0mklOZTyLBgJQ3WUuqsbq1XTXVOe7BCWtw8nwq7rmqlvak8qWNZGTWT9QCBJk876syNYBKLbQtqvKlhaNTL2ilV2oR6AtA6nmt9CnxKHD+KOByaKeySdCUlQRXkr2RMqvSMt9kj3sSvrs8a94YoHQ1oEEfLeOxcwaw6Q8tEyPM4XA2GojnDrTC/GNh+KXVVlTZM0kr8Zo9Gpj3BtB0MNBqqPa4FV8ALqdWwtXjNaD5rf6bcxIZA/R9B2j/ADDnNWDTb+hjWII+1iWAe3dRvr62L/G3aNmr4sRsBSq31DaMKJqXhB7IH7T8LTiZ0SEi/5BmBlJWlp3q1aur+T9EwU7mb+S0yKUnGJCWxTLhPhBrnmS15JgBqXan+jDe2stucJWiS+CPBzrigZ9WY8exC7jt6+UquyFzdqQrlj8CRV8WCc2OpwxN5hPJuis5tTPOuH5OJid3ATv/0z7E71uMsk4b1Aw+eRppsqY1Wsue0azNmZDMRI1OaFK1Heb8rwsupNhR2n4CWPStNtH+xLZLj5EMS4UGWjZFMjxOG+Cwu09t2n0qNrwU2MiWnw/lUpompV9SniabBDupiUBUFS2wUXNIYi8v07p4EtjxH7BtqjZ/5twBYPnn2uZRoSHQejjQ7EYYS5TBs0kpaDTtP8QMeAXAS8DmP4DlX2gB3roftc2j2c1Az9e0ZgOFQSbs3/AN0PFROCXYOrwWxrrdteyNBKZcQ8drugh0DBZtcqVJ/gCJiPxAmga0a5d57QUPS3b1ykRBRia+exYjzG6RS2lZXD8mEtuPnVcLS8pE6ECSAGBhgtGndsce0kHrzwc74bkpm1TL3ndm7FBzj6RcK78BnKzhs8hdtiYr2eeLlF/tX4oaSSfxf+GrEGE/h6Qfv4PVdV7Lmkg7XylRCo9SJVt9jtoRb7Gg+emqwKw4LQuya9blpWFlasJRpzs2JaShSYO6MLlsWtmQdJ91uE+lfEtaEEsZmJzKtvAdlSExVWmN0vK6exdIOufSl8ZSBhZ/K1CrS87zZvxJAhgJZGRTWQAvSQDSYoi2SVeyPXO1oEc6kkmbZQlw5DTD71CYxYRIz79uS7iWqZGsyaGVWoC05U+gdBWg3/tA7atRJOKawXjb70XzWiFKF4GO0d11zcSMDhH5SWRkpNqIiqvp7myOZEayTlDPWL4mgY6sNRLoQEeyT4k2A2Iiw/I1H6aE1YR3b4xHu1pl8fyUTWoOi7RU/uTWlr6tFSQZi4QN2LfkP3xonIsW1v2IW3gV0PddLSDxlmQcJg1TJVwGTyGPfCrLbhrgOe1ENZqX5IU0RNOaol0q36rREajbA6jbU80Xcdrt2DdtGhq17QNTTl++SEmaTOTfuxDYtwjYt1Cb9yKlXx7VOgDxNwONBwLh+V9TJmCsEZfm6vhKAiHpXiZbyjnAUrJoAjzyG138tAzu0jWTk3N0iPwp62R7Kp74c9Qnbxa5lIYEsjCnNCQobNKi+fj5nNe7G7dUa0IwpF3VfK1U7yFrvDStXBoP/rQGe05exMNfTMWLbezo0aAcDJLFkFKujBPr5bK0zz3gXoBRJlHbkhAPIF6CDvnz2firNvn+1onaXJS8SNesf0Zp808qNgYqNsD6UybMOeBAlcqVMfjKeCCinDYPRMrDUs5h96Ej+HLmesSFpeHRThVhkLkt8uI1OmkZFunclZ8P8rK/srW8XQt8JDskAY/MlWnQT5tMTsUzyCN9BDpGd+mamYEOkV94SrOSkpJQokTuaz5Q8JOfo2DJnX4cT0zByv2n1fneGdpKZ9WuVjm1xsuB00lqYdG8upwVJNge9NkSrM9jMr7F4MLNbark/4VkXawT29Dw8DL8V30JzqUuQnnbUWA9tM3b/Q2PxrykmljhqI/b+3RGpaUvAye2
Al9dDdz0o9bqN9sHuoC5rwML3tIuSxnYtR+pgOr8zpP48JvlqHqhBAY3vbw0as6RPfjFURY9asTAcHVrFAoJfKR7WEE7iBEFCX0EOlYto2N2MdAh8geTyYTo6GgcP348fQ0Yb+vlZQFOWRcmJSWFrZK9VFhjJh8uJciRn6P8POXnSvowfXOC+szdslp0rgtUymKczatGq7k8ktW5uW21QtmfTYcT04McaZ2cHfnVvzrWhjIRecypkTcm67RICZZMXlcT2Q9ra3gcXKaVIEnQJA2s5O8LRmx3VkaaMRxNYiNgkixL1gn20qFMVpGXieDVrsCfh0ph5K8b1NyfpzpeBTTtCvx8M3B0PfDdtVq7YZkfkpE851+PAOsnaJc7Pwl0fTZ9nkh81dLqrLSQPnE+Vc2RykgWbxUtfSmzI9I5XQQ6JvkHpQIdNiMg8pfYWO1bYE+w48uH6+TkZJUJ8mUysZ4V9phJkOP5eZI+TN2gdQvzZq6LzNNRgc7uU4UW6EzbdKl72dgh2U86t9lsmDZt2qWgQRYjPLlTW1FeTqXk6rwENUe1FdlzYonQVmX3LK5YuTXu+2QN9p9KwivNm3i1QObU6avUaR9PEwdZ32X4v8CU+7VJ638+oHUp6/aSNqFeysB+vUPrAGYwAf3e0zpoZRAZbkG9ilrzB1lPp0fj2Ow7rlXN0nGNiPQe6GjfiliY0SHyG/nAHRcXh4oVK6oPIN6S+y5YsACdO3dmqVQQjJk8HzM5+iLZghX7TufYbS2rjrXL4aPZO9XCmE6nq8AtmbML5Ke52zRnW0ZnS1ZzY4x7FqLtnvkwf/4KcGZv9gtFZlSywqVFJmUrV1fLyMQ21ebeuEn+aHiHGnjp7y0Yt2gvhrStlut7vJBqV40MRJ+Mi3taSwI3jNfWbZHStCUfaQFYj1eB34Zpc3gkyBo8HqjXM9vnlrbRKtA5eDZToCMd8hISU1QZYdMqnCtC5C1dBDpmq5bRsTCjQ+R38iHZlw/Kcl+73Y7w8HAGOl7imJE//ecuW4uvGo0qZSK8WuOlhMWEUxfT1IfwhnE+dBbzgqzTI9mUcIsRXetX1MrFjqwF9s4D9szXWgI7UlWzsUz5JwkaytUBytfTNplXIgtRSlAj7YjdHVe9Mbh1Vbw7c4dqTiBBTNcGFXO87+ytx1Sr61rlS6r225fX1/2ftj9/Pgjs+BfYMV1rGhBRHhjyq7ZmTQ4k0Jm48qDK6GSXzZGFSSOsuvjoRuQXuvhrMYVpc3QsYKBDRET65smepJdd5UE6nLWtWVYFADJPx9+Bzox1e9HasA03VzyOkr9/B+xfrC0KmVFkJThrXIlNZyxodOVAmGMbqutUYOEHJcPMuLlNVXy1cC++WbQ310DH061Osk85lpI2G6x1K/v5FuDicaBsbeC237SFIHPR0r1w6PqD52B3OGF2z1da417vyHM7EXlHF4GOxd2MwAqWrhERkT5J2dnYubuwdM8pddmXtWg61SmfHujcfWWtguwEcHI7cGiVatPsOrwKjyRsxmNhTkB2S9s1IDwaqHklUPMqrXVyuTpw2O3YO20aGsrlQshsDu1QQwU5sgjo9oTzqB97ebvmi6l2zN1+3Lvxk3lA9y4Atk8DGl8HRJTNcx9qVyiFyDAzzqfaVfascSWtTE1K2TwZHyLyni4CHXOYVrpmddm1biycAE1ERDpy6kIqHvt1PRa455YM71jDp1bRHeqUU6fL955WZVs+rWNjT9Mm4W+cBOyckSlbI0djKUk75iqDcvXbw1ytHVDrKiC2mTaJvwhJGZ/MWZq2MQHfLtqLN29odtl9JMhJtTtRvVwEGnmT2YqKA9rc5fU+yNyg5tWisXDnSVWuJoGOjPfGw+fSywiJyHu6yugYDS64HGkw+FC3S0REVJyt2ncaD01YqyazyzyYVwY0UXNSfNEwNgplS1pRP3ktjk1fi6q1mwAVGgBlama/UrxkbqQEbdNvWhey5DOZ59ZUaqHmqvx9qhJe
W18STRs2xFe3tkGg3dWppgp0Jq87jCd71Ue5UmHZl601iSu0rpHSVc0T6Nx2RXVsPZqogp0yERbUKFc46xgRhSqfAp0xY8bgjz/+wLZt21Sb0w4dOuDNN99E/fr1c33cpEmT8Pzzz2Pfvn2oW7euekyfPn1QVKzujI6wpSbDykCHiIhCnHQz+3rhXrwxfRscThdqVSiJz4a0yrYkKy/GYxvwU9gYNHSsBqSzstZdGTBZtW5mFeoDFRtqDQEOrwE2/aG1evYoFQM0HgQ0uV4LctzB0YfvzUcCLuCpjN3LAkjmwMRXKY31h87hp+UH8Mg1ddNvS05zYM624+ltsAtLC/c6OZ6GBJ71cySbw5b8RIUY6MyfPx8PPvgg2rRpozoAPfvss+jRowe2bNmCkiVLZvuYJUuW4JZbblFBUr9+/TBhwgQMHDgQa9asQZMmTVDUgU5aWorqAElERBSqziXZ8Phv6zFzyzF1+dr4Snh9UFO1AKhPZK2aOa+qsrOGAFJdZqwPb4O2ZZO19WtsScDxzdq2Octjw0sDDa8Fmt4A1LjyslK0ncfOY9fxC2qB0GsaxiAYSCBxZ6eaeHTiOvywbD/uvaoWwszafs/bfhzJNgeqlCmBJpX925Aho+ZVtHk40gHuzMU0rp9DVAA+/cebPl1aJF4yfvx4tYbG6tWr1foO2fnwww/Rq1cvPPHEE+ryK6+8gpkzZ+KTTz7B559/jqJgNZvUP+cwgx225CSAJa5ERBSidhw7jzvHr8ShM8kqiHihfyMMaVfNt2xA0mlgwTvAyq8Ah9bI52L9Qei54Sok2Cpi7WPdEWk1AecOagtjqm27tm5M6SpacFOnW64tnqe6u79dWbc8osKDp226NBkYM22bKvX7Z/1RXN+qirp+2qaE9GxOYWZWypS0qtbVEuisO3QWaw9eyugQURHO0Tl3TpscV7Zszp1Eli5dilGjRmW6rmfPnpgyZQqKikzuS4EFYbDDlpZcZK9LRERU1J6bvEkFOdXKRuDTIS3RpLIPC0ymXQRWfAksfB9I1Y7xqutZt9EoWak5TIfmwn4qCTd/uUw9d/Vy1YEy1XNcADM3nvkuvnR/KwoWkxF3dKiOt6Zvx7eL92JQy8qqAYGsnyN6F8H+SkMCCXQkI3fwdLLqoRRflQuFEhVZoON0OjFy5Eh07Ngx1xK0hIQExMRkTknLZbk+J6mpqWrzSExMTF8d3JcV2D3kMWmQb4uSkZJ0MV/PoSee8eE4eYfj5RuOV/EaM/6cipdjiSlYuf+0Oj9hRDuvFgRVUhK17M3SsUCSu8dzTFOg+2igzjXpd3vr+ma4/6c12HwkEf0+XoS3b4hXncp8JSVr0j7ZYjKgW5CUrWV0S5tq+Gj2TvU+V+w9jbPJNiSlOVA5uoSaw1MUc4X+WHMYf6w5pC7XqxiJyCDKehGFfKAjc3U2bdqERYsW+XeP3E0PRo8efdn1M2bMQERE/jqOtFWBDrBy+WKYd2rpcsqdlBiS9zhevuF4FY8xS0pKKvLXpPybvilBraLQslq0d0GOlKgt/wJY/hmQ4s7glKkBdHkGaHrjZQtytqtVDlMf6YSHJ6zFqv1ncN+Pq1Wnsqd6NfCp5fS/7rK1jnXKo3RE8H2Al/KxQS2rYMLyA2ptnRJSpqe6reWySKgfedbLSbE5M10moiIIdB566CH8888/WLBgAapU0WpXcxIbG4tjx7R0r4dclutz8swzz2Qqd5OMTtWqVVXjg6ioqHx9I3lijfaPtFnjRqjR4mqfn0NPZLzkA1X37t1hKYRF2UINx8s3HK/iNWaejDoVD9PcAUSe5WAXTgBLPwFWfg2kXdCuK18PuPJxrTNadi2j3eJKl8DP91yBt//bji8X7FGBgHQG++TWlirj4dV+bgrOsrWM7uxYQwU6M7ceQ7i7IUFRlK2J+jGRKGExqeYHgoEOUREEOtKq8uGHH8bkyZMxb9481KxZM8/HtG/fHrNnz1Zlbh5ywJbrcxIWFqa2
rOQAn9+DvM39Vl0OGz9ceakg461HHC/fcLyKx5jxZ1R8HD+fghX7tLK1HMvJzh4Aln4KrB4P2JMvlah1flzrkJYlg5PbPJZn+zRE6+pl8Pik9aozWN+PFuL9G5uja4OKuT5278mLam0Ys9GAHo2Cr2zNo07FSFxVrwLm7zihAo7YqPAi63xmNhnRrEpptUCrYCMCovwx+lqu9uOPP6oW0ZGRkWqejWzJyZcm+N9xxx0qI+Px6KOPqm5t7777rlp/56WXXsKqVatUVqgopRms6tTBZgRERBSC/tt8TJWtxVfNpmzt6Hrgt7uAD5trZWoS5FRqCdwyEbhvIdB4oNdBTkY9Gsdi6iNXqg/lZ5NsGD5+Jd6avg12h1ZylVvWqUOd8oiO0I7NwUrK8jx6N41VzY2KSkv3ejqRYWbUqVCqyF6XKJT49F/ts88+U53WunTpgri4uPTtl19+Sb/PgQMHcPTopTkwsqioBEZffvkl4uPj8dtvv6mOa0W1ho6H3Z3RcdhSivR1iYiIioJn3ksfTzZHop6ds4DvrgW+6Axs+k3KGrQuarf9AYyYA9TvLYvHFOh1q5aNwKT72uOO9tXV5U/n7caQr5fjeGL2x9t/N2XZzyAmra8bV4qCxDcDm1cu0teWbJLoXL9CkQZYRLouXcuLlLRlNXjwYLUFks1gAVyAk4EOERGFmJMXUrFsj9YtrU+j8sC6n4ElH2sLeQqDCWgyCOjwMBAX7/fXl0U1Xx7QBG1qlMXTv29QJVd9PlqEj25urjI3HgdOJWHT4USYpGytcfAHOtJ44Ie72qludg3jCm+R0Oxc4W78IIEkEQVgHZ3ixObuuua0XWpbTUREFApmbD4GpwtoWrk0qs4fpWVvhKUk0GoocMX9QHS1Qt+P/vGVVAbkgZ/WYFvCedz2zXKM7FYPD3Wto7IS09zZnPa1yqFsyeAuW/OQ/QzUvjauxLVziArC94LcYsouGR3JSjGjQ0REIcZTDnZf3A4tyJEMztXPA6M2A73GFEmQ41GrQilMfqAjbmpdVQVf783cgWHjV+LUhdT08jqZ70JEVNh0F+iwdI2IiELJ6YtpWLL7FEogBT33v6Nd2f5BrZNaicB065J1Z968oRneGRyPcIsRC3acQO8PF2L9oXNqvkvPYlC2RkTFn24CHYfBXaVnZ+kaERGFjplbEuBwuvBK6X9gPn8YKF0N6PI0gsENrargzwc7oVaFkjh+Xjv+tqtZDuVLXb6EBBGRv+km0LG75+jAzowOERGFjmkbE9DIsA+DUv/Uruj7LmAtiWBRPzYSfz3UCQObV1IN3m5tV3RldESkb7ppRmA3uufo2NMCvStERER+cTYpDUt3Hcevlm9ghANoNACo1wPBplSYGR/c3AJjBjVTZW1EREVBNxkdp3uOjsHBjA4REYWGmVuO4UbDLDQ37gaskUCvNxHMGOQQUVHSTaDjSA90OEeHiIhCw5K1G/GkeaJ2oduLQFRcoHeJiCho6CbQcRq1Kj2Dg6VrRERU/J1LtqH7wQ8QZUhGSsXmQOs7A71LRERBRTeBDjM6REQUSjbNm4Q+xuVwwIjw6z4GjCwLIyLSZaDjdDcjMDKjQ0QU0hwOB55//nnUrFkTJUqUQO3atfHKK6/A5XKl30fOv/DCC4iLi1P36datG3bu3IliI+0i6q9+SZ1dW+kWIK5ZoPeIiCjo6CfQcWd0jE5mdIiIQtmbb76Jzz77DJ988gm2bt2qLr/11lv4+OOP0+8jlz/66CN8/vnnWL58OUqWLImePXsiJaV4NKxJnT0G5e3HcMhVHlG9Xwj07hARBSXdBDoud0bHxIwOEVFIW7JkCQYMGIC+ffuiRo0auOGGG9CjRw+sWLEiPZvzwQcf4LnnnlP3a9asGb7//nscOXIEU6ZMQdA7vhWWFZ+qs59F3Ie6VWICvUdEREFJN+vouNzNCEwuBjpERKGsQ4cO+PLLL7Fjxw7Uq1cP69evx6JFi/Dee++p2/fu3YuE
hARVruZRunRptGvXDkuXLsXNN9982XOmpqaqzSMxMVGd2mw2tfnK85j8PNY0+1UYXQ7McLRC6Wb9YLfboQcFGTM94nj5huNVvMbL29c16y2jY3Yy0CEiCmVPP/20CkQaNGgAk8mk5uy89tprGDJkiLpdghwRE5M5EyKXPbdlNWbMGIwePfqy62fMmIGIiIh87+vMmTN9un9U0n503f4PnC4D3rbfhOvO7MC0aTugJ76Omd5xvHzD8Soe45WUlOTV/cx6a0ZgYqBDRBTSfv31V/z000+YMGECGjdujHXr1mHkyJGoVKkShg4dmq/nfOaZZzBq1Kj0yxJIVa1aVZXERUVF5evbSPmA0L17d1gs2vHJG6ZJt6vTv53tYStbD3ff0BEGgwF6kN8x0yuOl284XsVrvDxZ9bzoJtCBO9CxsHSNiCikPfHEEyqr4ylBa9q0Kfbv36+yMhLoxMbGquuPHTumuq55yOXmzZtn+5xhYWFqy0oO8AU5yPv0+CNrgR3/wgkjPrJfhz5N42C1WqE3BR1zveF4+YbjVTzGy9vX1E0zApg8gQ5rL4mIQpmUNBiNmQ9vUsLmdDrVeWk7LcHO7NmzM307KN3X2rdvj6A1d4w6+cvZEbtdlVWgQ0REOdNPRsegvVUzAx0iopDWv39/NSenWrVqqnRt7dq1qhHBnXfeqW6XUi8pZXv11VdRt25dFfjIujtS2jZw4EAEpUOrgJ3/wWkw4QPbQFQrG4HGlXwvmSMi0hPdBDoGd0bHCpauERGFMlkvRwKXBx54AMePH1cBzL333qsWCPV48skncfHiRdxzzz04e/YsOnXqhOnTpyM8PBxBae7r6mR5ZHfsS47DvU1jdTM3h4gov3QT6Lg8pWtgRoeIKJRFRkaqdXJky4kECS+//LLagt6BZcDu2XAZTHjxbB91VV+WrRER5Uk3c3QM7nV0rLAD7jptIiKioOfO5hyqMQg70sqjcnQJNK1cOtB7RUQU9PQT6LgzOorj0qJvREREQWvfYmDvfNU59BvDIHVVH5atERF5RTeBjjFDoOOypwR0X4iIiLwyT+u0Zm9+Gybt0g7Z7LZGROQd3QQ6JqNJrSQtUlOSA707REREudu7ANi3EDBZsThuKC6mOVCpdDiaV40O9J4RERULugl0zCYDUqFldWxpzOgQEVEQc7nS5+ag1TBM2aN9UderSRzL1oiIvKSbQMdkANLcTeZszOgQEVEw2zMXOLAUMIUhtf2jmLXlmLq6b7PYQO8ZEVGxoZtAR74AS4VVnbelMdAhIqIgNv8t7bT1nViUYMH5VDtiosLQomqZQO8ZEVGxoZtAR6S5S9fsLF0jIqJgdniNdtrmLkzbmKDO9m4SB6ORZWtERN7SVaBjM7gDndSkQO8KERFRzvNz3MsgpJkjMXOLFuiw2xoRkW90FuhopWuONK6jQ0REQcphSz+77OBFJKbYUSEyDK2qs2yNiMgXugp07O6MjsPGOTpERBSkMqz1Nn3rKXXaq3EsTCxbIyLyic4CHU9Gh3N0iIgoSDnS0s/+u/W0OmXZGhGR73QZ6DhtLF0jIqIgZdeOUU6jBWeSHShX0oq2NcsGeq+IiIodXQU6DqMn0GHpGhERBXfpmqdTaM8mLFsjIsoPnQY6zOgQEVFwl64lO7VFrvuybI2IKF90Gei4bJyjQ0REwV26luIyo0yEBe1YtkZElC/6DHQydLQhIiIKxoxOqsuCno1jYTbp6lBNROQ3uvrv6TSFqVOX/VJHGyIiouCco2NGb5atERHlm64CHZdJy+gYmNEhIqJg5f4yTpoR1I+JDPTeEBEVWzoLdJjRISKi4ObIkNEJM+vqME1E5Fe6DHSMDmZ0iIgoODlStSUQUl1WWBnoEBHlm67+g7rMWqBjcLC9NBERBSdbGjM6RET+oK//oOmBDkvXiIgoODk8gY7Bwo5rREQFoKv/oAZP6ZqTGR0iIgpO9jTtGGU3aA10iIgof3SZ0TEyo0NEREHKYdPm
6DgMlkDvChFRsaarQMdo0QIdk5OBDhERBSenLTXTItdERJQ/ugp0DJZwdWpi6RoREQUpBwMdIiK/0FegY/YEOrZA7woREVG2XJ7SNQY6REQFoqtAx+QuXTO7WLpGRETByWnXMjpOEwMdIqKC0FegYy2hTs2co0NEREHK5Q50XMzoEBEViL4CHWZ0iIgo2KVndLRjFhER5Y++Ah2rNkfHCs7RISKi4A50wNI1IqIC0VegY3GXrrkY6BARUZByr/XmYkaHiKhAdBXomMM8GR2WrhERUXAyeDI67kWuiYgof/QV6HiaEcAJOOyB3h0iIqLLGBwMdIiI/EFXgY7FndFRPAcSIiKiIGJwl64ZGOgQERWIrgIda8ZAx1MaQEREFESM7iUQGOgQERWMvgIdixU2l0mdd9hSAr07REREOQc6lgxfzhERkc90FeiEmY1Ig1mdt6UkB3p3iIiILmNyl1abzAx0iIgKQleBjtVsRCos6rwtjRkdIiIKPib3EggG9yLXRESUP7oKdMxGA9I8gU5qUqB3h4iI6DImd+maiYEOEVGB6CrQMRgyBjosXSMiouBjdrkDnYwNdIiIyGe6CnREmsGqTu1sRkBEREHI7C5dM1u0td+IiCh/dBfo2N0ZHTszOkREFGxcrkuBjpWla0REBaG7QMfmzug4bFxHh4iIgozTDiNc6qzZytI1IqKC0F2gYzdqGR0HMzpERBRs7JfKqi1hEQHdFSIi3QU6CxYsQP/+/VGpUiU1uX/KlCm53n/evHnqflm3hIQEBILdndFxZjiYEBERBQW71ohAMKNDRFTEgc7FixcRHx+PsWPH+vS47du34+jRo+lbxYoVEQgOo1bz7GQzAiIiCjbuxUJtLhPCrNoC10RElD8+/xft3bu32nwlgU10dDQCzeEuXWOgQ0REQceuBTqyuHWYWXfV5UREflVkXxc1b94cqampaNKkCV566SV07Ngxx/vK/WTzSExMVKc2m01tvvI8Rk7tRnczgrSUfD2XHmQcL8obx8s3HK/iNWb8OQUm0EmDGWFmU6D3hoioWCv0QCcuLg6ff/45WrdurYKXr7/+Gl26dMHy5cvRsmXLbB8zZswYjB49+rLrZ8yYgYiI/E/OnDlzJiypTnX+2KH92DttWr6fSw9kvMh7HC/fcLyKx5glJSUV+Wvqmrt0TRa3ZkaHiCjIA5369eurzaNDhw7YvXs33n//ffzwww/ZPuaZZ57BqFGjMmV0qlatih49eiAqKipf30jKB4Tu3btj6e7JQCJQsVxpNOrTJ5/vKrRlHC+LRSv1o5xxvHzD8SpeY+bJqFPRcNlTYZBAx2VGuIWBDhFRQQRkpmPbtm2xaNGiHG8PCwtTW1ZygC/IQV4e6zJrXWwMThs/ZOWhoOOtNxwv33C8iseY8WdUtKSsWg7MqbCitImla0REBRGQr4vWrVunStoCwWWyZqqDJiIiCha2tJRLc3SY0SEiKtqMzoULF7Br1670y3v37lWBS9myZVGtWjVVdnb48GF8//336vYPPvgANWvWROPGjZGSkqLm6MyZM0fNtwkEl0nLFBncddBERETBwpaajBLuOTpWEwMdIqIiDXRWrVqFrl27pl/2zKUZOnQoxo8fr9bIOXDgQPrtaWlp+N///qeCH2kk0KxZM8yaNSvTcxQpT+kaAx0iIgoy9vSMjgVGo8zWISKiIgt0pGOay+XK8XYJdjJ68skn1RY03KVrRpauERFRkJE5OsJucJdZExFRvukvL27xNCNIC/SeEBERZWJ3L2btWdyaiIjyT3eBjsGszdExsXSNiIiCjNMd6NgNDHSIiApKt4GOkRkdIiIK1tI14+VLLBARkW90F+gY3aVrJgY6REQUZJw2rdrAydI1IqIC02Ggo31LZmagQ0REQcZp1zI6TiObERARFZT+Ah2rrFAAmFwMdIiIKLi4bNqxycHSNSKiAtNdoGNyl64xo0NEFLpk7bbbbrsN5cqVQ4kSJdC0aVO1DpyH
LJPwwgsvIC4uTt3erVs37Ny5M6D7rPbLnqydupdCICKi/NNtoGNhRoeIKCSdOXMGHTt2hMViwb///ostW7bg3XffRZkyZdLv89Zbb+Gjjz7C559/juXLl6NkyZLo2bMnUlK00rGAsWvHJgY6REQBWDC0uDOHaaVrZtgCvStERFQI3nzzTVStWhXjxo1Lv65mzZqZsjkffPABnnvuOQwYMEBd9/333yMmJgZTpkzBzTffjIBxL33AQIeIqOB0m9GxuhjoEBGFor/++gutW7fG4MGDUbFiRbRo0QJfffVV+u179+5FQkKCKlfzKF26NNq1a4elS5cioOzuQMesHauIiCj/dJjRcZeuSUbH5QIMhkDvEhER+dGePXvw2WefYdSoUXj22WexcuVKPPLII7BarRg6dKgKcoRkcDKSy57bskpNTVWbR2Jiojq12Wxq85XnMZc91hPoGC35et5QluOYUbY4Xr7heBWv8fL2dXUX6FjcXdeMcAEOG2BmeQARUShxOp0qo/P666+ry5LR2bRpk5qPI4FOfowZMwajR4++7PoZM2YgIiIi3/s6c+bMTJdrJp5Rp6fOXsC0adPy/byhLOuYUe44Xr7heBWP8UpKSvLqfvoLdNxzdBRZr4CBDhFRSJFOao0aNcp0XcOGDfH777+r87Gxser02LFj6r4ecrl58+bZPuczzzyjMkQZMzoyD6hHjx6IiorK17eR8gGhe/fuqmmCx8FdH0EKDsrHxKFPnz4+P28oy2nMKHscL99wvIrXeHmy6nnRXaBjzRjoONh5jYgo1EjHte3bt2e6bseOHahevXp6YwIJdmbPnp0e2MhBU7qv3X///dk+Z1hYmNqykgN8QQ7yWR9vci99YAqL4IetHBR0zPWG4+UbjlfxGC9vX1N3gU6YxYRUlxlhBjtctmRwhg4RUWh57LHH0KFDB1W6duONN2LFihX48ssv1SYMBgNGjhyJV199FXXr1lWBz/PPP49KlSph4MCBAd13o1OrOzeauWAoEVFB6S7QsZqNSIUFYbDDbpNzREQUStq0aYPJkyercrOXX35ZBTLSTnrIkCHp93nyySdx8eJF3HPPPTh79iw6deqE6dOnIzw8sN3OPBkdg4WBDhFRQeku0Akzm3BRhTfJsKUmM9AhIgpB/fr1U1tOJKsjQZBswcQT6BgtGcqsiYgoX4x6zegIe2pyoHeHiIgoncnlnqPDjA4RUYHpLtAxGQ1Icwc6ttSUQO8OERFROrN7jo7JykCHiKigdBfoCJsno5PGjA4REQUPszujY2bpGhFRgekz0DFoa+c4bAx0iIgoeFhcWkbHHBbYpghERKFAp4GOZ44OS9eIiCh4mGW1UDm1MqNDRFRQugx07OkZHQY6REQUJBx2mOBUZ81WZnSIiApK34FOGgMdIiIKEvZLxyRLGDM6REQFpc9Ax6gFOs4MBxUiIqKAcmiNCISVc3SIiApMl4GOwx3ouGypgd4VIiIijV07JtldRoRZteMUERHlny4DHacno8M5OkREFCwcWqAja72FmXV5eCYi8itdZ3TAQIeIiIKEy31MSmWgQ0TkF7r8T+o0aStOu9xlAkRERIGW5l7yIA1mWBnoEBEVmC7/k7pM7owOAx0iIgoSNncn0DSXZHRMgd4dIqJiT6eBTnimemgiIqJAs6Ulp5euWUyGQO8OEVGxp/OMDufoEBFRcLClaIGOzWCBwcBAh4iooHQa6GhzdIzM6BARUZCwu5sR2A2WQO8KEVFI0GWgA3PYZYuzERERBZIjzb2ODgMdIiK/0HWgw4wOEREFC4d7jo7NwMVCiYj8QZeBjsGiNSMwOpnRISKi4OBwl645GOgQEfmFPgMdsxbomBjoEBFRkHDatCoDh5Gla0RE/qDPQMeila6ZWLpGRERBwpEe6LjnkRIRUYHoMtAxukvXTC5boHeFiIhIcblL15wmZnSIiPxBp4GOO6PjZEaHiIiCg8uuHZOczOgQEfmFLgMdk7WEOjUz
o0NEREEW6LiMbEZAROQPugx0zO7SNQubERARUbCwe0rXmNEhIvIHXQY6Jqs70AEDHSIiChJ27ZjkMjGjQ0TkD7oMdMwsXSMiomDj6QTqXtSaiIgKRp+BTpiW0bHCDjidgd4dIiIiwOGuMjAzo0NE5A/6DHQsWkYn04GFiIgogIzpGR3tyzgiIioYXQY6lvASl03+JCIiCiSD+4s3A0vXiIj8QpeBjtVihdNl0C6423kSEREFQ0bHyECHiMgvdBromJAK98rTnlIBIiKiADJ6ljxgoENE5Be6DHTCzCakwazOu2wsXSMiosAzuQMdzxIIRERUMLoMdKxmI1KhdbWxpTHQISKi4Al0jBZmdIiI/EGXgU6YBDourXTNlpoc6N0hIiKCyb22m8nCjA4RkT/oMtCxmozppWt2BjpERBQEzJ7StYxLIBARUb7pMtAxGg1IM2ila3aWrhERURAwuzM65jCWrhER+YMuAx1hc3dds6cxo0NERIFngTvQYekaEZFf6DfQYUaHiIiCiMWlla6Zw1i6RkTkD7oPdJwMdIiIKIgyOha2lyYi8gvdBjoOo1a65rCzdI2IiALM6YAZTnXWwowOEZFf6DbQsadndFIDvStERKR39kvHIquVgQ4RkT/oNtBxGN2Bjo2la0REFGD2S8ciawkGOkRE/sBAJ8O3aERERIHgaYzjcBkQZtWOT0REVDD6DXRM2joFLmZ0iIgowGzuQCcNFljNuj00ExH5lW7/m7rcGZ2M5QJERESBkJaiNcZJlUDHpNtDMxGRX+n2v6nTk9Fh6RoREQVRRsfMQIeIyC90+9/UadIyOgYGOkREFGC2VC2jY4O29AERERWcbgMduDM6cGgrURMREQW6GYHNwECHiChggc6CBQvQv39/VKpUCQaDAVOmTMnzMfPmzUPLli0RFhaGOnXqYPz48Qg0l1kLdAyco0NERAFmT3NndBjoEBEFLtC5ePEi4uPjMXbsWK/uv3fvXvTt2xddu3bFunXrMHLkSNx9993477//EAwZHYODpWtERBRYDncHUM9i1kREVHBmXx/Qu3dvtXnr888/R82aNfHuu++qyw0bNsSiRYvw/vvvo2fPnggYS7g6MTpZukZERIFlT9O+dLMzo0NEVHzm6CxduhTdunXLdJ0EOHJ9IBncGR0jMzpERBRgDvccHbtn6QMiIir6jI6vEhISEBMTk+k6uZyYmIjk5GSUKFHissekpqaqzUPuK2w2m9p85XlMxse63F3XjI60fD1nKMtuvChnHC/fcLyK15jx51Q0nO7SNSdL14iIik+gkx9jxozB6NGjL7t+xowZiIiIyPfzzpw5M/384aPH1akj9QKmTZuW7+cMZRnHi/LG8fINx6t4jFlSUlKRv6YeOd1LHThMLF0jIio2gU5sbCyOHTuW6Tq5HBUVlW02RzzzzDMYNWpUpoxO1apV0aNHD/W4/HwjKR8QunfvDotFO4jMmJYMrAVKmFzo06ePz88ZyrIbL8oZx8s3HK/iNWaejDoVLqdNC3ScLF0jIio+gU779u0vy5jIAVuuz4m0oZYtKznAF+Qgn/Hx5rCS6tTksvHDVg4KOt56w/HyDcereIwZf0ZFw+Ve6sBpvPzYR0RERdSM4MKFC6pNtGye9tFy/sCBA+nZmDvuuCP9/vfddx/27NmDJ598Etu2bcOnn36KX3/9FY899hgCyWTVuq6ZXey6RkREgeXyZHTc80eJiCgAgc6qVavQokULtQkpMZPzL7zwgrp89OjR9KBHSGvpqVOnqiyOrL8jbaa//vrrwLaWljfubi9tdnGiLRERBZh7jo6nUQ4REQWgdK1Lly5wuVw53j5+/PhsH7N27VoEE3OYFuhYmNEhIqIAc3mWOnAvfUBERMVgHZ1gZXJndKwMdIiIKMAM6RkdBjpERP6i20DHYtU6vpngBBz2QO8OERHpmMHh/tLNzECHiMhfdBvomMMztLb2lAwQEREFgMGpBToGBjpERH6j20DH
YskQ6LhLBoiIKPS88cYbMBgMGDlyZPp1KSkpePDBB1GuXDmUKlUK119//WVrvhUlo/s4xECHiMh/dBvohIVZYHOZtAsMdIiIQtLKlSvxxRdfoFmzZpmulyUO/v77b0yaNAnz58/HkSNHMGjQoIDtp9Gd0WHpGhGR/+g20LGajEjzNJ1zL9RGREShQ9Z9GzJkCL766iuUKVMm/fpz587hm2++wXvvvYerr74arVq1wrhx47BkyRIsW7YsoIGOZ+kDIiIqOP0GOmYjUuFe8dszCZSIiEKGlKb17dsX3bp1y3T96tWrYbPZMl3foEEDVKtWDUuXLg1woMOMDhFRwNbRCRVhKtDRFmZzpiXrN+IjIgpBEydOxJo1a1TpWlYJCQmwWq2Ijo7OdH1MTIy6LTupqalq80hMTFSnEjDJ5ivPYzynJqd7jo7Jmq/n04OsY0a543j5huNVvMbL29c16zmjc8FlBgwyWCngd2hERKHh4MGDePTRRzFz5kyEh/unFGzMmDEYPXr0ZdfPmDEDERER+X5e2UfRyKaVUO/edwAJ06YVYE9Dn2fMyDscL99wvIrHeCUlJXl1P10HOp7SNVtqMgMdIqIQIaVpx48fR8uWLdOvczgcWLBgAT755BP8999/SEtLw9mzZzNldaTrWmxsbLbP+cwzz2DUqFGZMjpVq1ZFjx49EBUVla9vI+UDQvfu3WGxWHB0/f9BlnVr0LgJmnbo4/Pz6UHWMaPccbx8w/EqXuPlyarnxazvZgTaD8aeymYERESh4pprrsHGjRszXTd8+HA1D+epp55SAYocmGfPnq3aSovt27fjwIEDaN++fbbPGRYWpras5HkKcpD3PN7s0uboWMNL8UNWHgo65nrD8fINx6t4jJe3r6nbQEfWVEgzuAOdtORA7w4REflJZGQkmjRpkum6kiVLqjVzPNffddddKkNTtmxZlZF5+OGHVZBzxRVXBGSfLS6t3txsZX0BEZG/6DbQETaD1ozAwUCHiEhX3n//fRiNRpXRkSYDPXv2xKeffhqw/bHAE+hkWMyaiIgKRNeBjl0yOi4JdFi6RkQUyubNm5fpsjQpGDt2rNqCgSfQsYRxHR0iIn/RdVdluyejY7vUMpSIiKioWd2la5YwZnSIiPxF14GOw+heR8fG0jUiIgoMl9MBi8GhzltZukZE5De6DnQ8GR0nMzpERBQgtgydP61+WveHiIh0Hug4jFp3G5d7oTYiIqKilpp6aeE7a1j+Fx8lIqLMdB3oON2lay47Ax0iIgqMtFStfNrpMiDMqh2XiIio4HQd6DhMnkCHpWtERBTY0rU0mGEw6vqwTETkV7r+j+ryBDqco0NERAFic2d0PItYExGRf+g80HGvQO1goENERIFhd6/llgaWrRER+ZOuAx2nSetuY2DpGhERBbh0zQ5mdIiI/EnXgQ7cpWsGZnSIiCjAGR0bS9eIiPxK14GOy+xer4CBDhERBYgjTZujY2egQ0TkV7oOdAzujI6RgQ4REQU4o2N3L3lARET+oetABxYto2N0pAV6T4iISKec7s6fDmZ0iIj8SteBjsGsdV0zOhnoEBFRYDhsWkbHwYwOEZFf6TrQMVq0QMfkZOkaEREFhis90HEveUBERH6h80CnhDo1MaNDREQB4nQvceB0zxslIiL/0Hmgo317ZmagQ0REAeLyBDpGztEhIvInnQc6WjMCk8sW6F0hIiKdcrmbEbiY0SEi8itdBzomq1a6ZmFGh4iIAsWd0XGZ3Gu7ERGRX+g80NFK1ywuBjpERBQg7rXcmNEhIvIvXQc6ZmuEdgob4HIFeneIiEiPPGu5MdAhIvIrXQc6Jqt7wVC4ZCGDQO8OERHpkMFdugYzS9eIiPxJ14GO2T1HJ2PpABERUVEyeNZycy9iTURE/qHrQMcaluHbM883akREREXI6C5dMzDQISLyK30HOmYTUl1m7YJdW5maiIioKBncgY7RzDk6RET+pOtAJ9xiRCrcC7Qxo0NE
RAFgci9xYLBkKKcmIqIC03WgYzWZkMZAh4iIgiDQMVpYukZE5E/6DnTMGTI6bEZAREQBYHKv5eZZ242IiPxD14FOmAQ6Li3QsaclB3p3iIhIh8zujI6JpWtERH5l1HtGx1O6Zk9loENEREXP5NLWcTNauI4OEZE/6T7Q8ZSu2dPYdY2IiIqe2R3omFm6RkTkV7oOdMxGw6WMDgMdIiIKAEt6oMPSNSIif9J1oGMwGGAzaIGOg4EOEREFgMXdjMBiZekaEZE/6TrQEXaDtkAbmxEQEVEgWKFldCxhDHSIiPxJ94HOaUNZdWo5ujrQu0JERDrjdLpggV2dt4SxdI2IyJ90H+jMMl+lTkvv+hNIvRDo3SEiIh1JczgRBnfpGgMdIiK/0n2gsyWsKfY4Y2GyXwQ2/R7o3SEiIh1JtdlhNTjUeSsDHSIiv9J9oGM1mzDR0VW7sOa7QO8OERHpSGrqpUY4bC9NRORfug90wsxG/O7oDKfRAhxeDRzdEOhdIiIinUjLsFi1wcxmBERE/sRAx2zCKZRGQtw12hXM6hARURGxpWTo+GnSuoASEZF/6D7QsZq1IdhT7Qbtig2/AmlJgd0pIiLSBbtNK11LlcWrDYZA7w4RUUjRfaAjpWtio7U5UKYGkJoIbJ4c6N0iIiIdsKWlaqcS6BARkV/pPtBpU1NbR+f92btwuNZg7UqWrxERURFwuBerthkY6BAR+ZvuA50RV9ZCt4YVkWZ3YvjaenAZzcDB5cDxrYHeNSIiCnH2NK10zWbg/BwiIn/TfaBjMhrw4c0t0LhSFHYklcQiY2vthtXM6hARUeFyuEvX7MzoEBH5ne4DHVEyzIxvhrZBbFQ4vk66Sl3nWv+ztMMJ9K4REZEOmhE4GOgQEfkdAx232NLh+GZYa6w2N8chV3kYUs7CteXPQO8WERGFMKe7dM1uZOkaEZG/MdDJoHGl0vjwllb41dFFXT465/NA7xIREYUwp10rXXMw0CEi8jsGOllc0zAGlbqMgMNlQKVza7BwyeJA7xIREYV4oONkoENE5HcMdLJx0zXtsLN0B3V+5/SxWH/wbKB3iYiIQpDLPUeHgQ4Rkf8x0MmGwWBA3d4PqfMDDfPx4PdLkWJzBHq3iIgoxLiY0SEiKjQMdHJgqtcDzsg4lDVcQPOLizF1w9FA7xIREYUae5o6cZrCAr0nREQhh4FOTkxmGFveoc7eapqN75ftD/QeERFRiGZ0XCZmdIiIgiLQGTt2LGrUqIHw8HC0a9cOK1asyPG+48ePV6VgGTd5XLHQ4na4DEZ0MG1BjcP/YMMhztUhIiL/MTi0QAdmZnSIiAIe6Pzyyy8YNWoUXnzxRaxZswbx8fHo2bMnjh8/nuNjoqKicPTo0fRt//5ikh2JrgpD5yfU2TGWb/Df3PmB3iMiIgol7kDHxdI1IqLABzrvvfceRowYgeHDh6NRo0b4/PPPERERgW+//TbHx0gWJzY2Nn2LiYlBsXHVU0is1BERhlQM2vU0zpw5Heg9IiKiEGFwz9GBmaVrRET+ZvblzmlpaVi9ejWeeeaZ9OuMRiO6deuGpUuX5vi4CxcuoHr16nA6nWjZsiVef/11NG7cOMf7p6amqs0jMTFRndpsNrX5yvOY/DxWhA/+Cic/6oTaOIKdP9+LUnf/JNEbQlVBx0tvOF6+4XgVrzHjz6lwGZxaoGNgRoeIKLCBzsmTJ+FwOC7LyMjlbdu2ZfuY+vXrq2xPs2bNcO7cObzzzjvo0KEDNm/ejCpVqmT7mDFjxmD06NGXXT9jxgyVPcqvmTNn5vuxR8o+iBEnX0Pd4zOw7vv/YX+Fbgh1BRkvPeJ4+YbjVTzGLCkpqchfU09M7tI1g6WYzF0lIgrVQCc/2rdvrzYPCXIaNmyIL774Aq+88kq2j5GMkcwDypjRqVq1Knr06KHm++TnG0n5gNC9e3dYLJZ8vY+kNDvef3s/nsD3aHb4ZzTpdhtclVsiFPljvPSE4+UbjlfxGjNP
Rp0KOaPDZgRERIENdMqXLw+TyYRjx45lul4uy9wbb8hBukWLFti1a1eO9wkLC1Nbdo8tyEG+II8vbbEgueV9mL5yK3qZVsI4+S7g3gVARFkEgsPpgtPlgsVUeB3CCzreesPx8g3Hq3iMGX9Ghcvo1EoDjRYGOkRE/ubTp2Sr1YpWrVph9uzZ6dfJvBu5nDFrkxspfdu4cSPi4uJQ3NzWvjqesN2Lfa4Y4NxBYPK9MgBFvh+nL6bh2k8Woc1rs/DvRi5kSkRUXJmdWumakaVrRER+53M6QErKvvrqK3z33XfYunUr7r//fly8eFF1YRN33HFHpmYFL7/8sppbs2fPHtWO+rbbblPtpe+++24UN7UqlELzutXwQNqjsBuswM4ZwKL3inQfzqfYMGzcCmw+koizSTbc/9MavPTXZqTaHUW6H0REVHAmd0bHxECHiCjwgc5NN92kGgq88MILaN68OdatW4fp06enNyg4cOCAWivH48yZM6odtczL6dOnj6r3XrJkiWpNXRzdfkV1bHHVwGu4U7ti7mvA5ilF8topNgdGfL8KGw6dQ9mSVrUvYvySfbjhs6U4cIqThomIihOTS5ujw9I1IiL/y9cEj4ceekhlZaQF9PLly9GuXbv02+bNm4fx48enX37//ffT75uQkICpU6eqOTrF1TUNY1A5ugTGJV+JfVUHAi4nMGkoMOEm4PSeQntdm8OJhyaswbI9p1EqzIzvhrfFKwObYNywNoiOsGDj4XPo+9FClrIRke5J5842bdogMjISFStWxMCBA7F9+/ZM90lJScGDDz6IcuXKoVSpUrj++usvm39aFMwuZnSIiApL4c1kD1EmowG3tqsmPXLwv+ThQMdHAaMZ2DEdGHsFMOc1IM2/mRWn04Unf9uAWVuPI8xsxDdDW6NpldLqtq4NKmLaI1eiVfUyOJ9qZykbEene/PnzVRCzbNky1a1OutZJ104ps/Z47LHH8Pfff2PSpEnq/keOHMGgQYOKfF8t7oyOJYyBDhGRvzHQyYeb2lSF1WTE6kMXsaHhKOD+pUCtLoCsh7DgLWBsO2Dr34DLVeDXcrlceOnvzZi89jDMRgM+u60l2tUql+k+laJLYOI9V+C+q2pnKmU7eJqlbESkP1JOPWzYMLUwdXx8vKoykLJqWfBayJpu33zzDd577z1cffXVqsnOuHHjVFm1BEdFyeLO6JitJYr0dYmI9ICBTj6ULxWGPk21dtrfL90PVKgH3D4FuPEHoHRV4NwB4JfbgB8HASd3Fui13pu5Q72GwQC8e2M8rm6QebFWD2kz/XTvBplK2Z76fUOBXpuIKBRIYCPKltWWA5CAR7I83bpdWvi5QYMGqFatGpYuXVqk+2aBXTu1MqNDRFTsFgwNVbe3r4Ep647g7/VH8H99GqJMSSvQ6FqgTjetE9viD4Hdc4BPWgMxTbSMT82rgOodgLBSXr3G1wv34OM52npDLw9oggHNK+f5GCll+/Xe9ujx/gKs2HsaF1PtKBnGHzMR6ZMsgTBy5Eh07NgRTZo0UdfJfFFZLiE6OjrTfaWpjtyWHZlnKlvWhVQlYJLNV/IYhwuwQitdg9mSr+fRE8/4cJy8w/HyDcereI2Xt6/LT8D51LJaNBrFRWHL0UT8uuog7nWXjcEaAVz9HBB/C/Dfs9rcnWObtG3pJ9p8niptLgU+ct50+Y9hxuYEvDp1qzr/RM/66R3WvFEvJhJVypTAoTPJWLX/DK6qV8F/b5yIqBiRuTqbNm3CokWLCtzgYPTo0ZddL8snRERE5Os57U4JdLSMzoqVa5Cy5XiB9lEvZN4VeY/j5RuOV/EYr6Qk76ZnMNDJJ4PBgGEdauDJ3zfgq4V7cNsV1TNnTsrVBm79BbhwAti3ANgzD9gzHzi7HziwVNvmjQFimwG3TwZKls/0/D8tP6BOb7uiGh7o4g6ifNC+VjlMWn0IS3afZKBDRLokHUL/+ecfLFiwAFWqVEm/PjY2FmlpaTh79mym
rI50XZPbsiPrw8k6chkzOlWrVlVNDqKiovL1beTkaTNhhfatZJeuV8NYvo7Pz6MnMmbyoap79+6wWCyB3p2gx/HyDcereI2XJ6ueFwY6BXBdy8oYO28X9p9KwrjFe/HQ1XUvv1OpCkCT67VNnN4L7J2vBT27ZgMJG4DxfYE7/gIiY9LXy1m255Q6f/sVNVRQ5asOdbRAZ9lu7XmIiPRCmrg8/PDDmDx5slryoGbNmplul+YDcmCePXu2aistpP20NCxo3759ts8ZFhamtqzkefJ7kJeMTpg7oxNWopQ8Wb6eR28KMuZ6xPHyDcereIyXt6/JZgQFIA0ARnWvp85/sWAPzia5a61zU7Ym0GoYMHgcMGIOEFkJOLFNC3YStTVwJMhJtTsRVzoc9WK8m8+TVftaWoZImhKcS2a9KRHpq1ztxx9/xIQJE9RaOjLvRrbk5GR1e+nSpXHXXXepDM3cuXNVc4Lhw4erIOeKK64osv20OVwIM7j/P5vZjICIyN8Y6BRQ/2aV0CA2EudT7Ph8vo8LhkqZwvCpWqe2UzuB8X2Ac4cwb/sJdXOX+hXylc0RsaXDUat8SThdUE0JiIj04rPPPlOd1rp06YK4uLj07Zdffsm0mHW/fv1URqdz586qZO2PP/4o0v10OLVsjmK2FulrExHpAQOdAjIaDapZgBi/ZC+OJ6b49gRlawHDpgLR1YDTe4BxfbBt2yZ101X1KhZo39rX1tbbWcryNSLSWeladpusreMRHh6OsWPH4vTp02ohUQlycpqfU2gcGQId0+VlcUREVDAMdPzg6gYVVRe2FJszvR20T8pUB4b/C5SpqZoVvHPxWdQyHkfHOpkXBs1voCMNCYiIKLi4HBnKik3M6BAR+RsDHT+Q8rInezVQ539ecQAHTnnX8i6T0lWA4dNwLqI6qhhOYlL4q4i8qHVey68rammBzraE8zh90Yv5Q0REVGRcLi2jkyZ9gYw8HBMR+Ru7rvmJBBWd61XAgh0n8P6sHXj/pua+P0lUJbxQ9i08dGEU6uIwMK43UL8PEBZ5abOWcp8vBURXB8rVkUgr26crXyoM9WMisf3YedXgoE/TuIK/USIi8mvpmt1gAfM5RET+x0DHj57sWV8FOlPWHcZ9V9VG/dhInx4vbaX/2+/CIttzWFzpQ4Sf3gasHpf7g8rVBRr217ZKLS4LeqR8TQIdmafDQIeIKPhK1+xgK1siosLAQMePmlQujb5N4zB141G8M2M7vrqjtU+PX773tJrnEx0Vg7AR04FNvwMXTwKp5y9taRcunT+5Q+vWtug9bYuqcinoqXYFYDSpQGf8kn2cp0NEFGQMTi3QsRmZzyEiKgwMdPzsse718O+mo5i55RjWHDiDltXKeP3YeduPq9Or6lWAoUQZoM3duT8gJRHYOQPY+jewcyaQeAhY/pm2lawAVG6FLpHVcYcpFftPxeDU/hiUq1IHMPHbQyKigHMHOnYDAx0iosLAQMfP6lQshRtaVcGvqw7h7enbMWFEO6/XwpmfYf0cr4RHAU1v0DZbMrB7LrD1L2D7NODiCWDHdEjD0pc9cc24NwGDSWtlXb4uULeHlv2JLOKWqkREBLjX0XEY+eUTEVFhYKBTCB7tVg9T1h7B0j2nsHjXKXSqWz7Px0intj0nL8JsNKCjF/e/jKUE0KCPtknd98HlwIntam2e7Vs3wHV6L2qZjsPqSgXO7NU2yQZNe0Irc2t4rRb0RFeF3jicLtzz/So1vemL21vDZMzfIq1ERL4wujM6DmZ0iIgKBQOdQlA5ugRuu6I6vl28F2//tw0d63TMM6szf4dWttayehlEhRfw2z0pTavRSdsAHKl+HMPHr0T1MuGYf39D4PRe4PAqYMtf2umBpdr23zOq3M1Yvy9KpURI71Powe4TFzB7mzb+c7cdR7dGMYHeJSLSA3eg4zRysVAiosLAxv2F5IGutRFhNWH9oXP4c92RPO8/z9eyNR+0qVlWZSn2n0nBIUc0
UKMj0PFRYMRs4LHNQK83gGodZGoscHg1THNexjVbn4b5g4bAxCHAkk/U9SpTlAdZr2f9wbMochKU7V+qNW/w0cZD59LPj1uy1887RkSUPaPToU6dnDdJRFQomNEpJLKGzYNd6+Dt/7bj1albVAATHWHNsa30kt2n1Pku9Sr6fV9KhZkRX6U01hw4q9pMD24dkXmh0ivu17bzx4Btf8O55S+49i2BKekksO0fbROWkkCV1kC19tocn6jKQOnKQGRceoODEd+vwur9Z/Bg19p4vEd9r+cnFci5w8DfjwC7ZgHhpYHebwPNbsxxfaGsNh25FOhIqeGOY+dRL8a31uBERL4yuDwZHZauEREVBgY6hWjElbUwZe1h7Dx+AW9O34Yxg5ple7+V+04j2eZATFQYGsYVzgdsaTOtAp09EujkMA8nMkZ1enM0H4rp//yJ3s3jYD680l3atgxIOQvsna9tGRmMQKkYJJeIxZ1HLehuroit86vhs3MdcP/1vWDw8ttK6VL317ojGNG5lir/8yqLs34i8O9TQKo7WEk5B0y+B9jyJ9Dvfe095WHTYe2xkWFmnE+1Y9zifRgzqKlX+0xEVNA5Ok4TS9eIiAoDA51CZDUb8fqgphj8+VL8vOIgBrWsgjY1yuZYtqbaShdSBqRD7fIYO3e3yui4XK48X8dptMBVpS1QsyOAkYDTCZzYpgU9h1YBZw9o7awlmyIH6/NHUeL8UfQ1ZXiSLZ/CttUKc1wTGGKbArLFxWtd3yTzYg5XWRen04UvFuxRaw9JY4DtCefz7lYn2ae/HwV2/KtdrtQSGPAJsG0aMP9NYPtU4MASoM87QJPrc8zuyOttPpKozj/VuwGem7IJk9cewlO96ueYgSMi8gejS+u6BgY6RESFgoFOIZPA5uY2VTFx5UE8+8dGTH3kShUAZbd+Tpf6/i9b82hVvQysJiOOnkvB/lNJqFG+pG9PYDQCMY20rc1dl66XAOjiCTjPHsJz309HWNJRDK3vQMkzW1Hi9FaUQgpwZI22ZWWywhkWhWNp4WifFoZxpgicNZXCjv1VsHHufjRr3VlrfZ0xSJEsjiykOu1xIPkMIG1Zuz4DdHgUMJmBmMZA/d7AlPuAhI3A73dp2Z2+7wGlLp//tPfkBSSlORBuMaqf04TlB7DlaCJ+WXkQ915V27cxIiLygdFduuYy80sVIqLCwECnCDzdu4FaQFRK2L5auEfN3fE4eDoJu09cVM0COtbJR1tpL4VbTGhRLRrL955W84F8DnRyC4AiY7DihBkTzjdHZFhrPHVLN/V6f687hA9+/Q/1sQ/9K55Cz/LHYTy2CbhwDHA5AUcajEknEQcgLmPsJ1mhBZOABQBKVgTimgGxsjUFNk/W1goSct11n2vBTUaxTYARc4GF7wIL3tbuv3+xlt1pfF2mwGnTYS2b0yguCmaTEcM61sCTv23A90v3465ONVW7b5XB2j1Hm59U+2qv5/545dhm4PAa7XllvhMR6YbJXbomX/oQEZH/MdApAlIC9Xy/Rhj5yzp8NHsn+jWLQ/VyWqAxb4dWttayWjRKlyjczjsyT0cCHZmnc2u7an597slrDqvTPk3jVJAj+jevgnBrPzz40xpMS3CiS+kK+PyRVjAbgM9mrsfPCzYhEhfRqIwL/+sciyol0pB85ihmzZuN+s49qGs8CsPF41qTAdk8jGag85PAlaPSmyBcRq7v8rSW3Zl8P3B8M/DbcGD2aKDZTdpWrjY2uufnNK1cWp1eG18Jb/y7DafPnsG2qR+jyZFJWmbIo1IL4Mr/AfX7akFefqRe0LJSa77TutkJcwmg4yNaNzyrn4LQQEpJ1NZ2YjcpohyZ3BkdVcZLRER+x0CniAxoXgm/rT6ERbtOqnkg39/ZVs1BmV8EZWsZ5+l8MGsnlu4+6dU8HW9J17hpG4+q89e1zJyV6N4oBt8Ma626sclcpGHjVsDpAlbsPQ2gHG5q3RwvXdsYJaxacCQtCI4YrsXD
/25DjSgD/ru1HMJObAISNgBH12tze7q/rM318Ybc7555wIK3gKWfAmf2aXN4ZKvcGpXOt0UZNEMTd6ATfmYnvq7wC+rY/0HUmuRLH0Jqdgb2LQKOrAV+uQ2o0ADoNEqb/+MNKbmT8r3V32lBTtqFS0FbmZrAqZ3aPsnt17wAxN+S/0AqkOxpwJKPtExaiTJasNn8Nq2s0O3nlQfxynITbJWO4IY21TOP0c6ZwLwxWllit5eARgP8m0HLi+xDUb4e6ZrJM0fHzDk6RIXF4XDAZst7eQy5j9lsRkpKinoMBXa8LBYLTKaME7/zh4FOEZGg4pWBTdDzgwVYuPMk/lp/BL2axKa3lZZGBIUtvmppNRfl5IU0VUbnrxbKs7ceV93KpFNa22yaLVxZtwK+v7Md7hy/Esv2SIADlLSaVKOGAc0vL9ca2qEGvluyD/vOpWDc/vK476oMc4LyQ+rfr34O6PQYsG0qsOEXrRTt8CrchVW4I8yElI1dgY0pwP5FaCmPMQB7nTEo0WEEYjvfBUSU1dboWf45sPxLrTGDdHeb+xqM7R+G0VkGsKcAySe1D+nJp92nZ7RSvc1/AscyZIbK1gZa3gE0vxUoWUGbRzTzBeDsfuDPB7TX6fmaFmAVF7KO0T8jtbER549qDSOWjgWueRFo0BcbDyfilanbYHMa8OLfW3FFnQqoUiZCCyJnvwwcXH7p+SYNBWp1Bfq8rbUzL0zys5XgbO2PQP0+QK8xQMnCKyUlyhjoGBnoEPmdfKGbkJCAs2fPen3/2NhYHDx4sGiWxijmXEUwXtHR0eo1CvL8DHSKUM3yJfFw1zp4d+YOvPLPFlhMRjURvkJkGBpXiir01w8zm1RzBAm0pPuavwId6VLmyVoZZU5LNtrWLIuf7m6He35YhbjSJfD+Tc3VeGRHSt9G9aiPxyetx9i5u3BT66ooU9IPNexSEibr68h2/hhOLp+AowvGo6lxHyz7Z11qlV2/Dz45fxXe3R2HwReq4S0JcoR88JWAqcPDwMpvtA/wZ/fD9O/j6AsjjOudub++dFZqdC3QcihQo1PmzEHjgVqZ3fIvtA/cksH6rr9WItdpJBBRTvvWV0rc5FTKwowF/6bDL5JOA7Ne0krxRER5LUiTIG/+W8DJHcAvQ+Co3BZfnhoIm6MGjAYXLqY58OXPv2F0qT9g2DP3Uvas7QjtfS7+EJDrP20PdHgI6PyE/8v60pKAZZ8Ciz4A0s5r1238Fdg9G+j9Vq4d+/weaDntWvMNf/9s5HdVAmjpSkhBxewuXTOwdI3I7zxBTsWKFREREZF3t1mnExcuXECpUqVgLI4VFUXMWYjjJUFUUlISjh/Xqp7i4mQ2d/4w0Cli91xVC3+uP4Jdxy/giUnrC72tdFZX1CqnAp0lu0+qzElBnbqQmt4e+7oWuU+mj68ajaVPX5NjMJSRPNfXC/dgW8J5Few8168R/CoyBovL34hH0+qhX1wiPmm2T5tPIiVjpSuj/f4zcH22BFPWHcHTvRuibMZAS8rnZH5Qu/uQtnI8kue9j9I27Y8RBpNWspV1k7k9EmB5gqbsSAAj83QkyyPlW6vGaW2yZcuOlL1JQFCyHBBZCYiKA6IqXTovp/LasraQLP6adCrLdlr7cG0tpQUQYZEZzpcCwqK0xWDLVNcWh80aWEmZ18bfgP+eUZ33FAnipOTM8z7lvSz+CK6lY2E6vAIfYwVujGiD0zGdEXF4AbofXwkcd7+XVsOAKx/X9l099hZtjaSdM4BF7wMbJgG9XgcaXlvw4ENWpF/3EzD3dS3z5Glu0e5ercRR5nRJxz55f/3e08bV36RjoQRyq8dpbdGlQYc0y+jyDFChXsGe+8x+LYBb8wNgu6hdJwF6hfp+2XXyb6BjtDCjQ+RPUkrlCXLKlSvn9Qf3tLQ0hIeHM9AJgvEqUUJbT1GCHfk55reMjYFOEZOsymsDm+CmL5epb7RFl/qFX7bm0aG29gcvJWSyfo03QUdupm48
CrvThSaVo1DXiwyRt68nXeikW92wcStVBzQJyqqWjYA/eRYKLVO9KdD1lky3SXOIZlVKY8Ohc/h5xYFMnfI8tp2246HljbH3/LuIxWlYS5XBt/dejZoVCpgpk8xR33eBNiOAOa9o5Vz2VK00zpF26X4SpEgWQjaZe1SYJBApXQWIrq4FPnIq5WaeTIzMWer3AVC9febHSVB4zfOYGt4Xif++ghtN83ClcyVwdCVgBBwuA/5GZ7S74y3E1WiQ+bFlawG3/gps/1cLeM4dAH69Qytnk/Iyef8uhxa0qPNO7bwEQZIBk3GU7JKUBsomQZ/cJoHTzBeBE1u115F1na5+QcveyD/rpjdqgZVk1mSdprGLgR6vaEGcP76QuHBcK5GTDFjWn9vmP4AtU7R9uOpJ1TDDJ0c3aHOkNv2hjY2Iaao1uZDxpKBigla6ZmCgQ+RXnjk5ksmh4svz85OfJwOdYqRdrXK4sXUV/LrqEORz/5V1ii7Qke5ipcLMOJdsw3dL96FH41g1tya//nB3W7uuRRX4m2S6JDCTeUzvzdyhyt38KWvHtYwkwzasQw2M+nU9fli6H/d0rqVKDT0p1Z+WH1Dlh6l2JypGRsBuN+LwBQNu+WoFfrn3ivSuegVSsQFw80+Zr5MP8p6gRzZbsvbB+fwRIPEokHjk0nk5TT4LlIjWPvirrbz7tKx2KgGMNEaQTbrBqfMXsXnfYZw9fQpVTadQ1XgKBmmDKx/KZdubYX+k5EZKyjo8os2FysbuExfw5H/HkWS/G84rHsCt58fBsH0anA36YdSJfvjzcCQ6zTqHH+7KpkGGXG7QB6jVRQs+Fn+gBVeeAMsXUpYoWaoUd722BD6y723uzjwZXN5Hl6e0MsM/H9Q648lcI8nu9H5Ty3xJyZtkStSpe5Pzsr/SKljGRZUahmkli/KckkWTAGfrP9oiuyKsNBB/s5bNksBk7hgtg7dhIrBxkpbVkg6DpSrl3gBC2qd7Sv08ZMzk5+LvlujkNxbJ6MivjIWla0SFgXNtijd//PwY6ATIM70bYt+pJMRXKY3SEUXXglfWipHytVlbj2H031vUVql0ONrULKvm78hcmurR3n27uOfEBaw7eFZlX6Qtc2H8gss49f9kESavPazWtfF0RysoyWZtdq+hk9Nz9m0Wh9enbUNCYgqmb0pA//hKKkB85o8NmLYxIT0b98Z1jTFr1iyM318au05cxC1fLsMv97b3ewZKkfIxa4S2efj6rb8X7n9rLg6oD/DA29c3xuB6Zq0cSuZ6nD2gnZdOah1H5vr60pHv4Qlr1Vy09rXK4ZY+7eBwdMG/U/9C777X4tGzqZj+4ULVjfDnFQdzbnsu7/fq/9OCAslYSAAnYyGBmpQLqvMm7bxkdiSokHkvUlInZXsyX0iulyBHAo8r7teaU0gQmJOKDYG7ZgLLPgPmvArsWwh81sEPowugShug1XCtVC3jz/KWCVpnPympk8yTBEbrf4Gx+RDUPOGAce4aIOkEcD5Ba3IhZXfyXjMGc/KcEuBU8u8XA+R/FvkDU8voMNAhIioMDHQCRCbX/3pvljKfIvLStY1Qo1wEVu47jU1HEnHkXAr+XHdEbSK6hAVNo4zoZnfCkksMJvNXRKc65VVDhcLQtEppFURJl7o3p2/DD3e188vzHjidpDrFWc1G1I0plWOZ4ZB21fDh7J0Yv2QfKpcpoT60Hz6bDIvJgKd6NcCdHWvC4bAj0gJ8P7w1bhu3Cnsk2PlKC3YKki0LlAOnktT4eHyz+ABuaH0lDFK6ho4+PZesSbTlaKKa4/TBzc1VUCxJKZdB+9dTq0IpPNGzPl6duhWvTd2CzvXKa13YciJBVf8PfX9TDtul4EfmHcm8Jm9I8CSNECSr9M8oYM88dzMICTZLuk8jtFPZ4HJn3FIBh/vUs8k3U/V6Aa2Ha4vf5kTmcw2ZBBxcobr6yWua1oxHM7lN6/txOUtJoMVtQPsHgDIFn3tHRcMsXdeY0SGi
QlKjRg2MHDlSbXrFQEeH5IOkZ3L/xVQ71h44ixX7TmPl3tNYe/AMzibbsDDZiEd/WY9Pb2utgoGspHxrylqtbG1QlrVz/E0+CP+76ahqorBgxwl09kMrbk/ZWsPYyPSStOwMuaIaPp23C6v3n8Hgz5fC4XShWtkIfHxLC9VcQXjax0uw9/OIK3Dzl8uw96Qns3OF6jKXW2bpYpodkeHBs7Dmwl1aY4GGcVHYd/Kiaggh5YMd6/jWbnnmlmMqQBTvDo5HTFT2H+aGd6ypMmar9p/B079vxA93aWtM+ZU0mpCOZvntaibzW+6YUrTr7FRtC9zxJ6b/8zvsy7+EEU7Uq1UbdWrXufReSrlPS5Qtnusu6Zwno2MOY6BDRJouXbqgefPm+OCDDwr8XCtXrkTJkiGwCHkB8MiocyXDzOhUtzxGda+Hn++5Ahtf6omPb46H2eDCrG0ncP+Pq5Fqv3whKPngL9/6y3o4PRr5uSVuFlICdvsV2rfUY/7dpoIDfzUiyKsUrmJkOPo108ryJMjp1ywO/zzSKT3IyUo+zE8Y0U4FQzI+EuwcS0zJdJ+Dp5O0BgcT1qD1a7PQ9KUZ6vy5pLwXNCsKC3ecVKe9m8RicGtt7tU3izJOzMnbkbPJeOI3ravg3Z1qomuDnBfElSzP24Pj1RpPUsI2YcUBBEJSmh1fLtitfj45CkC998/Hq+Ih2yN4wDYSvXZei3kxd2jZmzrdgNgmWtMFBjnFjnxZZPUEOtbil/klosD977Db3YsN56FChQq6b8jAoyNlItmNXo1jMKKBE2FmI2ZvO457f1it5lpkJHNmRM8msShhLfz1XB66ug4iw8zYejQRU9Zpr10Qm47k3Iggq8d71kefprF464ZmKpMTlUf2RTI4EjRWKVNCzcOSMjZZa0jm9lz51hxc+dZcPPPHRkzdcBSnL2pd1OR8rw8XqLbfgWR3ONP34cq65VW2RT7bz9l2XDUV8IYEhCMnrsPZJJsa3yd7Zemmlg1ZU+mJntr9Xp+6Nfdgo5BIcwmZkzV8/MrLft8DRYKvpXu0OTh1opyqw+EDP61Rc+OoeEtzSKCjfVixcI4OEQEYNmwY5s+fjw8//FBVNsg2fvx4dfrvv/+iVatWCAsLw6JFi7B7924MGDAAMTExai2bNm3aqPnCWUvXPsiQGZLn+frrr3HdddepAKhu3br466+/vG7Zfdddd6FmzZqq9XPDhg3x+eefX3a/b7/9Fo0bN1b7KevfPPTQQ+m3Scvve++9V+2ztKVu0qQJ/vnnHxQmBjqUrQbRLnx1ewv1LbuskzPi+1XpH/4kw/PPBm3tkUGF0G0tOzLH4/6u2qT3d/7bXqAPovJtyKY8GhFkJPNsPh3SCje2rup1SZU8RsrYpNGDzNl57Jf1arL9wdPJMBsNaFOjDEZ2q4vf7muP3+/voD7oHz2XgiFfL8eYaVuzzaIVhQ2HzyExxY6ocDOaVYlW+3WNOxszbrF3WZ3P5+9WpZCS7ZPAMLvSx+wM71BDjYu0XX/6jw3q5+QrGbctR7SfrS82HjqHiSsPqvOyxtX7M3cgGMjCvml2JypHh+P+hk50rF1ONXa4c/xKrwNPCk5pdkeGjA4DHaLCphahTLPnuiWnOfK8T342b49nEuC0b98eI0aMwNGjR9VWtWpVddvTTz+NN954A1u3bkWzZs3UYp19+vTB7NmzsXbtWvTq1Qv9+/fHgQO5V0WMHj0aN954IzZs2KAeP2TIEJw+fdqrdXOqVKmCSZMmYcuWLXjuuefwyiuv4Ndff02/z2effYYHH3wQ99xzDzZu3KiCqDp16qQ/vnfv3li8eDF+/PFH9RzyfvLbNtpbnKNDOZIuWeOHt1UfqmR+zN3frcJXd7TG/B0nVPexmKgwtHevy1MUZOL/j0v3q+YJ4xbvw/1d8tdtTIIN2X+ryYh6Xqz9U5CSO8ns3PfjGvVPTua4SOMG6WwnJYMZ/fNwJ7w6dYsKhr5YsEeN90e3NEed
ioW3f9lZtFPL5si+SkmZuLNTTczaehy/rz6Mx3vUR3RE9m2kPQGDJ0gYPaAJapT3vjZY1lh6+4Z4ldlavOsUfli2H3e0935ivZQ0jvh+tZrH9UK/Rmq/vX3ci39tUtNvGsRGqjlJXy7cgx6NY9Cqei4LvBYByaSJLvUqwGy8gE9uicfQ8avV+k53fLMCfzzQIce5TxTcpDW91cCMDlFRSbY50OiF/wLy2lte7okIa94fuUuXLg2r1aqyLbGx2rSAbdu2qdOXX34Z3bt3T79v2bJlER8fn35Zgo7Jkyer4CJjFiW7rNEtt2hrB77++uv46KOPsGLFChUo5cZisaggyaN69epYsGCBCnxuvvlmdd2rr76K//3vf3j00UfT7yeZJiHZJnkdCdTq1dMWxa5Vq/DXd2NGh3J1hTvYkW/nZf6EBD0TV2rfFgxoXjn9w3BRCLeY8L8e2srun87dlV72ld+ytfqxkV5nG/JL1tP599ErMX1kZzzfr5Gaq5I1yBFy3ZhBzfDF7a1QJsKiOpX1/WgRfli6L1+ZDTFxxQHc9MVSn8rAFu7UGhHIvK2MAa80JpCDRG7zZ+SbsJG/rFXlVTK/5/p8NKmQwEhaiovXp21VLcy99e3ivSrI8XR72+z+OedFyjDXHDiLCKtJ/a4PalFZBT2PT9qg3lOgyM9dsqmiS33t5yFrYH07rI3qmijd/4Z+u0IF7VQ8A50waP/DDOy6RkR5aN26dabLktF5/PHHVQlZdHS0Kl+TICKvjE6zZqqHpyKNCqKionD8uPalWl7Gjh2ryudk7o887rvvvsPBg1o1hDzHkSNHcM0112T72HXr1qmMkCfIKSrM6FCeJAPx3Z1tMWzcyvT5AuK6FoXbbS078poyMV4CgY9m78RL1zbOd8c1f63J4089G8eiRdVoPP7bBvWh/fk/N6sPu+/d2Nyn9ZbGzt2Ft//brs5/Om83xgzKpZ2x2/kUm+rAJzrXvdTZTsr1pKHA/yatx3dL9uHuTrWyDRDf+Hcrdp+4iIqRYXj9uqb57px2+xXVVcc2Cawf+3U9fr+vvVr/KTdSrvbWdO39Vi1bQmXtHp24Dn8/1CnXOWTynqXBhXj46rqILR2OF/s3xuLdJ1XnvLf+26YuB8KOYxdUMCNz5drVKIu5u7Try5cKU23WB322RGWfpKz0+zvbqi8CqPhItTlR1j1HR63tRESFqoTFpDIrOZHSqvOJ5xEZFQmjnxu8yGsXVNbuaRLkzJw5E++8844qD5N5MzfccAPS0tLyzMxkJMdqee95mThxonrNd999V5XXyf5IRkgCGCGvn5u8bi8szOiQV1rXKIvv72qrGgIIKfGRb/mLmpQ3PdtH+8b/x2X7Vfvj/HdcK/r990bFqHCMH9YGL/ZvpAIKaQhx7dhF2J5w3qssgKw35AlyxF/rDuNCat4dWpbtOa2yMZItyLrYqSyWKu2zjyWmYtpGbX5WRlLO+N3S/eq8dFCTdaLyS5WwDW6GyHAz1h88qwK13Mh8rUcnrkWaw4luDStiygMd1b7KXBvJCuXm4zm7cPJCqpqLdGcnrUxOAso3r9e+8ZISyWUZgvtAlK11qF3usmBNfj7jh7dRGZ4Ve0+r9y9NIKj4kN9XzxwdmPP/90JE3pEP9FI+ltsm/2vzuk9+Nl+++JPSNZn4nxeZ6yJlaNJYoGnTpqrUbd8+bUmHwrB48WJ06NABDzzwAFq0aKGCq4yvFxkZqZofyJyhnDJJhw4dwo4dRTsHloEOea1ltTL48e52qhvXU73z7qRVWKSs6qp6FdSHcvnG3RcSCHgyOt50XAsU+bAvHc8mP9BBdW/bfyoJ1326GP9mE2RknGvywp+b8Zk7MPi/Pg1Rq0JJNbn/Ty861S3KpmzNQwKuO66ors5LRi1jOd2Zi2l4YpLWSnpo++rqZ1NQ0rnulQFN1HnJ3Mncn5xI84adxy+oTIcEKOVKheG9G7W6ZZnnM2vLsWwfJ4HQt+622TKnRxaI
9ehSvyJubqNNAJU22bLeVFGbu10LdHJqzd24Uml8eUcrNdfsv83HMPrvzfkuc6Sil2pzINzgDnSY0SEiNwkWli9froKIkydP5phtkY5pf/zxh8qorF+/HrfeeqtXmZn8ktdbtWoV/vvvPxWsvPDCC1izZk2m+7z00ksq4yPzfnbu3Klu//jjj9VtV111FTp37ozrr79eZaL27t2rOslNnz4dhYmBDvlE1o+Rspmu9XNeF6UoPNOnAWR60LSNCWpNH29JKZC0PbaYDGqOTrCTD7NSftWxjtZt6/6f1qiuc1m/vZe20I9PWq8+2MsXR1I6NqJzLdzatpq6fcLyA3l+CJYGCOLKDGVrGQ25oroqo5JAceU+bczlOZ+dvBHHz6eidoWSeNo9v8YfBjSvpNp6S0D72K/rsu20N3fb8fRM0rs3xqsgx/MepNxOPPn7BhzPspaR7LcEBvLc0lUuu2Di//o2VN3zpAxuzL+5Z4b8TdZU8vxe5/a31qF2ebx/U3P1M/9+6X58uWBPEe4lFURaWobfSTMDHSLSSHmYdCJr1KiRmguT05yb9957D2XKlFFZFum21rNnT7Rs2bLQ9uvee+/FoEGDcNNNN6Fdu3Y4deqUajed0dChQ1U7608//VS1mO7Xr58KeDx+//131ZxAmiHI+3vyySe9yl4VBOfoULHUIDYKN7Sqgl9XHVLlSdKm2ZvUsKdsTbqtZfwGP5hJGdh3w9uqkrSvFu7FJ3N3qYn2H9zcAqVLWFRL5Ud+Xqu+1ZfmEJLNkEYR4vqWVfDWf9ux+Uii6tSV00Knh84kYc/Ji+rxOXXSkxbfg1pWVp3hvlm0R83d+mPNYfy7KUG1zP7w5hZ+XVNJfp6vDmyqgirJvkg5njR08DhxPjV9UVLpyJc1k/REr/pYsvuUms+l5hcNb6syZWLGlmMqsJNsSMbnzCgy3KLWTpKW3z8uO4BejeOyzXYVhoW7Tqhgtk7FUqpMzWbLueFA32ZxOHquIV6dulXNN4qLLoFr47VFbil42RjoEFE2ZLL+0qVLM10nJWrZZX7mzJmT6Tpp7ZzRviylbNl94Slr23hD1sUZN26c2oRkjxITE1UGJ2tAJFt2pFOcrLNTlJjRoWJrVPf6ap0f+eb7v80JvjUiqBS8ZWvZkcn4/9e3ET64qbnKqszdfgIDxy5WJV3S9luCHCkv+/y2VulBjidI6tMkNj2rk1db6eZVo3NdEFUCCk+gsGTXSbz412Z1+bHu9QqluYMEV29e3zS9ZM6zmKn8s37yt/U4eSFNzRd7spfWjS8jCWSlRbeMlwQ145Zo//AlMySLg4oRnWvm2gJb2mxLcwQhr5eYYivS+TlX51C2ltVdnWpiWAdtjtHjv64P2Lwi8p49NfXSBZauEREVCgY6VGxJh6wRV2o92N+cvh02R961qRs9C4VWKV6BjsfAFpXVAqNSUiVdwfp/skh9iJeOLuOGtUH3RjGXPebWdtoH9b/WH8nxg/rCXVoAIev85KZuTKTKnMiXQkPHrVBNDlpXL4P7rsrfmkbeuLpBDG5xl+A9MWmDeg9SoifBngR3kknKqeOYrEP0nDtj8+a/21R3NinvOnQmGbFR4Xigi7aQWW6e7t0A1cpGqPWbXnUHSIVJ5lrNT28rXcHr7Jdkpno2jlGT3O/5fhV2Hsu7eQUFjj0tWTuVwgo/d3giIvLVfffdp1pUZ7fJbcUV/7tSsXbvVbVRvpRVfejPLWPhyQJsLgaNCPIimZO/HuqIK2ppi1lKd7If726rsg/ZaVOjjCqBknVw/lx7eVMCKZFa7A50Otcr71X2QNgcLrW+kswRKez1lJ7r21AFGzLH6qEJa/HaVG3OzLO9G+Q51+q2dtXQraEWADw0YQ0+naf1aX62b8Ns1zTKSu7zzuB4NQ9GSiWXF3K2ZMPhczh1MU11OGxTw/sFS+VnIEFfy2rRSEyxq3bwWecmUfCwp2kZHZvB+7bxRESF5eWXX1aNDbLb5LbiioEO
FWvSXvfRbtriUx/O3plradHRcynqA6R8IJRyp+JMJt1LU4gPb26Ofx7uhFbVy+b6bb+nKcFP2TQlkPk+0qBBPljHV8l+Dk9G0nXP01r8xWsbX9aKujBIsCFzjyTYkPWFZLFFySwNdZdr5Ubev5S/SctpmYeUYnOiXc2y6N8szuvXl/lInqzSB7MuTawszLK1K+uVhyWP9YOykszW10PbqHbZEhTe+d3KgHSMo7w57VoQamegQ0RBoGLFiqpldHab3FZcMdChYk/aAEsb5dMX0/DW9G05dhfzzM+pW7FUSCyuKB+CZT5O9XI5zzHxkKYEMldFFphce/Bstt3WpAlBXgtzegKH74a3waT72uPG1loL5qJay+nezlqJXLmSVrXWjrdrE0hg+O5greW0JJ9koVlfFzR9sGsd1a1PFs2VtWsKyzx3W2lpcZ3feU2yxo6M0abDiXhwwhrVlY+Ci8PdjMDBQIeIqNAw0KGQ+MD/rLutsXTHeujntUhOc+TYca04l63llyyCKd25RNYSv4Xu9XMkU+PLoqa+lFX5y/961MPoaxurbFbFyHCfHtu5XgWMG94G393ZNl+L3cq8qBtaVU1f26cwHD+forrj+TI/JzsS/H4zrI1q1jFv+wk8N2UT19gJMg6bO6NjZCMCIqLCwkCHQkK3RjF4Y1BT9Y371A1HceMXS5FwLiX7QKeYNiIoqCHttNKrfzYcwblkrcQvKc2evl5LTuvnBFtQK+VqjSr5Hqh41qQpyPt8oEtt1Up70a6TWL3f/1kdTxMCCcZ9DeSykg56H9/SUmWwfll1MD2AouDgtGlzdBxGZnSIiAoLAx0KGTe3rYYf72qnSnekTO3aTxZhnbtMS77NTu+4psOMjmhZrQzqx0SqOSqT1xxS1y3fc1o1FahSpgSqlyv8uTbFncxHkjJA8eFsramBP811l61lt4BpfkgXvlcGNlFtx3NaQ4kCw+UOdJxGa6B3hYgoZDHQoZDSrlY5/PlgR/WB/vj5VJXZ+XPdYRxLTMXJC6mqEUGjfJQthQLVlMCd1ZmwQmtK4JmfI1kOX+es6JXM1ZHfI2mKsPaAlg3zB2mPvnCH9vPoWoCytayGtKuOno21tZQoeDjtnowOAx0iosLCQIdC8lv33x/ooLUUtjvx6MR1eHzSenVbnQqh0YigIOvwyLyNHccuqJK1/MzP0btq5SIwqEVlv8/VWbXvDM6n2lUTAW+631Ex59ACHRcDHSLyoxo1auCDDz4I9G4EDQY6FLJtp7+8vRXu76J16ZI5FXouW/MoXcKC/s0qqfPvz9qBnccvqDkcHWqXC/SuFcusjixauj5LF7uClq1dVb8CjIW8LhEFXr1y2tyciAiWjBIRFRYGOhSy5MPiU70a4P2b4mE1a7/qzavqO9ARnvK1xbu0hS+bVolGdAS/VfZFjfIlMaC5FjB+PMc/WZ257vVzpGEChb5msVqziTJRpQK9K0REIYuBDoW861pUwR/3d8Co7vVwfSttIrmeSTeujO2VO7NsLV8e6lpHZcNmbT2e3tEvvw6eTlLZNckSdS4G3e+o4Az2NO2MuWDd9YgodHz55ZeoVKkSnM7Ma58NGDAAd955J3bv3q3Ox8TEoFSpUmjTpg1mzZqV79d777330LRpU5QsWRJVq1bFAw88gAsXLmS6z+LFi9GlSxeVfS5Tpgx69uyJM2e0+amyn2+//bZaVDQsLAzVqlXDa6+9hmDCQId0QUrWHrmmLiKsZuhdxqYEolMdBjr5UatCKVwbXynPuTrnU2x4d8Z2dHtvPm79ahle+mszflq+Hyv3nca5JFumsrVW1cqoNY9IP3N0YGI2lahIyFpiaRdz32xJed8nP5uX65gNHjwYp06dwty5c9OvO336NKZPn44hQ4aoIKRPnz6YPXs21q5di169eqF///44cCDz+njeMhqN+Oijj7B582Z89913mDNnDp588sn029etW4drrrkGjRo1wtKlS7Fo0SL1eg6Htlbh
6NGj8eabb+L555/Hli1bMGHCBBWEBRN+6iPSoYHNK2HsnF2wmA1oUa1MoHen2Hro6rr4c/0RzNhyDFuOJGZa3yfF5sCPy/Zj7NxdOOMOaHYdv4Alu7WSQY+YqDA4nC6/tpWmYsDhzugw0CEqGhLEvK59OZXTN/+F1gbm2SOAtWSed5OMSe/evVXAIAGG+O2331C+fHl07dpVBSbx8fHp93/llVcwefJk/PXXX3jooYd83q2RI0dmamLw6quv4r777sOnn36qrnvrrbfQunXr9MuicePG6vTcuXP44osvVKA0dOhQdV3t2rXRqVMnBBMGOkQ6FBluwX+PdValV575S+S7OhVLoV+zSvh7/RGV1fn89lYqaPljzSF8MGsnDp9NVverVaGkKnWT26REbXvCeew8dh5HzqWo1uce3Roy0NENu7agsYula0SUgWRuRowYoYILKQf76aefcPPNN6sgRzI6L730EqZOnYqjR4/CbrcjOTk53xmdWbNmYcyYMdi2bRsSExPV86WkpCApKUmVqklGR7JM2dm6dStSU1PTA7JgxUCHSMcd2KjgHr66Dv7ZcATTNyfg20V7MXHlAdW+W8RGheOx7nXVIqNmkzHbsjYJfCToqRAZhroxkQF4BxTQjI6ZGR2iImGJ0DIrOZD5JonnzyMqMlIFFX5/bS9JaZiscyfBjMzBWbhwId5//3112+OPP46ZM2finXfeUfNiSpQogRtuuAFpae7/Jz7Yt28f+vXrh/vvv1/NqylbtqwqTbvrrrvU80mgI8+fk9xuCyYMdIiICqBeTCT6NInD1I1H8fI/W9KDyAe71sYd7Wvkum6TZNZaViujNtIZTzMCU1ig94RIH2RR7NzKx6QBgMWh3cffgY4PwsPDMWjQIJXJ2bVrF+rXr4+WLVumNwYYNmwYrrvuOnVZMjwSsOTH6tWrVXD37rvvpgd2v/76a6b7NGvWTM0Hkrk4WdWtW1cFO3K7lKwFKwY6REQFJI0uZm09po6jd3asiXuvqs2MGXlVusY5OkSUXfmaZFukScBtt92WKbj4448/VNZHGgtJE4CsHdq8VadOHdhsNnz88cfq+SSI+vzzzzPd55lnnlFd2aQbm8zdsVqtqlGClLNJBujRRx/F008/rYKzjh074sSJE2qfJSsULBjoEBEVUP3YSMx5vAsiLCaUKckPrpQ3Z7v7sTwxBm2aDkbOOT8i0qOrr75aBRLbt2/HrbfemqkdtLSZ7tChg2pQ8NRTT6m5NfkRHx+vnk+6pklA07lzZzVf54477ki/T7169TBjxgw8++yzaNu2rcrgtGvXDrfccou6/YknnlCtqV944QUcOXIEcXFxKiAKJgx0iIj8oHJ08ahXpiBRrg5ORDUBytQM9J4QUZCRUjIJHLKSzmjSAjqjBx98MNNlX0rZHnvsMbVldPvtt2e6fNVVV6lsT1aSSZL9lCDoueeeQ7DKVxHi2LFj1WBLqkoiuxUrVuR6/0mTJqFBgwbq/pICmzZtWn73l4iIiIiIyP+Bzi+//IJRo0bhxRdfxJo1a1TqS1ZJPX5cW/AuqyVLlqgUl9TryeJGAwcOVNumTZt8fWkiIiIiIsqDNDMoVapUtptnLRw98Ll0Ter5pL/38OHD1WWZuCQt8L799ls1ISmrDz/8UK3cKnV8nsWNpDXeJ598ctmkJyIiIiIiKphrr71WVV1lx2LRT7McnwId6ast7ehk0pKH1Od169YNS5cuzfYxcr1kgDKSDNCUKVPyu89ERERERJSDyMhItemdT4HOyZMn4XA4EBMTk+l6uSyrqmYnISEh2/vL9TmRlVZl8/B0lJA2eLL5yvOY/DxWjzhevuF4+YbjVbzGjD8nIiIqroKy65q0t8tucSJpcScrteaXlMyR9zhevuF4+YbjVTzGLCkpqchfk4jIH1wuV6B3gQL88/Mp0JGe3SaTCceOHct0vVyOjY3N9jFyvS/3F1Ial7HcTTI6VatWRY8ePRAVFYX8fCMp
HxC6d++uq7rE/OJ4+Ybj5RuOV/Eas/yu0UBEFCie/5PyRY2s/ULFk+eLtoIc93wKdGRF1FatWmH27Nmqc5qnj7Zcfuihh7J9TPv27dXtI0eOTL9ODthyfU7CwsLUlpW80YK82YI+Xm84Xr7hePmG41U8xizUf0ayXMLbb7+tyqmli6isEi4L4xFR8SVfykdHR6d3BJZqIIPBkOtj5POszEVPSUlR888JARsvyeRIkCM/P/k5ys+zyErXJNMydOhQtG7dWh0MPvjgA1y8eDG9C5usqFq5cmVVfiYeffRRtdjQu+++i759+2LixIlYtWoVvvzyy3zvNBERUUF5lkuQDqDSnUiOZ9IsR1Yjr1ixYqB3j4gKwFM5lNPyJ9l9uE5OTlYZoLyCIkKRjJcEOblVgBVKoHPTTTfhxIkTeOGFF9Q3YM2bN8f06dPTGw4cOHAgU2TXoUMHTJgwQa2aKqun1q1bV3Vca9KkSYF2nIiIqCB8XS6BiIoP+fAdFxenvrTwpqmK3GfBggXo3LlzyGey/aGwx0uesyCZnAI1I5AytZxK1ebNm3fZdYMHD1YbERFRMMjPcglEVPzIh2VvPjDLfex2O8LDwxnoeKG4jFdQdl0jIiIqTL4ul8BlDwKPY+YbjpdvOF7Fa7y8fV0GOkRERHngsgfBg2PmG46XbzhexWO8vF36gIEOERHpjq/LJXDZg8DjmPmG4+UbjlfxGi9vlz4wF6cFg/K7noP8MCTyk8fzlzdvHC/fcLx8w/EqXmPm+b8bagvv+bpcQtZlDzzjIV2H8vMz8fxM5fFS505545j5huPlG45X8RoveV1vjk3FItA5f/68OpVvz4iIKDD/h0uXLo1QktdyCbnhcYmIKPiPTcUi0KlUqRIOHjyIyMjIfPXq9pQYyHPkp8RAbzhevuF4+YbjVbzGTL4tkwOJ/B8ONXktl5AbHpeKHsfMNxwv33C8itd4eXtsMrhCrR4hhx+GRHvnzp3jL68XOF6+4Xj5huPlO45Z6OHP1HccM99wvHzD8QrN8bq0sicREREREVGIYKBDREREREQhRxeBjnTKefHFFzN1zKGccbx8w/HyDcfLdxyz0MOfqe84Zr7hePmG4xWa46WLOTpERERERKQvusjoEBERERGRvjDQISIiIiKikMNAh4iIiIiIQo6uAh1Z1G3KlCmB3o1ig+NVMPv27VNjuG7dukDvSrHA8fLdvHnz1JidPXs20LtCBcD/td7jWBUc/9f6huNVvI9NIRfojB07FjVq1EB4eDjatWuHFStWBHqXgtZLL72kfhEzbg0aNAj0bgWNBQsWoH///mrV3ewOrtLHQ1ZUj4uLQ4kSJdCtWzfs3LkTepXXeA0bNuyy37devXpBr8aMGYM2bdogMjISFStWxMCBA7F9+/ZM90lJScGDDz6IcuXKoVSpUrj++utx7NixgO0z5R+PTd7hcSlvPDb5hscmfR+bQirQ+eWXXzBq1CjV7m7NmjWIj49Hz549cfz48UDvWtBq3Lgxjh49mr4tWrQo0LsUNC5evKh+h+QDSnbeeustfPTRR/j888+xfPlylCxZUv2+yT8APcprvIQcPDL+vv3888/Qq/nz56sDxbJlyzBz5kzYbDb06NFDjaPHY489hr///huTJk1S9z9y5AgGDRoU0P0m3/HY5Bsel3LHY5NveGzS+bHJFULatm3revDBB9MvOxwOV6VKlVxjxoxRl+XtTp48Of32F154wRUbG+tav369S49efPFFV3x8fI63c7xyHgun06nG4u23306/7uzZs66wsDDXzz//rC7v3btXPW7t2rXqst1udw0fPtxVv3591/79+116Gi8xdOhQ14ABA3J8jJ7HSxw/fly9//nz56f/PlksFtekSZPS77N161Z1n6VLl6rLc+fOVZfPnDmjLl+8eNHVq1cvV4cOHdKvo8Djscl7PC75hscm3/DYpL9jU8hkdNLS0rB69WqVovUwGo3q8tKlSzPdV37XH374
YXz//fdYuHAhmjVrBr2SdLakc2vVqoUhQ4bgwIEDl92H43W5vXv3IiEhIdPvW+nSpVVJStbfN5GamorBgwerGl8Zw2rVqkGvdbuSCq9fvz7uv/9+nDp1Ktv76XG8zp07p07Lli2rTuX/mXyTlvF3TEp4ZCyy+x2TWuju3bvD6XSqb+Gio6OLcO8pJzw2+Y7HpfzjsSl/eGwK3WOTGSHi5MmTcDgciImJyXS9XN62bVv6Zbvdjttuuw1r165V6fDKlStDr+Qf3/jx49UftqRqR48ejSuvvBKbNm1StZmC45U9OZCI7H7fPLd5XLhwAX379lX/IOfOnasOOnokpQGS2q5ZsyZ2796NZ599Fr1791b/GE0mk67HSw4AI0eORMeOHdGkSRN1nfweWa3Wyw4K2f2OyeWbbroJdevWxYQJE9TjKDjw2OQbHpcKhscm3/HYFNrHppAJdLwldYVhYWGq9rB8+fLQM/lD9pBvw+QAU716dfz666+466671PUcr4K75ZZbUKVKFcyZM0dNDNWrm2++Of1806ZN1e9c7dq11Tdp11xzja7HS+qh5YNcfuciyLdlbdu2VXNBMh6Yqfjg/1oNj0tFR4//a7PDY1NoH5tCpnRN/tnJIGbt+iCXY2NjMw364cOH8d9//wVgL4ObROf16tXDrl270q/jeGXP8zuV1++b6NOnDzZs2JBtSlfPpCxF/m4z/r7pcbweeugh/PPPP+obQjmIesjvkZQ9ZW3Pmd3vmHzLKJ2FtmzZUmT7Td7hsalgeFzyDY9NBcdjU2gdm0Im0JF0WKtWrTB79uxMKTe53L59+/Trrr32WpU+u/vuuzFx4sQA7W1wkrSspG2lJaUHxyt7kuKWP+iMv2+JiYmqw03G3zch9b5vvPGGGkvpTkKaQ4cOqTrojL9vehovmWMgB5LJkyerbwjldyoj+X9msVgy/Y5Ji0+Zr5D1d0zGa+jQoerbRwY7wYXHpoLhcck3PDYVHI9NrtA6NrlCyMSJE1VnkfHjx7u2bNniuueee1zR0dGuhISEy7ptSLeI8PDwTF0j9OZ///ufa968eaqjyOLFi13dunVzlS9fXnXYEHofr/Pnz6suK7LJWLz33nvqvKfLyhtvvKF+v/7880/Xhg0bVNeWmjVrupKTk7Pt1PL++++7SpUq5Vq4cKFLb+Mltz3++OOqI4uMy6xZs1wtW7Z01a1b15WSkqLL8br//vtdpUuXVn+DR48eTd+SkpLS73Pfffe5qlWr5pozZ45r1apVrvbt26vNI2tnm5EjR7piYmJUBxwKHjw2eY/Hpbzx2OQbHpv0fWwKqUBHfPzxx2rwrVaraum5bNmyHNsK/vLLL+qf5O+//+7So5tuuskVFxenxqpy5crq8q5du9Jv1/t4ef5Qs27SitLTxvP5559Xf7zyIeaaa65xbd++Pf3xWf85infffdcVGRmpDuB6Gi/5B9mjRw9XhQoVVFvK6tWru0aMGJH+QU+P45XdWMk2bty49PvIB5MHHnjAVaZMGVdERITruuuuUwecnA4m4uGHH1Z/1xl/FynweGzyDo9LeeOxyTc8Nun72GRwvykiIiIiIqKQETJzdIiIiIiIiDwY6BARERERUchhoENERERERCGHgQ4REREREYUcBjpERERERBRyGOgQEREREVHIYaBDREREREQhh4EOERERERGFHAY6RH4ybNgwDBw4MNC7QUREpPC4RHrHQIeIiIiIiEIOAx0iH/32229o2rQpSpQogXLlyqFbt2544okn8N133+HPP/+EwWBQ27x589T9Dx48iBtvvBHR0dEoW7YsBgwYgH379l32jdvo0aNRoUIFREVF4b777kNaWloA3yURERUXPC4RZc+cw/VElI2jR4/illtuwVtvvYXrrrsO58+fx8KFC3HHHXfgwIEDSExMxLhx49R95eBhs9nQs2dPtG/fXt3PbDbj1VdfRa9evbBhwwZYrVZ139mzZyM8PFwdhORgM3z4cHWw
eu211wL8jomIKJjxuESUMwY6RD4eUOx2OwYNGoTq1aur6+RbNCHfpKWmpiI2Njb9/j/++COcTie+/vpr9W2akAOOfIsmB48ePXqo6+TA8u233yIiIgKNGzfGyy+/rL6Ne+WVV2A0MvFKRETZ43GJKGf8TSXyQXx8PK655hp1EBk8eDC++uornDlzJsf7r1+/Hrt27UJkZCRKlSqlNvlGLSUlBbt37870vHIw8ZBv2i5cuKDKC4iIiHLC4xJRzpjRIfKByWTCzJkzsWTJEsyYMQMff/wx/u///g/Lly/P9v5yUGjVqhV++umny26TumciIqKC4HGJKGcMdIh8JKn+jh07qu2FF15QpQKTJ09WaX6Hw5Hpvi1btsQvv/yCihUrqsmcuX3DlpycrMoMxLJly9S3bFWrVi3090NERMUbj0tE2WPpGpEP5Buy119/HatWrVKTPP/44w+cOHECDRs2xP+3d8coCkNBGIBn72BnY+MNrMU2Z0hpK7aW5gKCR7GyFXIIwcrG0sIbZHmvkG0Utlhchu9rJETEVI//vZnJZDKpjZyXyyXu93tt+GzbNkajUZ1oU5o+r9drrYFer9dxu92ev1sm2SyXyzifz3E8HmO73cZqtVIHDcBb1iV4zYkO/ELZ/er7Pvb7fZ1kU3bNdrtdNE0Ts9msLhbls5QGnE6nWCwW9fubzaY2ipZpOOPxuNZT/9xJK9fT6TTm83ltHC0TdLqu++izAvD/WZfgta9hGIY394E/Vt5X8Hg84nA4fPqvAIB1iTScPwIAAOkIOgAAQDpK1wAAgHSc6AAAAOkIOgAAQDqCDgAAkI6gAwAApCPoAAAA6Qg6AABAOoIOAACQjqADAACkI+gAAACRzTcv1zJlDHI9wwAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 1000x500 with 2 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "plot_learning_curves(history, sample_step=500)  #横坐标是 steps"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.818553Z",
     "start_time": "2025-06-26T01:45:37.816716Z"
    }
   },
   "outputs": [],
   "source": [
    "# 导入所需库\n",
    "import os\n",
    "import pandas as pd\n",
    "from PIL import Image\n",
    "import torch\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "from torchvision import transforms\n",
    "import tqdm\n",
    "\n",
    "# 定义测试数据集类\n",
    "class CIFAR10TestDataset(Dataset):\n",
    "    def __init__(self, img_dir, transform=None):\n",
    "        \"\"\"\n",
    "        初始化测试数据集\n",
    "        \n",
    "        参数:\n",
    "            img_dir: 测试图片目录\n",
    "            transform: 图像预处理变换\n",
    "        \"\"\"\n",
    "        self.img_dir = img_dir\n",
    "        self.transform = transform\n",
    "        self.img_files = [f for f in os.listdir(img_dir) if f.endswith('.png')]\n",
    "        \n",
    "    def __len__(self):\n",
    "        return len(self.img_files)\n",
    "    \n",
    "    def __getitem__(self, idx):\n",
    "        img_path = os.path.join(self.img_dir, self.img_files[idx])\n",
    "        image = Image.open(img_path).convert('RGB')\n",
    "        \n",
    "        if self.transform:\n",
    "            image = self.transform(image)\n",
    "            \n",
    "        # 提取图像ID（文件名去掉扩展名）\n",
    "        img_id = int(os.path.splitext(self.img_files[idx])[0])\n",
    "        \n",
    "        return image, img_id\n",
    "\n",
    "# 定义预测函数\n",
    "def predict_test_set(model, img_dir, labels_file, device, batch_size=64):\n",
    "    \"\"\"\n",
    "    预测测试集并生成提交文件\n",
    "    \n",
    "    参数:\n",
    "        model: 训练好的模型\n",
    "        img_dir: 测试图片目录\n",
    "        labels_file: 提交模板文件路径\n",
    "        device: 计算设备\n",
    "        batch_size: 批处理大小\n",
    "    \"\"\"\n",
    "    # 图像预处理变换（与训练集相同）\n",
    "    transform = transforms.Compose([\n",
    "        transforms.ToTensor(),\n",
    "        transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
    "    ])\n",
    "    \n",
    "    # 创建测试数据集和数据加载器\n",
    "    test_dataset = CIFAR10TestDataset(img_dir, transform=transform)\n",
    "    test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=4)\n",
    "    \n",
    "    # 设置模型为评估模式\n",
    "    model.eval()\n",
    "    \n",
    "    # 读取提交模板\n",
    "    submission_df = pd.read_csv(labels_file)\n",
    "    predictions = {}\n",
    "    \n",
    "    # 使用tqdm显示进度条\n",
    "    print(\"正在预测测试集...\")\n",
    "    with torch.no_grad():\n",
    "        for images, img_ids in tqdm.tqdm(test_loader, desc=\"预测进度\"):\n",
    "            images = images.to(device)\n",
    "            outputs = model(images)\n",
    "            _, predicted = torch.max(outputs, 1) #取最大的索引，作为预测结果\n",
    "            \n",
    "            # 记录每个图像的预测结果\n",
    "            for i, img_id in enumerate(img_ids):\n",
    "                predictions[img_id.item()] = predicted[i].item() #因为一个批次有多个图像，所以需要predicted[i]\n",
    "    \n",
    "    # 定义类别名称\n",
    "    class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n",
    "    \n",
    "    # 将数值标签转换为类别名称\n",
    "    labeled_predictions = {img_id: class_names[pred] for img_id, pred in predictions.items()}\n",
    "    \n",
    "    # 直接创建DataFrame\n",
    "    submission_df = pd.DataFrame({\n",
    "        'id': list(labeled_predictions.keys()),\n",
    "        'label': list(labeled_predictions.values())\n",
    "    })\n",
    "    \n",
    "    # 按id列排序\n",
    "    submission_df = submission_df.sort_values(by='id')\n",
    "    \n",
    "    # 检查id列是否有重复值\n",
    "    has_duplicates = submission_df['id'].duplicated().any()\n",
    "    print(f\"id列是否有重复值: {has_duplicates}\")\n",
    "    \n",
    "    # 保存预测结果\n",
    "    output_file = 'cifar10_submission.csv'\n",
    "    submission_df.to_csv(output_file, index=False)\n",
    "    print(f\"预测完成，结果已保存至 {output_file}\")\n",
    "\n",
    "# 执行测试集预测\n",
    "img_dir = r\"D:\\cifar-10\\test\\test\"\n",
    "labels_file = r\"D:\\cifar-10\\sampleSubmission.csv\"\n",
    "predict_test_set(model, img_dir, labels_file, device, batch_size=128)\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
