{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "uGV2VjXF4pNs"
   },
   "source": [
    "# CIFAR-10：下载 Kaggle 数据、构建 Dataset 并搭建 CNN 模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:32.363026Z",
     "start_time": "2025-06-26T01:43:29.447990Z"
    },
    "id": "3djTfPq64pNt"
   },
   "outputs": [],
   "source": [
    "import torch\n",
    "import torchvision\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from torchvision import datasets, transforms\n",
    "from deeplearning_func import EarlyStopping, ModelSaver,train_classification_model,plot_learning_curves\n",
    "from deeplearning_func import evaluate_classification_model as evaluate_model\n"
   ]
  },
  {
   "cell_type": "code",
   "metadata": {
    "id": "Fi46_oyAY6qD"
   },
   "source": [
    "import json\n",
    "import os\n",
    "from getpass import getpass\n",
    "\n",
    "# SECURITY FIX: never hardcode Kaggle API credentials in a notebook -- they leak\n",
    "# through version control and shared outputs. Read them from the environment,\n",
    "# falling back to an interactive prompt (getpass hides the key while typing).\n",
    "token = {\n",
    "    \"username\": os.environ.get(\"KAGGLE_USERNAME\") or input(\"Kaggle username: \"),\n",
    "    \"key\": os.environ.get(\"KAGGLE_KEY\") or getpass(\"Kaggle API key: \"),\n",
    "}\n",
    "with open('/content/kaggle.json', 'w') as file:\n",
    "  json.dump(token, file)"
   ],
   "execution_count": 3,
   "outputs": []
  },
  {
   "cell_type": "code",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "FBunE0OvY6ZY",
    "outputId": "88e36bc7-1d9b-4341-d913-0b27c01e9033"
   },
   "source": [
    "!cat /content/kaggle.json"
   ],
   "execution_count": 4,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "{\"username\": \"cskaoyan\", \"key\": \"<REDACTED>\"}"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "qXgB8rdbZIDU",
    "outputId": "25b83728-5319-4488-e53a-d9f0d1189c01"
   },
   "source": [
    "!mkdir -p ~/.kaggle\n",
    "!cp /content/kaggle.json ~/.kaggle/\n",
    "!chmod 600 ~/.kaggle/kaggle.json\n",
    "!kaggle config set -n path -v /content"
   ],
   "execution_count": 5,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "- path is now set to: /content\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "!kaggle competitions download -c cifar-10"
   ],
   "metadata": {
    "id": "4feg3Y3_2IJC",
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "outputId": "ac1ec94f-a6c1-4424-8066-8f58593bffa5"
   },
   "execution_count": 6,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Downloading cifar-10.zip to /content/competitions/cifar-10\n",
      " 96% 689M/715M [00:07<00:00, 186MB/s]\n",
      "100% 715M/715M [00:07<00:00, 103MB/s]\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "!unzip /content/competitions/cifar-10/cifar-10.zip"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "QDeB7tM12b9K",
    "outputId": "ecc5c7d2-0c2b-4e44-b73f-946ad9d8d024"
   },
   "execution_count": 7,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Archive:  /content/competitions/cifar-10/cifar-10.zip\n",
      "  inflating: sampleSubmission.csv    \n",
      "  inflating: test.7z                 \n",
      "  inflating: train.7z                \n",
      "  inflating: trainLabels.csv         \n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "# Pin the package version so a fresh run installs the same build\n",
    "# (1.0.0 is what the original run resolved, per the install log below).\n",
    "%pip install -q py7zr==1.0.0\n",
    "import py7zr\n",
    "# Context manager guarantees the archive handle is closed even if extraction fails.\n",
    "with py7zr.SevenZipFile(r'./train.7z', 'r') as archive:\n",
    "    archive.extractall(path=r'./competitions/cifar-10/')"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "NK7iEl7I2bRK",
    "outputId": "7c09a743-54e4-457c-b5b0-d9b05cb3f2c8"
   },
   "execution_count": 8,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Collecting py7zr\n",
      "  Downloading py7zr-1.0.0-py3-none-any.whl.metadata (17 kB)\n",
      "Collecting texttable (from py7zr)\n",
      "  Downloading texttable-1.7.0-py2.py3-none-any.whl.metadata (9.8 kB)\n",
      "Requirement already satisfied: pycryptodomex>=3.20.0 in /usr/local/lib/python3.11/dist-packages (from py7zr) (3.23.0)\n",
      "Collecting brotli>=1.1.0 (from py7zr)\n",
      "  Downloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.5 kB)\n",
      "Requirement already satisfied: psutil in /usr/local/lib/python3.11/dist-packages (from py7zr) (5.9.5)\n",
      "Collecting pyzstd>=0.16.1 (from py7zr)\n",
      "  Downloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.5 kB)\n",
      "Collecting pyppmd<1.3.0,>=1.1.0 (from py7zr)\n",
      "  Downloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.4 kB)\n",
      "Collecting pybcj<1.1.0,>=1.0.0 (from py7zr)\n",
      "  Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.7 kB)\n",
      "Collecting multivolumefile>=0.2.3 (from py7zr)\n",
      "  Downloading multivolumefile-0.2.3-py3-none-any.whl.metadata (6.3 kB)\n",
      "Collecting inflate64<1.1.0,>=1.0.0 (from py7zr)\n",
      "  Downloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (4.4 kB)\n",
      "Requirement already satisfied: typing-extensions>=4.13.2 in /usr/local/lib/python3.11/dist-packages (from pyzstd>=0.16.1->py7zr) (4.14.0)\n",
      "Downloading py7zr-1.0.0-py3-none-any.whl (69 kB)\n",
      "\u001B[2K   \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m69.7/69.7 kB\u001B[0m \u001B[31m4.6 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
      "\u001B[?25hDownloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.9 MB)\n",
      "\u001B[2K   \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m2.9/2.9 MB\u001B[0m \u001B[31m56.7 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
      "\u001B[?25hDownloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (96 kB)\n",
      "\u001B[2K   \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m96.4/96.4 kB\u001B[0m \u001B[31m11.5 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
      "\u001B[?25hDownloading multivolumefile-0.2.3-py3-none-any.whl (17 kB)\n",
      "Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (50 kB)\n",
      "\u001B[2K   \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m50.7/50.7 kB\u001B[0m \u001B[31m5.9 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
      "\u001B[?25hDownloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (141 kB)\n",
      "\u001B[2K   \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m141.3/141.3 kB\u001B[0m \u001B[31m15.8 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
      "\u001B[?25hDownloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (412 kB)\n",
      "\u001B[2K   \u001B[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001B[0m \u001B[32m412.9/412.9 kB\u001B[0m \u001B[31m36.0 MB/s\u001B[0m eta \u001B[36m0:00:00\u001B[0m\n",
      "\u001B[?25hDownloading texttable-1.7.0-py2.py3-none-any.whl (10 kB)\n",
      "Installing collected packages: texttable, brotli, pyzstd, pyppmd, pybcj, multivolumefile, inflate64, py7zr\n",
      "Successfully installed brotli-1.1.0 inflate64-1.0.3 multivolumefile-0.2.3 py7zr-1.0.0 pybcj-1.0.6 pyppmd-1.2.0 pyzstd-0.17.0 texttable-1.7.0\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "!ls competitions/cifar-10/train|wc -l"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "rI5JDfji59q-",
    "outputId": "56707275-bdf6-4fc8-d8ce-d5e650bf6137"
   },
   "execution_count": 13,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "50000\n"
     ]
    }
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "vk4EQTiM4pNt"
   },
   "source": [
    "# 加载数据并处理为tensor"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:32.407799Z",
     "start_time": "2025-06-26T01:43:32.363026Z"
    },
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "nvguuJLl4pNt",
    "outputId": "34a599a2-04ed-4719-d8a4-291f38475487"
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "完整数据集大小: 50000\n",
      "训练集大小: 45000\n",
      "验证集大小: 5000\n"
     ]
    }
   ],
   "source": [
    "# Load the Kaggle CIFAR-10 competition data (PNG images + CSV labels).\n",
    "import os\n",
    "import pandas as pd\n",
    "from PIL import Image\n",
    "from torch.utils.data import Dataset\n",
    "\n",
    "class CIFAR10Dataset(Dataset):\n",
    "    \"\"\"CIFAR-10 dataset: reads <id>.png from img_dir, text labels from a CSV.\n",
    "\n",
    "    Args:\n",
    "        img_dir: directory containing the training PNGs, named <id>.png.\n",
    "        labels_file: CSV with header row; column 0 = image id, column 1 = class name.\n",
    "        transform: optional callable applied to the PIL image (e.g. ToTensor).\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, img_dir, labels_file, transform=None):\n",
    "        self.img_dir = img_dir\n",
    "        self.transform = transform\n",
    "\n",
    "        # read_csv treats the first row as the header by default.\n",
    "        self.labels_df = pd.read_csv(labels_file)\n",
    "        self.img_names = self.labels_df.iloc[:, 0].values.astype(str)  # force string ids for path building\n",
    "\n",
    "        # Class-name -> integer-id lookup (dict gives O(1) per label).\n",
    "        self.class_names_dict = {'airplane': 0, 'automobile': 1, 'bird': 2, 'cat': 3,\n",
    "                                 'deer': 4, 'dog': 5, 'frog': 6, 'horse': 7, 'ship': 8, 'truck': 9}\n",
    "        # Convert all text labels to integer ids once, up front.\n",
    "        self.labels = [self.class_names_dict[label] for label in self.labels_df.iloc[:, 1].values]\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.labels)\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        img_path = os.path.join(self.img_dir, self.img_names[idx] + '.png')\n",
    "        image = Image.open(img_path)\n",
    "        label = self.labels[idx]\n",
    "\n",
    "        # BUG FIX: the original returned an undefined name (UnboundLocalError)\n",
    "        # when transform was None; now the raw PIL image is returned instead.\n",
    "        if self.transform:\n",
    "            image = self.transform(image)\n",
    "\n",
    "        return image, label\n",
    "\n",
    "# Preprocessing: tensor conversion + per-channel normalization\n",
    "# (mean/std computed by cal_mean_std on the training split).\n",
    "transform = transforms.Compose([\n",
    "    transforms.ToTensor(),\n",
    "    transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
    "])\n",
    "\n",
    "img_dir = r\"competitions/cifar-10/train\"\n",
    "labels_file = r\"./trainLabels.csv\"\n",
    "full_dataset = CIFAR10Dataset(img_dir=img_dir, labels_file=labels_file, transform=transform)\n",
    "\n",
    "# Human-readable class names, index-aligned with the ids in class_names_dict.\n",
    "class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n",
    "\n",
    "# Fixed-seed split so train/val membership is reproducible across runs.\n",
    "train_size = 45000\n",
    "val_size = 5000\n",
    "generator = torch.Generator().manual_seed(42)\n",
    "train_dataset, val_dataset = torch.utils.data.random_split(\n",
    "    full_dataset,\n",
    "    [train_size, val_size],\n",
    "    generator=generator\n",
    ")\n",
    "\n",
    "print(f\"完整数据集大小: {len(full_dataset)}\")\n",
    "print(f\"训练集大小: {len(train_dataset)}\")\n",
    "print(f\"验证集大小: {len(val_dataset)}\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "id": "1akKUts84pNu"
   },
   "outputs": [],
   "source": [
    "def cal_mean_std(ds):\n",
    "    \"\"\"Return the per-channel mean and std of a dataset of (C, H, W) image tensors.\"\"\"\n",
    "    mean = 0.\n",
    "    std = 0.\n",
    "    for img, _ in ds:\n",
    "        # Reduce over the spatial dims (H, W), leaving one value per channel.\n",
    "        mean += img.mean(dim=(1, 2))\n",
    "        std += img.std(dim=(1, 2))\n",
    "    # Average the per-image statistics over the whole dataset.\n",
    "    return mean / len(ds), std / len(ds)\n",
    "# cal_mean_std(train_dataset)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "HrTSD6iw4pNu"
   },
   "source": [
    "# 把数据集划分为训练集45000和验证集5000，并给DataLoader"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.144223Z",
     "start_time": "2025-06-26T01:43:33.135368Z"
    },
    "id": "qK_zQ__r4pNu"
   },
   "outputs": [],
   "source": [
    "# Batch iterators: reshuffle the training split every epoch;\n",
    "# keep the validation order fixed so metrics are deterministic.\n",
    "batch_size = 64\n",
    "\n",
    "train_loader = torch.utils.data.DataLoader(\n",
    "    train_dataset,\n",
    "    batch_size=batch_size,\n",
    "    shuffle=True\n",
    ")\n",
    "\n",
    "val_loader = torch.utils.data.DataLoader(\n",
    "    val_dataset,\n",
    "    batch_size=batch_size,\n",
    "    shuffle=False\n",
    ")\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "KUyAkERd4pNu"
   },
   "source": [
    "# 理解接口：BatchNorm1d 示例"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "j17TXWWx4pNu",
    "outputId": "69251d29-61e5-4670-add4-f558616209e7"
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "torch.Size([20, 100])\n"
     ]
    }
   ],
   "source": [
    "# Minimal standalone example: BatchNorm1d over 100 features\n",
    "# preserves the (batch, features) input shape.\n",
    "import torch.nn as nn\n",
    "bn = nn.BatchNorm1d(100)\n",
    "sample = torch.randn(20, 100)\n",
    "print(bn(sample).shape)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "cFvbdkKd4pNu"
   },
   "source": [
    "# 搭建模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.152657Z",
     "start_time": "2025-06-26T01:43:33.148120Z"
    },
    "id": "UOfee2qW4pNu"
   },
   "outputs": [],
   "source": [
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "\n",
    "class NeuralNetwork(nn.Module):\n",
    "    \"\"\"VGG-style CNN for 32x32 RGB input: three conv stages + a two-layer classifier.\"\"\"\n",
    "\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "\n",
    "        # Three identical-shaped conv stages; each halves the spatial size:\n",
    "        # 32 -> 16 -> 8 -> 4.\n",
    "        self.conv_block1 = self._make_conv_block(3, 128)\n",
    "        self.conv_block2 = self._make_conv_block(128, 256)\n",
    "        self.conv_block3 = self._make_conv_block(256, 512)\n",
    "\n",
    "        # Classifier head over the flattened 512 x 4 x 4 feature map.\n",
    "        self.classifier = nn.Sequential(\n",
    "            nn.Linear(512 * 4 * 4, 1024),\n",
    "            nn.ReLU(),\n",
    "            nn.Linear(1024, 10)\n",
    "        )\n",
    "\n",
    "        self.init_weights()\n",
    "\n",
    "    @staticmethod\n",
    "    def _make_conv_block(in_channels, out_channels):\n",
    "        \"\"\"Two 3x3 Conv+BN+ReLU layers followed by 2x2 max pooling.\"\"\"\n",
    "        return nn.Sequential(\n",
    "            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),\n",
    "            nn.BatchNorm2d(out_channels),\n",
    "            nn.ReLU(),\n",
    "            nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1),\n",
    "            nn.BatchNorm2d(out_channels),\n",
    "            nn.ReLU(),\n",
    "            nn.MaxPool2d(kernel_size=2, stride=2)\n",
    "        )\n",
    "\n",
    "    def init_weights(self):\n",
    "        \"\"\"Xavier-uniform init for every conv/linear weight; zero all biases.\"\"\"\n",
    "        for module in self.modules():\n",
    "            if isinstance(module, (nn.Conv2d, nn.Linear)):\n",
    "                nn.init.xavier_uniform_(module.weight)\n",
    "                if module.bias is not None:\n",
    "                    nn.init.zeros_(module.bias)\n",
    "\n",
    "    def forward(self, x):\n",
    "        features = self.conv_block1(x)\n",
    "        features = self.conv_block2(features)\n",
    "        features = self.conv_block3(features)\n",
    "\n",
    "        # Flatten to (batch, 512*4*4) before the fully connected head.\n",
    "        flat = features.view(features.size(0), -1)\n",
    "\n",
    "        return self.classifier(flat)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.185031Z",
     "start_time": "2025-06-26T01:43:33.152657Z"
    },
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "5Ll8FXqD4pNv",
    "outputId": "b9abde01-e362-4cb4-b7bd-9802dece9b6f"
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "批次图像形状: torch.Size([64, 3, 32, 32])\n",
      "批次标签形状: torch.Size([64])\n",
      "----------------------------------------------------------------------------------------------------\n",
      "torch.Size([64, 10])\n"
     ]
    }
   ],
   "source": [
    "# Instantiate the model and sanity-check one forward pass on a real batch.\n",
    "model = NeuralNetwork()\n",
    "\n",
    "# Grab the first batch from the training loader.\n",
    "images, labels = next(iter(train_loader))\n",
    "\n",
    "print(\"批次图像形状:\", images.shape)\n",
    "print(\"批次标签形状:\", labels.shape)\n",
    "\n",
    "print('-'*100)\n",
    "\n",
    "# Inference only -- no gradient bookkeeping needed.\n",
    "with torch.no_grad():\n",
    "    outputs = model(images)\n",
    "\n",
    "print(outputs.shape)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.203053Z",
     "start_time": "2025-06-26T01:43:33.199532Z"
    },
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "V8zEsAla4pNv",
    "outputId": "37d5ed89-07ad-4fdc-e70d-1aed25939a5c"
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "需要求梯度的参数总量: 12979850\n",
      "模型总参数量: 12979850\n",
      "\n",
      "各层参数量明细:\n",
      "conv_block1.0.weight: 3456 参数\n",
      "conv_block1.0.bias: 128 参数\n",
      "conv_block1.1.weight: 128 参数\n",
      "conv_block1.1.bias: 128 参数\n",
      "conv_block1.3.weight: 147456 参数\n",
      "conv_block1.3.bias: 128 参数\n",
      "conv_block1.4.weight: 128 参数\n",
      "conv_block1.4.bias: 128 参数\n",
      "conv_block2.0.weight: 294912 参数\n",
      "conv_block2.0.bias: 256 参数\n",
      "conv_block2.1.weight: 256 参数\n",
      "conv_block2.1.bias: 256 参数\n",
      "conv_block2.3.weight: 589824 参数\n",
      "conv_block2.3.bias: 256 参数\n",
      "conv_block2.4.weight: 256 参数\n",
      "conv_block2.4.bias: 256 参数\n",
      "conv_block3.0.weight: 1179648 参数\n",
      "conv_block3.0.bias: 512 参数\n",
      "conv_block3.1.weight: 512 参数\n",
      "conv_block3.1.bias: 512 参数\n",
      "conv_block3.3.weight: 2359296 参数\n",
      "conv_block3.3.bias: 512 参数\n",
      "conv_block3.4.weight: 512 参数\n",
      "conv_block3.4.bias: 512 参数\n",
      "classifier.0.weight: 8388608 参数\n",
      "classifier.0.bias: 1024 参数\n",
      "classifier.2.weight: 10240 参数\n",
      "classifier.2.bias: 10 参数\n"
     ]
    }
   ],
   "source": [
    "# Total parameter counts for the model.\n",
    "# Count only the parameters that require gradients (trainable).\n",
    "total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
    "print(f\"需要求梯度的参数总量: {total_params}\")\n",
    "\n",
    "# Count every registered parameter, trainable or frozen.\n",
    "all_params = sum(p.numel() for p in model.parameters())\n",
    "print(f\"模型总参数量: {all_params}\")\n",
    "\n",
    "# Per-tensor breakdown by registered parameter name.\n",
    "print(\"\\n各层参数量明细:\")\n",
    "for name, param in model.named_parameters():\n",
    "    print(f\"{name}: {param.numel()} 参数\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "0XQuUiCe4pNv",
    "outputId": "8b972040-9501-414d-dd43-91c6d9ac75cd"
   },
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "294912"
      ]
     },
     "metadata": {},
     "execution_count": 21
    }
   ],
   "source": [
    "128*3*3*256"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "1B2dFDE14pNv"
   },
   "source": [
    "# 各层参数量明细（注：以下明细来自早期的 FashionMNIST 小模型，与上方打印出的当前 CIFAR-10 模型参数量不一致，仅供对照参考）:\n",
    "conv1.weight: 288 参数 3*3*1*32\n",
    "conv1.bias: 32 参数\n",
    "conv2.weight: 9216 参数 3*3*32*32\n",
    "conv2.bias: 32 参数  \n",
    "conv3.weight: 18432 参数 3*3*32*64\n",
    "conv3.bias: 64 参数\n",
    "conv4.weight: 36864 参数  3*3*64*64\n",
    "conv4.bias: 64 参数\n",
    "conv5.weight: 73728 参数\n",
    "conv5.bias: 128 参数\n",
    "conv6.weight: 147456 参数\n",
    "conv6.bias: 128 参数\n",
    "fc1.weight: 294912 参数 128*3*3*256\n",
    "fc1.bias: 256 参数\n",
    "fc2.weight: 2560 参数\n",
    "fc2.bias: 10 参数"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:33.217395Z",
     "start_time": "2025-06-26T01:43:33.203561Z"
    },
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "al9xZTJQ4pNv",
    "outputId": "92e47aaf-0503-47e1-a220-3cd67d705dd6"
   },
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "OrderedDict([('conv_block1.0.weight',\n",
       "              tensor([[[[-0.0098,  0.0049,  0.0526],\n",
       "                        [-0.0460,  0.0700, -0.0565],\n",
       "                        [ 0.0166, -0.0242, -0.0285]],\n",
       "              \n",
       "                       [[-0.0129,  0.0448,  0.0041],\n",
       "                        [ 0.0410,  0.0016, -0.0666],\n",
       "                        [ 0.0140,  0.0255,  0.0212]],\n",
       "              \n",
       "                       [[-0.0347,  0.0046,  0.0639],\n",
       "                        [ 0.0655, -0.0134,  0.0402],\n",
       "                        [ 0.0584, -0.0683,  0.0070]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0291, -0.0437,  0.0425],\n",
       "                        [-0.0599, -0.0433, -0.0200],\n",
       "                        [ 0.0386,  0.0217, -0.0057]],\n",
       "              \n",
       "                       [[-0.0349, -0.0333,  0.0314],\n",
       "                        [ 0.0265,  0.0150, -0.0095],\n",
       "                        [-0.0032,  0.0275, -0.0483]],\n",
       "              \n",
       "                       [[ 0.0437,  0.0702,  0.0347],\n",
       "                        [-0.0313,  0.0429,  0.0698],\n",
       "                        [ 0.0582, -0.0465,  0.0650]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0668, -0.0508,  0.0649],\n",
       "                        [ 0.0461, -0.0351, -0.0189],\n",
       "                        [-0.0447,  0.0290,  0.0276]],\n",
       "              \n",
       "                       [[-0.0587,  0.0159,  0.0152],\n",
       "                        [ 0.0377,  0.0154,  0.0047],\n",
       "                        [-0.0632, -0.0555, -0.0050]],\n",
       "              \n",
       "                       [[ 0.0546,  0.0647,  0.0114],\n",
       "                        [ 0.0233,  0.0184, -0.0032],\n",
       "                        [ 0.0415,  0.0257, -0.0683]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[-0.0662,  0.0354, -0.0055],\n",
       "                        [ 0.0397, -0.0652, -0.0525],\n",
       "                        [-0.0294, -0.0346, -0.0144]],\n",
       "              \n",
       "                       [[-0.0376,  0.0143,  0.0418],\n",
       "                        [ 0.0048,  0.0121,  0.0111],\n",
       "                        [ 0.0554, -0.0075,  0.0260]],\n",
       "              \n",
       "                       [[-0.0065,  0.0044,  0.0010],\n",
       "                        [ 0.0244, -0.0070, -0.0638],\n",
       "                        [-0.0667, -0.0088,  0.0605]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0110,  0.0069, -0.0621],\n",
       "                        [ 0.0687,  0.0443, -0.0575],\n",
       "                        [ 0.0352, -0.0234, -0.0395]],\n",
       "              \n",
       "                       [[ 0.0357, -0.0256,  0.0510],\n",
       "                        [-0.0712,  0.0397, -0.0463],\n",
       "                        [ 0.0665,  0.0491, -0.0446]],\n",
       "              \n",
       "                       [[-0.0398, -0.0220, -0.0035],\n",
       "                        [-0.0209, -0.0231,  0.0200],\n",
       "                        [-0.0507, -0.0362,  0.0695]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0648,  0.0307, -0.0389],\n",
       "                        [ 0.0290, -0.0315, -0.0372],\n",
       "                        [-0.0571,  0.0364,  0.0371]],\n",
       "              \n",
       "                       [[ 0.0174, -0.0642, -0.0275],\n",
       "                        [ 0.0641, -0.0572, -0.0672],\n",
       "                        [-0.0588,  0.0326,  0.0146]],\n",
       "              \n",
       "                       [[-0.0140,  0.0588,  0.0427],\n",
       "                        [ 0.0043,  0.0466,  0.0609],\n",
       "                        [ 0.0041, -0.0434,  0.0202]]]])),\n",
       "             ('conv_block1.0.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block1.1.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1.])),\n",
       "             ('conv_block1.1.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block1.1.running_mean',\n",
       "              tensor([-2.5182e-03, -2.7955e-03, -1.5788e-03,  4.3031e-03,  3.4034e-03,\n",
       "                      -4.9939e-03,  3.4257e-04, -5.9328e-03,  5.6099e-03,  2.9988e-05,\n",
       "                       5.5359e-03, -2.7297e-03, -1.9730e-03,  3.3358e-04, -1.7418e-03,\n",
       "                      -3.9453e-03,  5.5509e-03,  1.4524e-03,  2.3459e-03, -3.9476e-03,\n",
       "                       3.4182e-04, -4.9111e-03,  3.1099e-03, -6.5837e-03, -4.7605e-03,\n",
       "                      -2.1055e-03, -3.4292e-03,  3.9771e-04, -7.0108e-04, -1.6889e-03,\n",
       "                      -1.0769e-03, -5.2259e-04,  2.5871e-03, -2.7090e-03, -3.8915e-03,\n",
       "                      -4.4725e-04,  2.4061e-03, -1.1577e-03, -6.0825e-04,  3.4862e-04,\n",
       "                       6.3655e-04,  4.6481e-03, -6.7299e-03,  1.4581e-03, -7.5975e-04,\n",
       "                       3.8280e-03, -4.6412e-03, -3.7070e-03,  1.0021e-03, -1.3259e-03,\n",
       "                       1.9929e-03, -1.4798e-03, -4.9971e-03, -1.3249e-03,  1.5833e-03,\n",
       "                       6.0831e-04, -3.7204e-03,  4.9864e-04,  2.4141e-03,  1.7463e-03,\n",
       "                      -2.1976e-03,  2.9088e-04,  3.4373e-03,  1.6377e-03, -8.8479e-04,\n",
       "                      -2.6487e-03, -2.1137e-03, -2.7527e-03,  1.3938e-03, -1.8345e-03,\n",
       "                       3.1174e-04,  6.4996e-03,  4.3796e-03, -7.1163e-03, -2.1678e-03,\n",
       "                       5.1344e-03,  1.2791e-03,  4.2030e-03,  1.9926e-03, -1.6691e-03,\n",
       "                       7.0411e-04, -1.2399e-05,  5.6321e-03, -1.9487e-03,  4.1474e-03,\n",
       "                       1.7095e-03,  2.0096e-03, -1.2435e-04,  3.6592e-03, -1.5205e-03,\n",
       "                      -3.8355e-03,  3.7720e-03, -3.7109e-03, -6.7518e-04, -4.2070e-03,\n",
       "                       4.5308e-03,  3.9993e-03, -1.7525e-03,  6.3550e-03,  6.5667e-05,\n",
       "                      -1.0492e-03, -2.7463e-03,  4.1187e-04, -3.9064e-03,  1.7938e-03,\n",
       "                      -1.5059e-03,  4.6027e-03,  2.6018e-03, -2.3869e-03,  1.7053e-03,\n",
       "                       2.4704e-03,  7.7212e-04, -6.2809e-04, -4.0971e-03,  2.7209e-03,\n",
       "                      -1.1454e-03, -4.0508e-03, -3.5161e-03,  1.7973e-03,  1.6713e-03,\n",
       "                      -3.0750e-03,  1.4349e-05, -4.0190e-03,  7.0793e-03,  2.3679e-03,\n",
       "                       1.2257e-03,  8.9365e-04,  1.5252e-03])),\n",
       "             ('conv_block1.1.running_var',\n",
       "              tensor([0.9042, 0.9084, 0.9047, 0.9123, 0.9066, 0.9130, 0.9016, 0.9203, 0.9237,\n",
       "                      0.9032, 0.9164, 0.9072, 0.9023, 0.9017, 0.9025, 0.9097, 0.9212, 0.9059,\n",
       "                      0.9050, 0.9103, 0.9041, 0.9128, 0.9085, 0.9228, 0.9120, 0.9031, 0.9052,\n",
       "                      0.9019, 0.9023, 0.9027, 0.9012, 0.9011, 0.9083, 0.9051, 0.9056, 0.9005,\n",
       "                      0.9042, 0.9016, 0.9018, 0.9017, 0.9036, 0.9141, 0.9250, 0.9046, 0.9018,\n",
       "                      0.9097, 0.9086, 0.9120, 0.9031, 0.9042, 0.9037, 0.9029, 0.9165, 0.9022,\n",
       "                      0.9020, 0.9010, 0.9081, 0.9014, 0.9053, 0.9046, 0.9035, 0.9031, 0.9061,\n",
       "                      0.9015, 0.9026, 0.9041, 0.9045, 0.9063, 0.9022, 0.9036, 0.9017, 0.9281,\n",
       "                      0.9096, 0.9271, 0.9030, 0.9230, 0.9016, 0.9087, 0.9064, 0.9015, 0.9021,\n",
       "                      0.9025, 0.9183, 0.9030, 0.9155, 0.9041, 0.9029, 0.9009, 0.9070, 0.9014,\n",
       "                      0.9171, 0.9119, 0.9097, 0.9012, 0.9117, 0.9126, 0.9095, 0.9029, 0.9226,\n",
       "                      0.9021, 0.9011, 0.9075, 0.9023, 0.9123, 0.9028, 0.9038, 0.9139, 0.9060,\n",
       "                      0.9058, 0.9032, 0.9070, 0.9037, 0.9009, 0.9101, 0.9066, 0.9017, 0.9089,\n",
       "                      0.9065, 0.9021, 0.9036, 0.9102, 0.9031, 0.9094, 0.9250, 0.9033, 0.9026,\n",
       "                      0.9016, 0.9019])),\n",
       "             ('conv_block1.1.num_batches_tracked', tensor(1)),\n",
       "             ('conv_block1.3.weight',\n",
       "              tensor([[[[ 1.0754e-02, -1.4991e-02, -4.7782e-02],\n",
       "                        [-2.8154e-02, -9.3349e-03, -3.5469e-02],\n",
       "                        [-1.9423e-02,  5.4348e-03, -3.1849e-02]],\n",
       "              \n",
       "                       [[ 2.5600e-02, -3.7141e-02, -7.3130e-03],\n",
       "                        [-4.4852e-02, -2.7069e-02, -4.8787e-03],\n",
       "                        [ 4.3217e-02,  1.0349e-02,  2.0256e-02]],\n",
       "              \n",
       "                       [[ 2.9773e-03,  2.7501e-02,  3.0099e-02],\n",
       "                        [-4.5165e-02, -4.4703e-02, -2.8067e-02],\n",
       "                        [-1.7528e-02, -3.9135e-02, -9.3383e-03]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 6.4725e-03,  4.4085e-03, -5.5485e-03],\n",
       "                        [-4.5885e-02, -7.7275e-03, -6.2458e-03],\n",
       "                        [ 8.8602e-03,  3.2844e-02,  1.4179e-02]],\n",
       "              \n",
       "                       [[-3.3266e-02, -1.2891e-02,  8.8592e-05],\n",
       "                        [ 4.4252e-02, -3.7228e-02, -4.7079e-02],\n",
       "                        [-3.7642e-02,  5.0098e-03, -9.5495e-03]],\n",
       "              \n",
       "                       [[ 2.1336e-02,  3.8166e-02,  1.8606e-02],\n",
       "                        [-1.6681e-02, -3.5651e-02,  2.4635e-02],\n",
       "                        [ 2.5112e-02, -1.7875e-02, -4.9185e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 3.5264e-02,  2.8956e-02, -4.4898e-02],\n",
       "                        [ 4.7651e-02,  3.9556e-03,  3.1041e-02],\n",
       "                        [ 2.2892e-02,  4.8144e-03,  1.0636e-03]],\n",
       "              \n",
       "                       [[-1.8247e-02, -3.3915e-02, -2.6369e-02],\n",
       "                        [ 3.2149e-02, -3.0955e-02,  3.3490e-02],\n",
       "                        [ 1.1931e-02, -4.8367e-02,  2.9479e-02]],\n",
       "              \n",
       "                       [[ 1.4875e-02, -4.5784e-02,  1.6731e-02],\n",
       "                        [-4.3496e-02, -4.0670e-02,  1.5576e-02],\n",
       "                        [ 3.0999e-02,  5.6307e-03,  4.2016e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-4.6874e-02, -1.6508e-02, -4.2791e-03],\n",
       "                        [ 1.4554e-02,  1.7306e-02, -4.8118e-02],\n",
       "                        [ 1.6385e-02, -3.0185e-02, -4.1266e-02]],\n",
       "              \n",
       "                       [[ 1.4422e-02, -3.6444e-03,  1.0610e-02],\n",
       "                        [ 1.7522e-02,  1.8070e-02,  2.2527e-02],\n",
       "                        [-2.3959e-02, -2.9248e-02,  2.6040e-02]],\n",
       "              \n",
       "                       [[-4.7490e-02,  2.1760e-02,  1.4095e-02],\n",
       "                        [-4.2600e-02,  3.5226e-02,  3.0719e-02],\n",
       "                        [ 3.2399e-02,  4.5303e-02, -1.7787e-03]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 2.6525e-02,  2.5709e-02,  1.2038e-02],\n",
       "                        [ 2.4484e-02,  5.2543e-03, -3.1596e-02],\n",
       "                        [-2.5957e-03, -6.1648e-03,  2.9431e-02]],\n",
       "              \n",
       "                       [[ 4.4799e-02,  1.8048e-02, -3.4711e-03],\n",
       "                        [-4.5652e-02,  4.5296e-03,  3.3882e-02],\n",
       "                        [ 4.6138e-03, -6.8856e-03, -4.9557e-03]],\n",
       "              \n",
       "                       [[-3.3716e-02,  1.4189e-02,  5.7407e-03],\n",
       "                        [-2.2130e-02,  2.1025e-03,  8.3221e-03],\n",
       "                        [-1.9691e-02,  2.3031e-02,  3.9485e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 3.5904e-02,  8.3314e-03,  4.5423e-02],\n",
       "                        [-1.3058e-02, -3.4440e-02, -1.6000e-04],\n",
       "                        [ 4.3102e-02, -4.9305e-02, -1.0359e-02]],\n",
       "              \n",
       "                       [[-2.9444e-02, -2.9898e-03,  3.7209e-02],\n",
       "                        [-9.8201e-03, -4.9711e-02,  5.5643e-03],\n",
       "                        [ 3.3852e-02, -3.0365e-02,  2.5096e-02]],\n",
       "              \n",
       "                       [[ 3.4333e-02,  3.2479e-02, -3.6166e-02],\n",
       "                        [-3.2179e-02,  3.6145e-02, -2.8532e-03],\n",
       "                        [-4.6656e-02, -2.6045e-02, -2.4590e-02]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[-1.6810e-03, -2.4101e-02, -2.5302e-02],\n",
       "                        [-2.1496e-02,  4.1274e-02, -1.8747e-02],\n",
       "                        [-1.1437e-02,  1.0376e-02, -2.9841e-03]],\n",
       "              \n",
       "                       [[ 2.5702e-02, -2.0866e-02, -2.4005e-02],\n",
       "                        [ 4.9499e-04, -2.5917e-02,  2.9738e-02],\n",
       "                        [-2.3245e-02, -5.2051e-03, -3.4636e-02]],\n",
       "              \n",
       "                       [[-4.9052e-02,  1.4725e-02,  2.1608e-02],\n",
       "                        [-5.0377e-02,  4.0782e-02,  4.3347e-02],\n",
       "                        [ 2.8003e-02,  3.7912e-02,  3.8580e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 4.3279e-02,  3.8908e-02, -3.6493e-02],\n",
       "                        [-5.1120e-03, -4.9591e-04, -2.4944e-02],\n",
       "                        [-6.3172e-03,  4.6028e-02,  5.8114e-03]],\n",
       "              \n",
       "                       [[ 7.6532e-03,  1.5357e-02, -8.5988e-03],\n",
       "                        [ 1.2333e-02, -2.6314e-02, -1.3659e-02],\n",
       "                        [ 4.5832e-02,  1.1287e-02,  2.5714e-02]],\n",
       "              \n",
       "                       [[-4.2828e-02, -1.7516e-02,  1.6065e-02],\n",
       "                        [ 1.5008e-02, -3.9545e-03, -3.1187e-02],\n",
       "                        [ 3.8711e-02, -3.2684e-02, -3.2188e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 5.0540e-02, -3.3669e-02,  1.6783e-02],\n",
       "                        [ 1.9730e-02, -2.4725e-02, -4.1416e-02],\n",
       "                        [-2.9827e-02, -4.9451e-02, -4.3332e-02]],\n",
       "              \n",
       "                       [[ 1.3826e-02, -1.4543e-03,  1.7924e-02],\n",
       "                        [-6.8338e-03,  3.7167e-02,  2.5997e-02],\n",
       "                        [-4.4202e-02, -4.2391e-03, -4.4779e-02]],\n",
       "              \n",
       "                       [[-3.2824e-03,  2.0196e-02, -4.9605e-02],\n",
       "                        [-1.9905e-02, -4.9178e-02, -6.2811e-03],\n",
       "                        [ 2.8396e-02,  3.4284e-02, -1.8836e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 2.2557e-02,  3.4663e-03, -3.3722e-02],\n",
       "                        [-1.6555e-02,  2.4021e-02, -2.3919e-02],\n",
       "                        [-1.4198e-02,  3.7281e-02,  3.4431e-02]],\n",
       "              \n",
       "                       [[-1.4980e-02, -1.1715e-02,  6.8837e-03],\n",
       "                        [ 5.0661e-02, -1.5040e-02,  4.0878e-02],\n",
       "                        [-1.7498e-02, -1.4885e-02, -4.4864e-02]],\n",
       "              \n",
       "                       [[ 9.0076e-03,  1.2507e-02,  5.0159e-02],\n",
       "                        [ 2.8598e-02, -2.9992e-03, -1.5977e-02],\n",
       "                        [ 1.1751e-02,  1.8024e-02, -4.6464e-03]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 2.8523e-02,  6.9443e-03,  4.1416e-02],\n",
       "                        [-2.5737e-03,  2.4813e-02, -3.5789e-02],\n",
       "                        [-1.9026e-02, -1.2767e-02, -5.0288e-02]],\n",
       "              \n",
       "                       [[ 4.9363e-02, -1.4497e-02,  2.6027e-02],\n",
       "                        [-4.0765e-02,  1.4632e-02, -8.6514e-03],\n",
       "                        [ 2.8428e-02, -7.7671e-03, -1.6693e-02]],\n",
       "              \n",
       "                       [[ 3.1165e-02, -2.8768e-02,  4.5567e-02],\n",
       "                        [-3.3843e-02, -3.7156e-02,  2.9670e-02],\n",
       "                        [ 4.4025e-05, -2.5032e-02,  2.4964e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-4.9729e-02,  2.5081e-02,  2.2403e-02],\n",
       "                        [-6.1607e-03,  3.0788e-02, -3.9390e-02],\n",
       "                        [-3.4181e-02, -1.5915e-02, -1.9025e-02]],\n",
       "              \n",
       "                       [[-4.8885e-02, -2.6491e-03,  3.8601e-02],\n",
       "                        [-3.2981e-02,  1.6085e-02, -2.2478e-02],\n",
       "                        [-2.4350e-03, -2.4042e-03,  1.7677e-02]],\n",
       "              \n",
       "                       [[ 3.2755e-02, -4.1925e-02, -2.6922e-02],\n",
       "                        [-1.2203e-02, -2.6840e-02,  4.3364e-02],\n",
       "                        [-2.3772e-02,  1.1600e-03, -1.7288e-02]]]])),\n",
       "             ('conv_block1.3.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block1.4.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1.])),\n",
       "             ('conv_block1.4.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block1.4.running_mean',\n",
       "              tensor([-0.0578, -0.0071,  0.0030,  0.0085,  0.0532,  0.0041, -0.0520, -0.0509,\n",
       "                       0.0058, -0.0161, -0.0265,  0.0262, -0.0142, -0.0333, -0.0301,  0.0164,\n",
       "                       0.0356,  0.0362,  0.0129, -0.0011,  0.0103, -0.0051, -0.0249, -0.0028,\n",
       "                      -0.0192, -0.0138, -0.0365, -0.0277,  0.0152, -0.0090,  0.0477,  0.0758,\n",
       "                      -0.0468,  0.0255,  0.0025, -0.0078, -0.0391, -0.0262,  0.0687, -0.0137,\n",
       "                      -0.0320, -0.0403,  0.0034, -0.0347, -0.0180,  0.0613, -0.0324, -0.0853,\n",
       "                      -0.0216,  0.0428,  0.0684, -0.0091,  0.0375,  0.0248, -0.0586, -0.0306,\n",
       "                      -0.0524,  0.0094, -0.0546,  0.0151,  0.0307,  0.0441, -0.0498,  0.0283,\n",
       "                      -0.0382, -0.0553,  0.0676, -0.0481,  0.0861,  0.0165,  0.0556, -0.0537,\n",
       "                       0.0769,  0.0380, -0.0541, -0.0067, -0.0791,  0.0210,  0.0038,  0.0153,\n",
       "                       0.0676, -0.0558,  0.0109, -0.0548, -0.0542, -0.0038, -0.0332, -0.0445,\n",
       "                      -0.0731, -0.0103,  0.0733, -0.0090,  0.0744,  0.0423, -0.0034, -0.0238,\n",
       "                      -0.0560,  0.0359, -0.0216,  0.0051, -0.0246,  0.0443, -0.0018, -0.0304,\n",
       "                      -0.0208,  0.0122, -0.0348, -0.0012, -0.0407, -0.0250, -0.0588,  0.0284,\n",
       "                       0.0095, -0.0318,  0.0199, -0.0276,  0.0181, -0.0439,  0.0205,  0.0235,\n",
       "                      -0.0304, -0.0540, -0.0257,  0.0673,  0.0163,  0.0278, -0.0182, -0.0277])),\n",
       "             ('conv_block1.4.running_var',\n",
       "              tensor([0.9209, 0.9263, 0.9237, 0.9321, 0.9649, 0.9110, 0.9227, 0.9510, 0.9265,\n",
       "                      0.9166, 0.9194, 1.0009, 0.9304, 0.9276, 0.9360, 0.9344, 0.9292, 0.9478,\n",
       "                      0.9276, 0.9175, 0.9213, 0.9226, 0.9248, 0.9307, 0.9161, 0.9567, 0.9139,\n",
       "                      0.9136, 0.9762, 0.9141, 0.9229, 0.9322, 0.9235, 0.9166, 0.9532, 0.9233,\n",
       "                      0.9657, 0.9234, 0.9336, 0.9599, 0.9431, 0.9367, 0.9223, 0.9331, 0.9371,\n",
       "                      0.9259, 0.9327, 0.9282, 0.9320, 0.9478, 0.9398, 0.9269, 0.9518, 0.9230,\n",
       "                      0.9139, 0.9145, 0.9229, 0.9157, 0.9347, 0.9269, 0.9221, 0.9266, 0.9177,\n",
       "                      0.9200, 0.9349, 0.9272, 0.9300, 0.9308, 0.9367, 0.9222, 0.9268, 0.9331,\n",
       "                      0.9367, 0.9504, 0.9450, 0.9106, 0.9571, 0.9724, 0.9505, 0.9135, 0.9483,\n",
       "                      0.9226, 0.9431, 0.9559, 0.9377, 0.9195, 0.9325, 0.9233, 0.9318, 0.9297,\n",
       "                      0.9396, 0.9146, 0.9740, 0.9236, 0.9131, 0.9153, 0.9350, 0.9139, 0.9165,\n",
       "                      0.9348, 0.9180, 0.9673, 0.9251, 0.9253, 0.9263, 0.9230, 0.9325, 0.9314,\n",
       "                      0.9348, 0.9130, 0.9730, 0.9171, 0.9467, 0.9200, 0.9161, 0.9306, 0.9530,\n",
       "                      0.9119, 0.9136, 0.9340, 0.9185, 0.9255, 0.9230, 0.9270, 0.9302, 0.9224,\n",
       "                      0.9155, 0.9343])),\n",
       "             ('conv_block1.4.num_batches_tracked', tensor(1)),\n",
       "             ('conv_block2.0.weight',\n",
       "              tensor([[[[-2.2357e-03, -1.9401e-02, -1.6195e-02],\n",
       "                        [ 3.9839e-02, -3.9864e-02, -1.8264e-02],\n",
       "                        [ 2.8714e-02,  2.4225e-02,  2.1078e-02]],\n",
       "              \n",
       "                       [[ 6.4388e-03,  2.7148e-03, -2.5950e-02],\n",
       "                        [-1.4002e-02, -2.0557e-02, -1.8397e-02],\n",
       "                        [-1.2283e-02, -5.5224e-03, -1.1681e-02]],\n",
       "              \n",
       "                       [[ 1.1795e-02, -8.0230e-03,  3.2687e-02],\n",
       "                        [ 4.0715e-02, -1.4871e-02,  2.5972e-03],\n",
       "                        [ 3.6026e-02,  3.3418e-03, -1.5331e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 3.1982e-02,  2.7928e-02, -2.2880e-02],\n",
       "                        [-2.3568e-02,  3.3433e-02,  2.6635e-02],\n",
       "                        [-1.1356e-02,  2.9850e-02,  7.0306e-03]],\n",
       "              \n",
       "                       [[-3.7248e-02, -4.1295e-02,  2.5222e-02],\n",
       "                        [ 1.0001e-02, -3.9457e-02, -2.2495e-02],\n",
       "                        [-5.7542e-03, -8.0857e-03,  2.9912e-02]],\n",
       "              \n",
       "                       [[-3.7138e-02,  1.6976e-02, -3.1359e-02],\n",
       "                        [-5.7323e-03,  1.7262e-02, -2.6545e-02],\n",
       "                        [-5.0090e-03, -1.3575e-02, -1.0325e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 1.2012e-02,  8.9401e-03, -3.6635e-02],\n",
       "                        [-2.1963e-02,  3.9799e-02,  2.5801e-03],\n",
       "                        [-3.8879e-02,  1.0218e-02,  9.2964e-03]],\n",
       "              \n",
       "                       [[ 1.2474e-02, -3.7035e-02,  1.9812e-02],\n",
       "                        [ 3.9583e-02,  3.3711e-02, -1.3790e-02],\n",
       "                        [-2.6941e-02,  4.8891e-04, -3.0734e-02]],\n",
       "              \n",
       "                       [[ 8.4989e-03, -2.1497e-02,  3.2675e-02],\n",
       "                        [-1.9254e-02,  3.2765e-02, -8.6032e-03],\n",
       "                        [-9.4799e-03, -1.7917e-02,  1.8524e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-2.2115e-02,  3.6134e-02, -4.1401e-03],\n",
       "                        [ 1.7535e-02, -1.9366e-02,  6.1314e-03],\n",
       "                        [ 2.1442e-02,  1.3018e-02, -2.6148e-02]],\n",
       "              \n",
       "                       [[-1.5576e-02,  1.8609e-02,  2.0684e-03],\n",
       "                        [ 2.0602e-02, -2.3489e-02, -2.1377e-02],\n",
       "                        [-3.6937e-03, -3.3733e-02, -2.6530e-02]],\n",
       "              \n",
       "                       [[ 3.8196e-02, -3.6672e-02, -3.5922e-02],\n",
       "                        [-2.9901e-02, -1.0263e-02, -1.1434e-02],\n",
       "                        [-1.3880e-02,  2.1525e-02, -1.6152e-03]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 8.0413e-03,  9.8009e-03,  5.2364e-03],\n",
       "                        [-1.2034e-02,  2.2903e-02,  4.7979e-03],\n",
       "                        [ 1.7210e-02,  8.8730e-03,  1.3458e-02]],\n",
       "              \n",
       "                       [[ 9.1133e-03,  6.4692e-03,  1.7988e-02],\n",
       "                        [-3.6500e-02, -1.5361e-02, -3.4904e-02],\n",
       "                        [ 6.8600e-03,  3.9470e-02,  2.2997e-02]],\n",
       "              \n",
       "                       [[-3.0263e-02,  3.0984e-02,  3.7290e-02],\n",
       "                        [ 8.9088e-03,  3.6577e-03, -2.5419e-02],\n",
       "                        [-2.8823e-02, -4.0984e-03, -2.5937e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-3.8148e-02, -6.7434e-03,  1.5746e-02],\n",
       "                        [ 2.1068e-02,  2.0472e-03, -2.4149e-02],\n",
       "                        [-3.7185e-02,  2.0401e-02, -4.0401e-02]],\n",
       "              \n",
       "                       [[ 1.6540e-02,  1.3596e-02,  5.5322e-03],\n",
       "                        [-3.9706e-02,  3.9100e-02,  2.1550e-02],\n",
       "                        [-6.5270e-03, -2.5908e-02, -4.3790e-03]],\n",
       "              \n",
       "                       [[ 4.3763e-03,  2.4398e-03,  6.0046e-03],\n",
       "                        [-6.6022e-03,  7.5924e-03,  2.8795e-02],\n",
       "                        [-3.1004e-02,  2.6779e-02,  1.2760e-02]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[-7.0812e-03, -2.6295e-02,  1.2505e-03],\n",
       "                        [ 1.8441e-03, -4.9487e-03,  4.0092e-02],\n",
       "                        [-2.7972e-02,  5.6619e-05,  3.2243e-02]],\n",
       "              \n",
       "                       [[ 1.6189e-02,  1.7299e-02, -3.2668e-02],\n",
       "                        [-3.5531e-02,  3.7249e-02,  1.1518e-02],\n",
       "                        [ 2.2873e-02, -1.0548e-02,  5.2312e-04]],\n",
       "              \n",
       "                       [[-2.7056e-02,  2.3797e-02,  2.1146e-02],\n",
       "                        [-1.8498e-02,  1.7071e-02, -3.4448e-02],\n",
       "                        [-5.2116e-03,  2.2650e-02,  2.1049e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 1.5197e-02, -3.0633e-02, -4.4611e-03],\n",
       "                        [ 2.3487e-02, -9.4760e-03,  1.1699e-03],\n",
       "                        [ 3.6428e-02, -2.6287e-02,  1.9119e-02]],\n",
       "              \n",
       "                       [[ 1.0299e-02, -2.3642e-02,  3.0693e-02],\n",
       "                        [-3.3872e-02,  3.2008e-02,  1.1288e-02],\n",
       "                        [ 1.7778e-02,  2.5693e-02, -8.0968e-04]],\n",
       "              \n",
       "                       [[-3.5810e-02,  3.3675e-02,  6.5868e-03],\n",
       "                        [ 1.4771e-02,  2.3011e-02, -4.7475e-03],\n",
       "                        [-2.5335e-02,  4.1541e-02, -8.1577e-03]]],\n",
       "              \n",
       "              \n",
       "                      [[[-1.8010e-02, -2.8801e-02,  3.9097e-02],\n",
       "                        [ 1.2625e-02, -9.8582e-03, -2.0828e-03],\n",
       "                        [ 3.4630e-02, -4.1196e-02,  2.9805e-02]],\n",
       "              \n",
       "                       [[-4.3121e-03,  6.3874e-03,  2.0665e-02],\n",
       "                        [-1.6860e-02,  1.1343e-02,  1.2753e-02],\n",
       "                        [-8.2398e-03,  1.4418e-02, -1.4019e-02]],\n",
       "              \n",
       "                       [[-1.5802e-02, -2.6336e-02,  2.2570e-02],\n",
       "                        [-6.9298e-03,  4.1156e-02,  2.4942e-02],\n",
       "                        [-1.4198e-02,  2.0417e-02,  1.5543e-03]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-1.6409e-02,  3.2942e-02, -3.7538e-02],\n",
       "                        [-2.7503e-02,  1.2020e-02,  4.0052e-03],\n",
       "                        [ 3.1049e-02, -8.2903e-03, -4.1326e-02]],\n",
       "              \n",
       "                       [[ 3.8205e-02,  1.4094e-02,  3.2452e-02],\n",
       "                        [ 1.7958e-02, -2.0435e-04, -3.2376e-02],\n",
       "                        [-3.9802e-02, -1.5428e-02,  1.4036e-02]],\n",
       "              \n",
       "                       [[ 2.9640e-02, -2.0633e-02,  1.1454e-02],\n",
       "                        [ 1.6572e-02,  1.6328e-02, -4.1356e-02],\n",
       "                        [-1.0125e-02,  2.7299e-02,  3.9183e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[-1.6947e-02, -5.0176e-03,  3.9704e-02],\n",
       "                        [ 2.0435e-02, -2.2098e-02,  3.8657e-02],\n",
       "                        [-2.1792e-02,  9.0636e-03, -9.9477e-04]],\n",
       "              \n",
       "                       [[-1.1552e-02, -2.1535e-02,  3.2391e-02],\n",
       "                        [ 1.4220e-02,  1.5012e-02, -1.5453e-02],\n",
       "                        [ 1.3170e-02, -5.2113e-03, -3.8785e-02]],\n",
       "              \n",
       "                       [[-1.8273e-02,  2.8382e-02, -4.1081e-02],\n",
       "                        [-7.7343e-03,  3.0582e-02, -3.3662e-02],\n",
       "                        [-3.5391e-02,  2.2241e-02,  1.7997e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-3.2946e-02,  1.8709e-04,  4.0953e-03],\n",
       "                        [ 2.9720e-02, -1.8864e-02, -3.0070e-02],\n",
       "                        [-3.5093e-02,  8.8470e-03,  1.4818e-02]],\n",
       "              \n",
       "                       [[ 3.7756e-03, -1.2387e-02,  3.4623e-02],\n",
       "                        [ 3.8505e-02, -8.5876e-03, -1.3481e-02],\n",
       "                        [-4.0719e-03,  5.1510e-03, -8.5382e-04]],\n",
       "              \n",
       "                       [[ 4.1010e-02,  1.5027e-02, -8.3337e-03],\n",
       "                        [ 1.9354e-02, -1.8683e-02, -3.8041e-02],\n",
       "                        [-3.0060e-02,  2.6721e-02, -1.8595e-02]]]])),\n",
       "             ('conv_block2.0.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block2.1.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1.])),\n",
       "             ('conv_block2.1.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block2.1.running_mean',\n",
       "              tensor([ 0.0326, -0.0414, -0.0357,  0.0301, -0.0293,  0.0466,  0.0574, -0.0503,\n",
       "                       0.0253,  0.0645, -0.0147,  0.0172, -0.0235,  0.0526, -0.0318, -0.1188,\n",
       "                       0.0064,  0.0407, -0.0620, -0.0074, -0.0066, -0.1215, -0.0050, -0.0216,\n",
       "                      -0.0510, -0.0276,  0.0842, -0.0086, -0.0352, -0.0175, -0.0397, -0.0489,\n",
       "                       0.0864,  0.0385,  0.0472, -0.0233, -0.0275,  0.0029, -0.0264,  0.0219,\n",
       "                      -0.0494,  0.0274, -0.0260,  0.0325, -0.0090,  0.0096,  0.0068, -0.0193,\n",
       "                      -0.0116, -0.0398,  0.0351, -0.0030, -0.0634,  0.0089,  0.1002, -0.0006,\n",
       "                      -0.0667, -0.0124,  0.0213, -0.0104,  0.0108,  0.0679, -0.0089,  0.0878,\n",
       "                      -0.0188, -0.0289, -0.0820,  0.0479,  0.0067, -0.0573, -0.0634, -0.0071,\n",
       "                      -0.0697, -0.0385, -0.0866, -0.0403, -0.0194, -0.0375, -0.0835,  0.0025,\n",
       "                       0.0066, -0.0566, -0.0085, -0.0316,  0.0060,  0.0552, -0.0321,  0.1052,\n",
       "                      -0.0478, -0.0574, -0.0076, -0.0479,  0.0043, -0.0215,  0.0114, -0.0832,\n",
       "                      -0.0417,  0.0367, -0.0620,  0.0484, -0.0098, -0.0108, -0.0156,  0.0735,\n",
       "                      -0.0842, -0.0402, -0.0299, -0.0851,  0.0700, -0.0193, -0.0682, -0.0223,\n",
       "                       0.0856, -0.0400,  0.0118, -0.0124,  0.0069, -0.0248,  0.0698,  0.0266,\n",
       "                       0.0216, -0.0633,  0.0265,  0.0207,  0.0280, -0.0028,  0.0524,  0.0089,\n",
       "                       0.0810, -0.0274,  0.0232, -0.0128, -0.0488,  0.0156, -0.0119,  0.0197,\n",
       "                       0.0320, -0.0504, -0.0070,  0.0565,  0.0214, -0.0343, -0.0649, -0.0391,\n",
       "                      -0.0316, -0.0353, -0.0070,  0.1225, -0.0250,  0.0085, -0.0929,  0.0359,\n",
       "                      -0.1092, -0.0299,  0.0297, -0.0207,  0.0397, -0.0353, -0.0721, -0.0501,\n",
       "                       0.0988,  0.0595,  0.0044, -0.0244, -0.0011, -0.0397, -0.0124,  0.0195,\n",
       "                      -0.0550, -0.0239,  0.0310,  0.0591, -0.0348,  0.0197,  0.0164,  0.0263,\n",
       "                       0.0391,  0.0669,  0.0086,  0.0136,  0.0224, -0.0464, -0.0716, -0.0016,\n",
       "                      -0.0527,  0.0844,  0.0302, -0.0862,  0.0019, -0.0195,  0.0513, -0.0014,\n",
       "                       0.0557, -0.0318, -0.0279,  0.0302, -0.0282,  0.0532, -0.0748,  0.1653,\n",
       "                       0.0878, -0.0529, -0.0749,  0.0111, -0.0669, -0.0100, -0.0155,  0.1055,\n",
       "                      -0.0157, -0.0444,  0.0355, -0.0184, -0.0387,  0.0044, -0.0091,  0.0425,\n",
       "                      -0.0332,  0.0669,  0.0255,  0.0004,  0.0370, -0.0609,  0.0723,  0.0239,\n",
       "                      -0.0003,  0.0226,  0.0337, -0.0800,  0.0646,  0.0435, -0.0351,  0.0168,\n",
       "                       0.0034,  0.0424, -0.0118, -0.0315,  0.0361, -0.0308,  0.0052, -0.0190,\n",
       "                      -0.0718,  0.0590, -0.0108, -0.0293,  0.0239, -0.0242, -0.0723, -0.1391,\n",
       "                      -0.0446,  0.0073, -0.0193,  0.0503,  0.0154,  0.1142,  0.0342,  0.0147])),\n",
       "             ('conv_block2.1.running_var',\n",
       "              tensor([0.9273, 0.9236, 0.9225, 0.9342, 0.9382, 0.9381, 0.9378, 0.9287, 0.9265,\n",
       "                      0.9450, 0.9363, 0.9358, 0.9233, 0.9334, 0.9206, 0.9361, 0.9259, 0.9295,\n",
       "                      0.9597, 0.9428, 0.9184, 0.9570, 0.9277, 0.9225, 0.9323, 0.9397, 0.9421,\n",
       "                      0.9743, 0.9276, 0.9337, 0.9253, 0.9194, 0.9355, 0.9427, 0.9396, 0.9267,\n",
       "                      0.9383, 0.9246, 0.9543, 0.9239, 0.9295, 0.9432, 0.9434, 0.9301, 0.9206,\n",
       "                      0.9171, 0.9400, 0.9274, 0.9704, 0.9210, 0.9286, 0.9305, 0.9286, 0.9257,\n",
       "                      0.9373, 0.9252, 0.9351, 0.9478, 0.9274, 0.9382, 0.9838, 0.9262, 0.9528,\n",
       "                      0.9315, 0.9273, 0.9444, 0.9365, 0.9398, 0.9235, 0.9295, 0.9356, 0.9338,\n",
       "                      0.9254, 0.9266, 0.9460, 0.9273, 0.9203, 0.9684, 0.9363, 0.9280, 0.9236,\n",
       "                      0.9492, 0.9369, 0.9290, 0.9327, 0.9269, 0.9309, 0.9397, 0.9278, 0.9393,\n",
       "                      0.9241, 0.9410, 0.9266, 0.9256, 0.9383, 0.9496, 0.9586, 0.9463, 0.9276,\n",
       "                      0.9274, 0.9351, 0.9285, 0.9368, 0.9403, 0.9194, 0.9259, 0.9385, 0.9330,\n",
       "                      0.9343, 0.9308, 0.9415, 0.9558, 0.9298, 0.9278, 0.9339, 0.9251, 0.9554,\n",
       "                      0.9205, 0.9301, 0.9304, 0.9292, 0.9228, 0.9187, 0.9377, 0.9195, 0.9291,\n",
       "                      0.9334, 0.9523, 0.9266, 0.9207, 0.9293, 0.9323, 0.9212, 0.9277, 0.9344,\n",
       "                      0.9255, 0.9300, 0.9256, 0.9308, 0.9375, 0.9206, 0.9321, 0.9390, 0.9269,\n",
       "                      0.9309, 0.9315, 0.9266, 0.9412, 0.9390, 0.9319, 0.9448, 0.9349, 0.9410,\n",
       "                      0.9327, 0.9284, 0.9476, 0.9343, 0.9208, 0.9307, 0.9526, 0.9405, 0.9290,\n",
       "                      0.9304, 0.9266, 0.9213, 0.9209, 0.9263, 0.9237, 0.9355, 0.9737, 0.9237,\n",
       "                      0.9222, 0.9428, 0.9246, 0.9428, 0.9426, 0.9285, 0.9270, 0.9332, 0.9351,\n",
       "                      0.9295, 0.9261, 0.9334, 0.9320, 0.9275, 0.9469, 0.9280, 0.9387, 0.9217,\n",
       "                      0.9260, 0.9261, 0.9345, 0.9369, 0.9294, 0.9185, 0.9283, 0.9247, 0.9315,\n",
       "                      0.9360, 0.9644, 0.9265, 0.9350, 0.9259, 0.9221, 0.9397, 0.9350, 0.9467,\n",
       "                      0.9358, 0.9431, 0.9224, 0.9297, 0.9262, 0.9258, 0.9359, 0.9217, 0.9345,\n",
       "                      0.9502, 0.9326, 0.9274, 0.9265, 0.9384, 0.9283, 0.9493, 0.9273, 0.9235,\n",
       "                      0.9298, 0.9330, 0.9408, 0.9416, 0.9415, 0.9314, 0.9279, 0.9293, 0.9294,\n",
       "                      0.9191, 0.9354, 0.9478, 0.9285, 0.9301, 0.9179, 0.9485, 0.9360, 0.9220,\n",
       "                      0.9225, 0.9172, 0.9164, 0.9253, 0.9371, 0.9301, 0.9222, 0.9331, 0.9271,\n",
       "                      0.9288, 0.9497, 0.9183, 0.9322])),\n",
       "             ('conv_block2.1.num_batches_tracked', tensor(1)),\n",
       "             ('conv_block2.3.weight',\n",
       "              tensor([[[[ 1.8768e-02, -5.8644e-03, -2.0564e-02],\n",
       "                        [-3.2264e-03,  2.8432e-02,  2.4149e-02],\n",
       "                        [ 6.9282e-03, -1.2540e-02,  1.3594e-03]],\n",
       "              \n",
       "                       [[-1.5775e-02,  2.4082e-02, -2.4297e-02],\n",
       "                        [ 2.1282e-02,  4.5771e-03, -3.1856e-02],\n",
       "                        [ 7.5475e-04, -1.5688e-02,  7.9425e-03]],\n",
       "              \n",
       "                       [[ 2.5652e-02, -3.0750e-03,  2.4419e-02],\n",
       "                        [ 1.8312e-02,  3.5186e-02, -2.9556e-02],\n",
       "                        [-2.2855e-02, -3.0345e-02, -9.8150e-03]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-3.1778e-02, -2.0450e-02,  1.8128e-04],\n",
       "                        [-2.4268e-02, -2.3422e-02, -3.5858e-02],\n",
       "                        [ 9.2721e-04,  3.2555e-02, -1.0458e-03]],\n",
       "              \n",
       "                       [[-2.9123e-02, -8.7798e-03, -1.1447e-03],\n",
       "                        [ 7.8811e-03, -2.0877e-02,  1.3878e-02],\n",
       "                        [-1.2912e-02, -6.9588e-03, -8.1294e-03]],\n",
       "              \n",
       "                       [[ 3.2028e-02,  2.9247e-02,  1.1497e-02],\n",
       "                        [-3.8820e-03,  6.0294e-03, -2.9544e-02],\n",
       "                        [ 2.7587e-02, -1.2274e-02,  1.6475e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[-3.5195e-02,  1.0773e-02,  3.0717e-02],\n",
       "                        [-4.2049e-03, -1.9465e-02,  2.1817e-02],\n",
       "                        [-9.4598e-03,  1.7134e-03, -2.9634e-02]],\n",
       "              \n",
       "                       [[-1.6484e-02,  3.0895e-02, -5.8831e-03],\n",
       "                        [ 1.8803e-02, -3.4956e-02,  1.5656e-02],\n",
       "                        [ 3.4989e-02, -3.2267e-02,  1.3689e-02]],\n",
       "              \n",
       "                       [[-2.7834e-02, -2.6814e-02,  2.5125e-02],\n",
       "                        [ 6.6027e-03, -1.9337e-02, -1.3118e-02],\n",
       "                        [ 8.4797e-03, -1.1583e-02, -2.7640e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 3.3212e-02,  1.0642e-02,  2.9928e-03],\n",
       "                        [ 9.4847e-03,  1.8692e-02, -2.8347e-02],\n",
       "                        [ 5.5950e-04,  4.3581e-03, -2.1465e-02]],\n",
       "              \n",
       "                       [[-1.6386e-02, -3.1687e-02,  1.8563e-03],\n",
       "                        [-9.1707e-03, -1.1440e-03, -1.7758e-02],\n",
       "                        [ 1.8297e-02,  2.5078e-02, -3.2219e-02]],\n",
       "              \n",
       "                       [[ 4.8099e-03, -7.0626e-03,  4.8975e-04],\n",
       "                        [-2.0749e-02, -2.2338e-02, -5.6504e-04],\n",
       "                        [ 8.8416e-04,  6.7634e-03, -1.9831e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 2.9164e-02,  2.7444e-02, -7.6408e-03],\n",
       "                        [ 2.3091e-02, -1.0931e-02,  1.2614e-02],\n",
       "                        [ 2.4576e-02,  1.1509e-02,  2.0829e-02]],\n",
       "              \n",
       "                       [[-5.4788e-03,  1.3917e-02,  3.9531e-03],\n",
       "                        [-3.2672e-03,  3.1571e-02, -1.6878e-02],\n",
       "                        [-1.1666e-02, -2.3186e-02, -8.2641e-03]],\n",
       "              \n",
       "                       [[-1.8665e-02, -1.2299e-02, -2.3843e-02],\n",
       "                        [ 1.7339e-02,  1.5075e-02, -7.9928e-05],\n",
       "                        [ 3.5565e-02,  2.4868e-02,  7.5227e-03]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-5.9930e-03,  3.4883e-02,  7.0264e-04],\n",
       "                        [-1.9241e-02,  3.3346e-02,  2.2697e-02],\n",
       "                        [-2.8776e-03,  1.9488e-02, -2.3979e-02]],\n",
       "              \n",
       "                       [[ 1.4087e-02,  1.0681e-02, -2.5482e-02],\n",
       "                        [-7.4678e-03, -3.1474e-02,  2.5100e-02],\n",
       "                        [-9.9278e-03, -2.1323e-02,  1.0725e-02]],\n",
       "              \n",
       "                       [[ 1.9033e-02, -2.9603e-02, -1.2518e-02],\n",
       "                        [ 7.3382e-03, -1.4652e-02,  4.7692e-03],\n",
       "                        [ 9.3133e-04, -1.1807e-02, -1.5789e-02]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[ 3.6827e-03, -2.8791e-02, -3.2597e-02],\n",
       "                        [ 2.7361e-02, -2.7021e-02,  1.1521e-02],\n",
       "                        [ 2.1423e-02,  2.3109e-02, -3.4020e-02]],\n",
       "              \n",
       "                       [[-3.1266e-02,  9.4238e-03, -3.4382e-02],\n",
       "                        [-1.4389e-02,  2.8757e-02, -5.4041e-03],\n",
       "                        [-1.4876e-02, -6.6279e-03,  6.2771e-03]],\n",
       "              \n",
       "                       [[-3.2333e-02,  1.3916e-02,  2.7347e-02],\n",
       "                        [ 7.9369e-03, -2.3607e-02, -2.4783e-02],\n",
       "                        [-2.2253e-02,  1.1920e-02,  1.3898e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-2.7756e-02, -7.9366e-03, -2.9130e-02],\n",
       "                        [-3.3674e-02,  2.1497e-02, -3.4795e-02],\n",
       "                        [-3.5425e-02,  1.1212e-02, -3.4605e-02]],\n",
       "              \n",
       "                       [[-7.7254e-04,  2.5707e-02,  1.1464e-03],\n",
       "                        [ 1.5527e-02, -1.7976e-02, -2.0516e-02],\n",
       "                        [ 2.0616e-02,  2.2114e-02,  5.3495e-03]],\n",
       "              \n",
       "                       [[-2.8591e-02,  2.6903e-02,  4.3878e-03],\n",
       "                        [-2.1102e-02,  2.5869e-02, -3.2658e-02],\n",
       "                        [ 1.2667e-02,  9.9270e-03, -8.8681e-03]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 1.8327e-02, -2.4251e-02, -6.4773e-03],\n",
       "                        [-3.3316e-02,  3.8994e-03,  3.4758e-02],\n",
       "                        [ 3.2488e-02, -2.7057e-02, -1.4031e-02]],\n",
       "              \n",
       "                       [[-2.3746e-02,  1.0730e-02,  8.2842e-03],\n",
       "                        [-2.3351e-02, -1.4503e-02,  1.7498e-02],\n",
       "                        [ 7.4909e-03,  3.1925e-02,  7.4157e-03]],\n",
       "              \n",
       "                       [[-4.2588e-03, -1.5211e-02,  1.3807e-02],\n",
       "                        [-3.5321e-02,  1.5308e-02, -3.1756e-02],\n",
       "                        [-3.1901e-02, -2.6725e-02,  1.6423e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-1.3318e-02,  1.6287e-02, -1.5480e-02],\n",
       "                        [ 4.5888e-03,  3.3711e-02, -1.7291e-02],\n",
       "                        [-1.9715e-02,  1.4107e-02,  3.0781e-02]],\n",
       "              \n",
       "                       [[-3.1695e-02, -2.3401e-02,  1.7031e-02],\n",
       "                        [-3.0071e-02,  1.8118e-02,  5.1963e-03],\n",
       "                        [ 3.1861e-02, -3.7004e-03,  2.8338e-02]],\n",
       "              \n",
       "                       [[ 1.8160e-03, -2.4444e-02,  1.7135e-02],\n",
       "                        [-1.5277e-02, -9.3285e-03, -3.6042e-02],\n",
       "                        [-2.2007e-02, -2.6176e-02, -3.1270e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 2.2655e-02,  5.9004e-03, -3.4728e-04],\n",
       "                        [-8.1763e-03, -2.4671e-02,  2.8284e-02],\n",
       "                        [ 1.0283e-02,  2.7941e-02,  1.1826e-02]],\n",
       "              \n",
       "                       [[ 1.2070e-02,  1.3803e-02,  3.3613e-02],\n",
       "                        [ 3.0647e-02, -8.4947e-03, -2.0792e-02],\n",
       "                        [-2.5913e-02, -3.5684e-02, -4.4891e-03]],\n",
       "              \n",
       "                       [[ 3.2675e-02, -2.3818e-02, -2.8686e-02],\n",
       "                        [-1.3301e-02,  6.8431e-03,  1.8925e-02],\n",
       "                        [-1.7057e-02, -2.7960e-02,  8.4232e-04]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 1.2668e-03, -1.3980e-02,  1.3806e-02],\n",
       "                        [-3.2225e-02, -1.2658e-02,  1.1972e-02],\n",
       "                        [ 2.3485e-03, -1.0432e-02,  2.7536e-02]],\n",
       "              \n",
       "                       [[ 3.5276e-02, -5.4583e-03,  7.4934e-04],\n",
       "                        [-3.3883e-04, -3.3100e-02,  1.6276e-02],\n",
       "                        [-1.2876e-02,  2.9375e-02,  1.1079e-02]],\n",
       "              \n",
       "                       [[ 4.9404e-03, -3.5642e-02,  2.7353e-02],\n",
       "                        [ 2.2732e-02,  2.1517e-02,  2.8142e-02],\n",
       "                        [-8.3817e-03,  1.8519e-02,  1.8149e-02]]]])),\n",
       "             ('conv_block2.3.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block2.4.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1.])),\n",
       "             ('conv_block2.4.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block2.4.running_mean',\n",
       "              tensor([ 2.7301e-02, -2.2797e-02, -6.0425e-02, -2.5208e-02, -1.2334e-02,\n",
       "                      -7.6347e-03, -3.9661e-02, -4.5991e-03, -9.5506e-03,  4.3321e-03,\n",
       "                       1.1227e-01,  2.7883e-02, -1.2780e-02,  2.8714e-02, -3.3283e-02,\n",
       "                       2.1848e-02,  4.8917e-03, -8.0872e-02, -3.3389e-02, -2.6179e-02,\n",
       "                       7.2009e-03, -1.9839e-02,  9.7876e-02, -2.5146e-02,  1.1276e-01,\n",
       "                      -4.0627e-02,  4.0494e-03,  5.3927e-02, -2.3662e-02, -6.3415e-03,\n",
       "                       5.1819e-04, -1.1430e-02,  2.5208e-02,  6.2897e-03,  8.1939e-03,\n",
       "                      -2.9893e-03, -2.3254e-02,  3.2453e-02,  6.3174e-03,  1.7455e-02,\n",
       "                       1.2197e-02,  1.0705e-02,  5.9358e-03, -2.1967e-02, -2.2913e-02,\n",
       "                      -5.2938e-02,  4.5275e-03, -1.4015e-02,  4.8318e-02, -3.7896e-02,\n",
       "                      -1.6149e-02,  5.6886e-02,  3.2309e-03, -4.5018e-02,  9.4832e-03,\n",
       "                      -3.7766e-02,  1.8168e-02, -3.2033e-03, -2.2994e-02,  5.3958e-03,\n",
       "                      -6.1230e-02, -5.0627e-02, -1.0186e-02, -3.0743e-02,  3.9715e-02,\n",
       "                      -6.2030e-03,  5.3567e-02,  3.5900e-02,  1.7090e-02, -6.3589e-02,\n",
       "                      -1.8657e-02,  1.7423e-02, -4.0838e-02, -1.1287e-02,  5.6565e-03,\n",
       "                       3.9499e-02,  9.7647e-03,  5.9998e-03, -1.2603e-02,  3.4791e-03,\n",
       "                      -1.7191e-02,  2.6146e-02, -3.8350e-02, -1.5648e-02,  8.4868e-02,\n",
       "                      -6.1445e-03,  6.3517e-02, -5.0281e-02,  1.2898e-02, -5.2336e-03,\n",
       "                       1.0679e-02, -2.8420e-02, -5.0918e-02, -1.0909e-02,  3.7512e-02,\n",
       "                       2.5308e-02, -2.7370e-02, -8.7513e-03, -4.2668e-02, -6.9932e-02,\n",
       "                       3.7398e-02, -1.4884e-02,  8.5821e-02,  2.8097e-02, -5.5767e-02,\n",
       "                      -6.7819e-02,  2.4442e-02,  7.5427e-03,  2.9779e-02,  9.9004e-04,\n",
       "                       2.9532e-02,  2.0742e-03,  3.1167e-02, -3.2755e-02,  6.1247e-03,\n",
       "                       4.7009e-02, -3.6213e-02,  7.8948e-03,  1.8513e-02,  3.8912e-02,\n",
       "                      -1.1825e-02,  2.3083e-02, -3.4733e-02, -7.1345e-03, -2.6447e-02,\n",
       "                       1.3578e-02, -7.4043e-03,  5.5019e-02, -3.3302e-02, -1.3677e-02,\n",
       "                      -1.6135e-02, -4.5906e-05, -6.2935e-04,  2.1031e-02, -1.1451e-02,\n",
       "                      -3.0469e-02,  4.3851e-02,  2.0102e-02,  2.5482e-02, -4.5217e-03,\n",
       "                       4.7674e-02, -1.1751e-02, -3.2487e-02, -2.6169e-02, -3.9376e-02,\n",
       "                       1.3147e-02,  6.8436e-02, -1.9800e-02,  6.2390e-02, -4.5798e-02,\n",
       "                       3.9032e-02,  4.6252e-02, -3.8773e-02, -3.7380e-02,  8.6147e-02,\n",
       "                      -5.6128e-02, -6.0430e-02, -3.2941e-02,  9.4440e-03, -4.3810e-02,\n",
       "                      -1.8781e-02,  5.0116e-03, -2.9681e-02,  8.5425e-03, -1.6523e-02,\n",
       "                      -2.2143e-02, -1.2473e-02,  2.4486e-02,  4.5518e-02,  2.4767e-02,\n",
       "                       2.1521e-02, -5.9188e-02, -1.1552e-02, -2.3343e-02,  4.7988e-02,\n",
       "                      -1.3974e-02,  2.3524e-02, -4.4790e-03, -1.7963e-02, -2.3389e-02,\n",
       "                      -1.1081e-02,  3.7422e-02,  1.3384e-02, -1.8070e-02,  5.2537e-02,\n",
       "                       1.1983e-02, -3.4341e-02, -8.1077e-02, -3.8545e-02,  1.3502e-02,\n",
       "                      -2.5763e-02, -6.0755e-03, -1.2154e-02, -1.1768e-02, -5.6745e-02,\n",
       "                       3.2735e-02,  5.2083e-03, -3.6376e-02, -4.4818e-02, -3.9789e-03,\n",
       "                       9.5382e-03,  7.2987e-03, -2.4252e-02,  3.5967e-03,  3.6698e-02,\n",
       "                      -3.6915e-02,  1.6282e-02,  3.4511e-02,  4.0647e-02,  1.7507e-02,\n",
       "                       2.4702e-02, -2.7018e-02,  1.9650e-02, -6.2640e-03,  5.1251e-02,\n",
       "                       4.8464e-02, -3.5758e-02,  4.7979e-02,  6.1359e-03,  8.2931e-04,\n",
       "                       5.5458e-03, -1.4852e-02, -8.0650e-03, -4.6594e-02,  5.3788e-02,\n",
       "                       2.6273e-02,  3.5554e-02,  9.8672e-04,  1.4105e-03, -7.2994e-03,\n",
       "                       5.2190e-03, -3.0482e-02, -3.4343e-02, -5.0896e-02, -4.5915e-02,\n",
       "                      -2.1543e-02,  4.7911e-02,  3.5553e-02, -4.6603e-03, -1.1945e-02,\n",
       "                       1.5342e-02,  5.4330e-02, -3.7008e-02,  1.1268e-03,  6.5505e-02,\n",
       "                      -6.4148e-03,  3.2613e-02, -7.3041e-02, -3.6621e-02, -1.0006e-02,\n",
       "                       1.7114e-03, -6.1260e-02,  1.9174e-03, -5.2961e-02, -2.5988e-02,\n",
       "                      -1.6101e-02])),\n",
       "             ('conv_block2.4.running_var',\n",
       "              tensor([0.9333, 0.9316, 0.9272, 0.9387, 0.9298, 0.9284, 0.9251, 0.9427, 0.9369,\n",
       "                      0.9304, 0.9447, 0.9325, 0.9295, 0.9297, 0.9214, 0.9295, 0.9408, 0.9444,\n",
       "                      0.9341, 0.9270, 0.9281, 0.9292, 0.9276, 0.9292, 0.9454, 0.9319, 0.9311,\n",
       "                      0.9294, 0.9424, 0.9260, 0.9344, 0.9269, 0.9387, 0.9243, 0.9431, 0.9415,\n",
       "                      0.9304, 0.9308, 0.9434, 0.9319, 0.9330, 0.9264, 0.9369, 0.9373, 0.9287,\n",
       "                      0.9257, 0.9336, 0.9318, 0.9301, 0.9321, 0.9234, 0.9420, 0.9493, 0.9264,\n",
       "                      0.9415, 0.9349, 0.9310, 0.9223, 0.9324, 0.9305, 0.9318, 0.9230, 0.9321,\n",
       "                      0.9260, 0.9415, 0.9270, 0.9313, 0.9317, 0.9247, 0.9404, 0.9358, 0.9344,\n",
       "                      0.9284, 0.9389, 0.9259, 0.9327, 0.9240, 0.9372, 0.9427, 0.9270, 0.9319,\n",
       "                      0.9252, 0.9553, 0.9347, 0.9554, 0.9281, 0.9332, 0.9260, 0.9237, 0.9349,\n",
       "                      0.9265, 0.9393, 0.9334, 0.9368, 0.9304, 0.9349, 0.9209, 0.9369, 0.9302,\n",
       "                      0.9364, 0.9405, 0.9318, 0.9348, 0.9294, 0.9351, 0.9340, 0.9443, 0.9250,\n",
       "                      0.9243, 0.9354, 0.9314, 0.9222, 0.9240, 0.9336, 0.9306, 0.9761, 0.9271,\n",
       "                      0.9480, 0.9371, 0.9312, 0.9243, 0.9234, 0.9260, 0.9289, 0.9236, 0.9299,\n",
       "                      0.9304, 0.9250, 0.9455, 0.9227, 0.9339, 0.9317, 0.9353, 0.9261, 0.9271,\n",
       "                      0.9275, 0.9292, 0.9355, 0.9263, 0.9307, 0.9317, 0.9278, 0.9324, 0.9373,\n",
       "                      0.9266, 0.9297, 0.9281, 0.9407, 0.9371, 0.9239, 0.9228, 0.9258, 0.9319,\n",
       "                      0.9299, 0.9394, 0.9320, 0.9393, 0.9405, 0.9211, 0.9288, 0.9322, 0.9298,\n",
       "                      0.9306, 0.9270, 0.9255, 0.9309, 0.9258, 0.9277, 0.9333, 0.9350, 0.9270,\n",
       "                      0.9382, 0.9244, 0.9218, 0.9300, 0.9342, 0.9318, 0.9401, 0.9235, 0.9259,\n",
       "                      0.9344, 0.9330, 0.9435, 0.9295, 0.9330, 0.9273, 0.9258, 0.9304, 0.9415,\n",
       "                      0.9294, 0.9321, 0.9373, 0.9283, 0.9275, 0.9254, 0.9310, 0.9431, 0.9433,\n",
       "                      0.9290, 0.9278, 0.9413, 0.9293, 0.9428, 0.9263, 0.9304, 0.9388, 0.9343,\n",
       "                      0.9352, 0.9374, 0.9298, 0.9379, 0.9336, 0.9341, 0.9307, 0.9441, 0.9371,\n",
       "                      0.9197, 0.9324, 0.9309, 0.9310, 0.9314, 0.9306, 0.9346, 0.9442, 0.9296,\n",
       "                      0.9353, 0.9253, 0.9342, 0.9237, 0.9253, 0.9288, 0.9365, 0.9262, 0.9373,\n",
       "                      0.9321, 0.9391, 0.9270, 0.9406, 0.9293, 0.9224, 0.9317, 0.9315, 0.9296,\n",
       "                      0.9319, 0.9352, 0.9211, 0.9256, 0.9308, 0.9333, 0.9353, 0.9313, 0.9250,\n",
       "                      0.9264, 0.9243, 0.9246, 0.9263])),\n",
       "             ('conv_block2.4.num_batches_tracked', tensor(1)),\n",
       "             ('conv_block3.0.weight',\n",
       "              tensor([[[[ 0.0138,  0.0078, -0.0093],\n",
       "                        [ 0.0170,  0.0180,  0.0157],\n",
       "                        [ 0.0139, -0.0224, -0.0016]],\n",
       "              \n",
       "                       [[ 0.0176, -0.0276, -0.0074],\n",
       "                        [-0.0028,  0.0145,  0.0233],\n",
       "                        [ 0.0139,  0.0283, -0.0225]],\n",
       "              \n",
       "                       [[-0.0111,  0.0033,  0.0020],\n",
       "                        [ 0.0260, -0.0201, -0.0111],\n",
       "                        [-0.0092,  0.0133, -0.0191]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0290,  0.0113, -0.0181],\n",
       "                        [ 0.0274, -0.0082, -0.0050],\n",
       "                        [-0.0268, -0.0220, -0.0191]],\n",
       "              \n",
       "                       [[-0.0073, -0.0209, -0.0003],\n",
       "                        [ 0.0003, -0.0004, -0.0197],\n",
       "                        [ 0.0063, -0.0165,  0.0149]],\n",
       "              \n",
       "                       [[-0.0261,  0.0077, -0.0232],\n",
       "                        [-0.0045,  0.0151,  0.0204],\n",
       "                        [ 0.0227, -0.0010, -0.0139]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0021, -0.0170,  0.0039],\n",
       "                        [-0.0148, -0.0258,  0.0136],\n",
       "                        [-0.0234, -0.0292, -0.0175]],\n",
       "              \n",
       "                       [[ 0.0125, -0.0092,  0.0123],\n",
       "                        [ 0.0159,  0.0235,  0.0046],\n",
       "                        [ 0.0290, -0.0252,  0.0242]],\n",
       "              \n",
       "                       [[-0.0020,  0.0062,  0.0153],\n",
       "                        [-0.0042, -0.0225, -0.0211],\n",
       "                        [ 0.0129, -0.0106,  0.0157]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0180,  0.0248,  0.0099],\n",
       "                        [-0.0021, -0.0167, -0.0072],\n",
       "                        [ 0.0223, -0.0071, -0.0113]],\n",
       "              \n",
       "                       [[-0.0120, -0.0124,  0.0070],\n",
       "                        [-0.0030,  0.0141, -0.0143],\n",
       "                        [ 0.0205,  0.0071,  0.0221]],\n",
       "              \n",
       "                       [[ 0.0283, -0.0130,  0.0163],\n",
       "                        [ 0.0222,  0.0230, -0.0214],\n",
       "                        [ 0.0278, -0.0223,  0.0094]]],\n",
       "              \n",
       "              \n",
       "                      [[[-0.0175, -0.0133,  0.0035],\n",
       "                        [ 0.0245, -0.0199,  0.0100],\n",
       "                        [-0.0035, -0.0041, -0.0048]],\n",
       "              \n",
       "                       [[ 0.0047, -0.0169,  0.0075],\n",
       "                        [ 0.0012, -0.0019, -0.0186],\n",
       "                        [ 0.0052, -0.0250, -0.0051]],\n",
       "              \n",
       "                       [[ 0.0147,  0.0260, -0.0032],\n",
       "                        [ 0.0071, -0.0236,  0.0150],\n",
       "                        [-0.0092, -0.0294,  0.0198]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 0.0225,  0.0231,  0.0051],\n",
       "                        [-0.0107,  0.0243, -0.0212],\n",
       "                        [ 0.0199,  0.0179, -0.0137]],\n",
       "              \n",
       "                       [[ 0.0216,  0.0027,  0.0120],\n",
       "                        [-0.0172, -0.0290, -0.0178],\n",
       "                        [ 0.0108, -0.0131,  0.0184]],\n",
       "              \n",
       "                       [[-0.0210,  0.0292, -0.0265],\n",
       "                        [ 0.0095, -0.0055,  0.0229],\n",
       "                        [ 0.0030, -0.0182,  0.0116]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[-0.0256, -0.0063,  0.0233],\n",
       "                        [-0.0093,  0.0288, -0.0236],\n",
       "                        [ 0.0081, -0.0275,  0.0196]],\n",
       "              \n",
       "                       [[-0.0251, -0.0016,  0.0122],\n",
       "                        [-0.0081, -0.0203, -0.0193],\n",
       "                        [-0.0141, -0.0006, -0.0067]],\n",
       "              \n",
       "                       [[ 0.0230, -0.0039,  0.0231],\n",
       "                        [-0.0093, -0.0252,  0.0229],\n",
       "                        [-0.0151,  0.0180, -0.0265]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0109,  0.0043,  0.0121],\n",
       "                        [ 0.0190,  0.0091,  0.0254],\n",
       "                        [-0.0224, -0.0107, -0.0187]],\n",
       "              \n",
       "                       [[-0.0066, -0.0006,  0.0004],\n",
       "                        [ 0.0034,  0.0096, -0.0213],\n",
       "                        [ 0.0032, -0.0190, -0.0007]],\n",
       "              \n",
       "                       [[-0.0020,  0.0154, -0.0044],\n",
       "                        [-0.0055, -0.0082,  0.0143],\n",
       "                        [-0.0020,  0.0228, -0.0165]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0156,  0.0099, -0.0160],\n",
       "                        [-0.0043,  0.0076, -0.0283],\n",
       "                        [ 0.0026,  0.0199,  0.0213]],\n",
       "              \n",
       "                       [[-0.0091, -0.0028, -0.0121],\n",
       "                        [-0.0181,  0.0131,  0.0252],\n",
       "                        [-0.0111,  0.0244, -0.0274]],\n",
       "              \n",
       "                       [[ 0.0067, -0.0221,  0.0055],\n",
       "                        [-0.0249, -0.0087,  0.0044],\n",
       "                        [-0.0174,  0.0003,  0.0049]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0213, -0.0066,  0.0242],\n",
       "                        [ 0.0064,  0.0290, -0.0123],\n",
       "                        [ 0.0098,  0.0226, -0.0025]],\n",
       "              \n",
       "                       [[ 0.0222,  0.0242, -0.0048],\n",
       "                        [-0.0145, -0.0106, -0.0180],\n",
       "                        [-0.0073, -0.0205,  0.0261]],\n",
       "              \n",
       "                       [[ 0.0131, -0.0254, -0.0053],\n",
       "                        [-0.0293, -0.0224, -0.0108],\n",
       "                        [ 0.0148,  0.0216,  0.0263]]],\n",
       "              \n",
       "              \n",
       "                      [[[ 0.0167,  0.0171, -0.0170],\n",
       "                        [ 0.0200, -0.0241,  0.0052],\n",
       "                        [-0.0265, -0.0154,  0.0191]],\n",
       "              \n",
       "                       [[-0.0121, -0.0112, -0.0145],\n",
       "                        [ 0.0165, -0.0148,  0.0269],\n",
       "                        [ 0.0006,  0.0138,  0.0055]],\n",
       "              \n",
       "                       [[-0.0219, -0.0026,  0.0112],\n",
       "                        [-0.0073, -0.0030,  0.0010],\n",
       "                        [ 0.0002,  0.0030,  0.0228]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-0.0109,  0.0123, -0.0293],\n",
       "                        [ 0.0135,  0.0118,  0.0017],\n",
       "                        [-0.0089, -0.0112,  0.0191]],\n",
       "              \n",
       "                       [[ 0.0198,  0.0102, -0.0209],\n",
       "                        [ 0.0115,  0.0152, -0.0177],\n",
       "                        [-0.0202,  0.0171,  0.0013]],\n",
       "              \n",
       "                       [[ 0.0278,  0.0272, -0.0249],\n",
       "                        [ 0.0009, -0.0272,  0.0104],\n",
       "                        [ 0.0133,  0.0108, -0.0052]]]])),\n",
       "             ('conv_block3.0.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block3.1.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('conv_block3.1.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block3.1.running_mean',\n",
       "              tensor([ 7.6806e-02,  4.8659e-02, -3.4878e-02,  7.3575e-02, -2.2690e-02,\n",
       "                       5.0217e-02, -3.8088e-02, -8.0222e-02, -1.9188e-02,  2.8503e-03,\n",
       "                      -1.1216e-01,  2.0760e-02,  5.4172e-02, -1.5800e-02,  1.2113e-02,\n",
       "                      -5.8767e-03, -2.0341e-02,  1.4744e-01,  2.4138e-02,  6.2463e-02,\n",
       "                       4.7704e-02,  1.8580e-02,  2.0456e-02,  4.5768e-02,  1.4177e-02,\n",
       "                       5.2424e-02, -8.0625e-02, -8.2717e-02,  6.6164e-02,  5.6229e-02,\n",
       "                      -5.6238e-02,  2.5355e-03,  5.7123e-02, -6.7674e-02, -6.1842e-03,\n",
       "                      -6.3591e-03, -7.3561e-03, -5.3431e-02, -3.7827e-02,  8.4501e-03,\n",
       "                      -3.8243e-02,  1.0304e-01, -1.4238e-02, -6.8046e-02,  1.0581e-02,\n",
       "                       9.3612e-02,  2.7863e-02, -3.7495e-03,  1.3570e-02, -2.2118e-02,\n",
       "                      -2.0447e-02,  4.1827e-02,  2.2786e-02,  7.4495e-02,  7.3557e-02,\n",
       "                      -1.4271e-01, -1.8917e-02,  6.8551e-02,  5.5440e-03,  1.5923e-02,\n",
       "                       2.8153e-02,  1.2391e-01, -6.9320e-02,  3.3619e-02, -9.2285e-02,\n",
       "                       3.4433e-02,  1.6744e-02, -4.7700e-03, -2.2734e-02, -4.5612e-02,\n",
       "                      -4.5988e-03,  4.1147e-03,  8.1210e-02,  5.7817e-02, -4.3813e-02,\n",
       "                       6.3385e-03, -2.7081e-02,  3.3741e-02,  5.9501e-02,  5.7478e-02,\n",
       "                      -3.3183e-02, -3.5972e-02, -7.4539e-03,  3.8206e-02, -4.6682e-02,\n",
       "                       1.5782e-03,  6.5015e-02,  2.3311e-03, -1.1575e-01,  1.1004e-02,\n",
       "                      -2.4754e-02, -1.5529e-02,  6.3971e-03, -2.0346e-02,  9.9956e-02,\n",
       "                       3.1644e-02,  2.8647e-02, -9.7959e-02, -3.8191e-02,  4.8596e-02,\n",
       "                       3.6793e-02, -1.6108e-02,  1.0206e-01, -4.6531e-02, -5.0882e-02,\n",
       "                      -7.8301e-02,  2.6877e-02,  3.9559e-02, -3.2006e-02,  4.0864e-02,\n",
       "                       1.6561e-02, -4.5502e-02,  6.8122e-02, -3.9231e-02, -1.4695e-02,\n",
       "                      -3.1105e-02, -3.1955e-02,  1.6261e-02,  2.1376e-03, -4.4357e-02,\n",
       "                      -8.6288e-02, -8.7397e-03, -7.3006e-02,  6.1342e-02, -2.1183e-02,\n",
       "                      -7.2525e-03, -3.9981e-02, -3.9321e-02,  2.8446e-03,  1.4727e-01,\n",
       "                       3.1286e-02,  9.9145e-03,  3.9244e-02,  3.2070e-02, -8.4363e-02,\n",
       "                      -1.3933e-01, -3.5677e-02,  1.0521e-02,  3.7597e-02, -9.6300e-02,\n",
       "                      -2.4430e-02,  4.4489e-02, -1.0709e-01,  2.0431e-02, -2.1524e-02,\n",
       "                       3.5764e-02,  7.9703e-02,  6.7049e-02,  5.8834e-02, -3.1918e-02,\n",
       "                       5.3430e-02,  9.7583e-02, -2.9564e-02,  3.9340e-02, -6.8804e-02,\n",
       "                       1.1915e-01,  7.0642e-03,  3.0279e-02,  6.7779e-02,  7.2925e-02,\n",
       "                      -8.8801e-02, -2.7496e-02,  6.9947e-02,  6.5575e-02,  2.3961e-03,\n",
       "                      -3.0438e-02, -4.9832e-02, -9.4583e-03, -9.9479e-02,  4.2113e-02,\n",
       "                       5.2543e-03, -7.1140e-03,  3.0905e-02, -2.9157e-02,  2.0210e-02,\n",
       "                      -2.8874e-02,  2.0779e-02,  7.9592e-02,  4.8985e-02,  2.4395e-02,\n",
       "                       1.0589e-02,  2.0512e-04,  8.2153e-03,  1.4956e-03, -7.7297e-02,\n",
       "                       6.4122e-02,  4.4813e-02,  3.8317e-02,  2.0683e-02, -4.4070e-02,\n",
       "                       5.4177e-02, -1.0458e-03,  1.8640e-02, -6.3760e-02,  4.1674e-02,\n",
       "                       2.4251e-02, -5.1351e-02, -9.4529e-02, -1.1126e-02, -1.0855e-01,\n",
       "                      -5.9304e-02,  2.6102e-02,  4.1634e-02,  1.1280e-03,  1.1194e-01,\n",
       "                      -2.6442e-02,  8.8135e-03, -2.9154e-02, -3.4891e-02, -6.1504e-02,\n",
       "                       2.5553e-02, -1.0965e-01,  1.1937e-02, -2.7544e-02, -2.7722e-02,\n",
       "                       9.3100e-03, -2.9560e-02, -9.0857e-02, -1.0603e-01, -1.3602e-01,\n",
       "                      -6.5965e-02,  1.6893e-02,  7.9093e-02, -4.6061e-02, -4.8415e-02,\n",
       "                      -6.1788e-03, -1.9285e-02, -3.8867e-02, -1.9261e-02, -8.2631e-02,\n",
       "                       6.6100e-03,  3.5386e-02,  5.3901e-02,  7.9546e-02,  1.0110e-02,\n",
       "                       3.2475e-02, -6.5605e-02,  1.2432e-03,  7.3084e-02, -3.7353e-02,\n",
       "                      -3.9273e-02, -4.0209e-02, -2.3815e-02, -1.3520e-02,  1.4656e-02,\n",
       "                      -1.0395e-01,  7.0657e-02,  2.2809e-02, -5.1198e-02, -9.7188e-02,\n",
       "                       4.8962e-02, -7.0328e-02,  3.3572e-03,  1.2720e-02, -4.0161e-02,\n",
       "                       6.4202e-02, -1.2618e-02, -2.4696e-03, -2.1511e-02, -1.5008e-03,\n",
       "                       3.5239e-03,  1.0426e-02,  3.6552e-02, -5.4438e-02,  8.3775e-02,\n",
       "                      -8.0589e-05,  9.5060e-02, -6.4964e-02,  5.6973e-03, -2.1578e-02,\n",
       "                       1.4726e-01,  5.8802e-02,  3.5765e-02,  5.7005e-02, -3.2512e-02,\n",
       "                      -5.4866e-02, -1.7250e-02, -8.0528e-02, -2.2462e-02,  1.9215e-02,\n",
       "                       2.3926e-02,  6.0454e-02, -3.5376e-02,  8.7296e-02,  5.7076e-02,\n",
       "                      -3.1860e-02,  4.3912e-02, -8.7441e-02,  7.1861e-02, -1.5469e-02,\n",
       "                       3.0706e-02, -6.9766e-02,  6.3231e-05,  4.0662e-02,  9.1830e-02,\n",
       "                      -4.0229e-02,  3.9325e-03, -3.0188e-03, -1.8936e-02,  4.9057e-02,\n",
       "                      -3.3257e-02,  6.2114e-02, -8.5591e-02, -5.9056e-02,  4.0718e-02,\n",
       "                       8.6363e-02,  9.7373e-02, -1.1156e-02,  1.0592e-01,  1.1830e-01,\n",
       "                       4.3745e-02,  4.9651e-03, -1.2667e-01, -9.7762e-03, -5.6192e-03,\n",
       "                       4.2833e-02, -2.0240e-02,  3.0070e-02, -1.0364e-02,  1.3643e-02,\n",
       "                       8.3237e-02, -8.6324e-02,  2.5990e-02,  4.4994e-02,  1.9245e-02,\n",
       "                      -7.5568e-03,  1.4475e-02, -1.9825e-02,  4.3644e-02,  4.9981e-02,\n",
       "                       1.2906e-02,  5.8965e-02, -1.5770e-02,  6.0328e-02, -9.3369e-03,\n",
       "                       3.6326e-02, -4.0814e-02, -9.3929e-03, -7.7763e-02, -1.1469e-02,\n",
       "                      -3.4576e-02, -8.3946e-02,  2.7532e-02,  7.4938e-02, -5.9499e-02,\n",
       "                       1.0416e-01, -1.7032e-02, -2.0950e-02,  1.1551e-02,  3.6799e-02,\n",
       "                       8.9998e-02, -4.6859e-02,  1.9041e-02, -8.4122e-02,  2.2968e-02,\n",
       "                       3.7520e-02,  3.2231e-02, -1.0489e-02, -9.3641e-02, -2.1086e-02,\n",
       "                      -3.9102e-02, -1.2644e-01, -1.2276e-01,  1.7323e-02, -9.7468e-02,\n",
       "                      -3.0984e-02, -5.9732e-02,  9.9664e-04, -8.1825e-03, -1.2125e-02,\n",
       "                      -2.6748e-02,  4.7265e-02, -3.7379e-02,  5.8482e-02,  1.4229e-02,\n",
       "                       9.5612e-03,  1.5545e-02,  1.5705e-02, -1.5055e-03,  1.9348e-03,\n",
       "                      -1.7780e-02,  7.9330e-02,  7.2859e-02, -3.5286e-02, -5.6507e-02,\n",
       "                      -1.2258e-01,  3.5825e-02,  1.6357e-03, -7.1032e-03,  2.5476e-02,\n",
       "                      -5.7906e-03, -4.0024e-02, -2.3694e-02, -3.0577e-02,  7.1593e-02,\n",
       "                      -8.8519e-03, -1.0193e-02, -1.1970e-01,  4.5172e-02, -5.2882e-02,\n",
       "                       2.7930e-02,  7.7470e-03,  3.6990e-02,  8.9294e-03,  2.4214e-02,\n",
       "                       8.2196e-02,  1.0939e-01, -9.4376e-03, -9.5986e-03,  2.3221e-02,\n",
       "                      -6.7160e-02,  1.1438e-02, -5.0537e-02, -5.6940e-03, -3.1271e-02,\n",
       "                      -7.6153e-02, -6.8065e-02, -7.8205e-02, -1.6013e-02,  1.8191e-02,\n",
       "                      -9.2637e-02,  5.3360e-02, -3.5526e-02,  2.8736e-02, -1.1981e-03,\n",
       "                       1.1839e-01,  2.3679e-02, -8.1725e-02, -2.6819e-02,  7.3450e-03,\n",
       "                       4.7011e-02,  5.5636e-02,  4.7612e-02, -3.6452e-02,  5.1658e-03,\n",
       "                      -5.0596e-02, -1.8247e-02, -8.0444e-02,  9.7995e-03, -5.3501e-02,\n",
       "                       1.1158e-01, -4.8618e-02,  2.2077e-03,  7.5301e-02, -1.2339e-02,\n",
       "                      -2.1470e-02, -5.2096e-02,  6.3499e-02, -6.2013e-03, -3.6461e-02,\n",
       "                       7.8202e-03,  5.4041e-02, -3.5687e-03, -7.6985e-02,  1.7104e-02,\n",
       "                       1.0383e-02,  3.0228e-02,  3.4454e-02, -2.1629e-02, -1.1442e-01,\n",
       "                       2.8965e-02,  9.2750e-02, -1.2584e-01, -6.3703e-02,  1.0829e-02,\n",
       "                       2.3798e-02,  6.3212e-02, -5.1057e-02,  5.3810e-03,  3.6453e-02,\n",
       "                      -1.5513e-02,  2.0709e-02,  2.9589e-03,  2.6432e-02,  5.7055e-02,\n",
       "                      -6.7759e-02, -1.7684e-02, -1.4743e-03,  9.5453e-02,  3.0296e-03,\n",
       "                      -2.4721e-02,  4.5216e-02, -1.5399e-02,  6.3815e-02,  6.4419e-02,\n",
       "                      -9.3023e-03,  2.4490e-02, -1.1262e-01, -6.9855e-02, -3.2781e-03,\n",
       "                       3.1167e-02, -3.1020e-02, -4.4279e-02,  8.4360e-02,  2.0864e-02,\n",
       "                      -2.4693e-02, -5.0328e-02, -5.2325e-03, -3.0752e-02, -1.1450e-01,\n",
       "                       1.2221e-01,  7.1188e-03, -1.4129e-04, -7.1069e-02,  3.4862e-02,\n",
       "                      -1.2472e-01, -2.5776e-02, -1.0917e-01, -2.9027e-03, -1.6470e-02,\n",
       "                      -4.7674e-02,  8.4315e-02])),\n",
       "             ('conv_block3.1.running_var',\n",
       "              tensor([0.9358, 0.9492, 0.9520, 0.9440, 0.9360, 0.9355, 0.9292, 0.9521, 0.9310,\n",
       "                      0.9283, 0.9494, 0.9352, 0.9289, 0.9414, 0.9297, 0.9549, 0.9284, 0.9588,\n",
       "                      0.9343, 0.9292, 0.9348, 0.9316, 0.9349, 0.9337, 0.9408, 0.9519, 0.9405,\n",
       "                      0.9613, 0.9542, 0.9358, 0.9397, 0.9406, 0.9333, 0.9406, 0.9372, 0.9353,\n",
       "                      0.9339, 0.9395, 0.9387, 0.9318, 0.9295, 0.9373, 0.9304, 0.9446, 0.9422,\n",
       "                      0.9391, 0.9316, 0.9307, 0.9329, 0.9331, 0.9313, 0.9424, 0.9357, 0.9328,\n",
       "                      0.9304, 0.9550, 0.9400, 0.9289, 0.9398, 0.9294, 0.9297, 0.9338, 0.9361,\n",
       "                      0.9327, 0.9497, 0.9291, 0.9278, 0.9274, 0.9351, 0.9406, 0.9289, 0.9281,\n",
       "                      0.9405, 0.9270, 0.9385, 0.9401, 0.9360, 0.9357, 0.9331, 0.9320, 0.9270,\n",
       "                      0.9303, 0.9332, 0.9349, 0.9340, 0.9521, 0.9287, 0.9254, 0.9758, 0.9364,\n",
       "                      0.9326, 0.9356, 0.9347, 0.9492, 0.9486, 0.9381, 0.9247, 0.9418, 0.9310,\n",
       "                      0.9236, 0.9287, 0.9459, 0.9684, 0.9297, 0.9283, 0.9538, 0.9392, 0.9281,\n",
       "                      0.9302, 0.9401, 0.9344, 0.9310, 0.9361, 0.9385, 0.9395, 0.9335, 0.9307,\n",
       "                      0.9376, 0.9282, 0.9274, 0.9393, 0.9332, 0.9480, 0.9366, 0.9381, 0.9417,\n",
       "                      0.9349, 0.9399, 0.9376, 0.9572, 0.9298, 0.9290, 0.9369, 0.9335, 0.9374,\n",
       "                      0.9652, 0.9360, 0.9235, 0.9333, 0.9392, 0.9265, 0.9316, 0.9490, 0.9275,\n",
       "                      0.9371, 0.9424, 0.9547, 0.9385, 0.9322, 0.9326, 0.9372, 0.9488, 0.9475,\n",
       "                      0.9444, 0.9484, 0.9679, 0.9298, 0.9338, 0.9420, 0.9361, 0.9454, 0.9482,\n",
       "                      0.9426, 0.9364, 0.9402, 0.9406, 0.9291, 0.9398, 0.9473, 0.9283, 0.9242,\n",
       "                      0.9289, 0.9308, 0.9383, 0.9286, 0.9354, 0.9211, 0.9566, 0.9507, 0.9289,\n",
       "                      0.9387, 0.9444, 0.9280, 0.9265, 0.9335, 0.9377, 0.9437, 0.9285, 0.9382,\n",
       "                      0.9310, 0.9295, 0.9344, 0.9429, 0.9392, 0.9297, 0.9317, 0.9447, 0.9536,\n",
       "                      0.9393, 0.9605, 0.9452, 0.9316, 0.9442, 0.9342, 0.9553, 0.9340, 0.9321,\n",
       "                      0.9349, 0.9352, 0.9387, 0.9489, 0.9434, 0.9270, 0.9311, 0.9315, 0.9288,\n",
       "                      0.9334, 0.9283, 0.9294, 0.9662, 0.9445, 0.9378, 0.9320, 0.9326, 0.9479,\n",
       "                      0.9402, 0.9358, 0.9346, 0.9417, 0.9559, 0.9309, 0.9668, 0.9349, 0.9614,\n",
       "                      0.9310, 0.9310, 0.9482, 0.9327, 0.9335, 0.9374, 0.9274, 0.9408, 0.9338,\n",
       "                      0.9401, 0.9336, 0.9434, 0.9345, 0.9382, 0.9419, 0.9569, 0.9448, 0.9324,\n",
       "                      0.9315, 0.9321, 0.9414, 0.9384, 0.9385, 0.9427, 0.9403, 0.9307, 0.9400,\n",
       "                      0.9390, 0.9310, 0.9353, 0.9359, 0.9292, 0.9440, 0.9328, 0.9386, 0.9557,\n",
       "                      0.9469, 0.9311, 0.9525, 0.9349, 0.9431, 0.9436, 0.9288, 0.9431, 0.9282,\n",
       "                      0.9315, 0.9423, 0.9284, 0.9334, 0.9432, 0.9363, 0.9256, 0.9310, 0.9435,\n",
       "                      0.9518, 0.9389, 0.9287, 0.9398, 0.9278, 0.9361, 0.9460, 0.9319, 0.9289,\n",
       "                      0.9416, 0.9313, 0.9372, 0.9304, 0.9309, 0.9405, 0.9305, 0.9303, 0.9430,\n",
       "                      0.9629, 0.9277, 0.9323, 0.9684, 0.9356, 0.9335, 0.9533, 0.9333, 0.9346,\n",
       "                      0.9440, 0.9335, 0.9254, 0.9429, 0.9343, 0.9532, 0.9400, 0.9312, 0.9394,\n",
       "                      0.9255, 0.9373, 0.9282, 0.9392, 0.9281, 0.9365, 0.9348, 0.9247, 0.9296,\n",
       "                      0.9346, 0.9322, 0.9306, 0.9380, 0.9302, 0.9343, 0.9351, 0.9380, 0.9311,\n",
       "                      0.9343, 0.9465, 0.9369, 0.9426, 0.9440, 0.9398, 0.9520, 0.9288, 0.9503,\n",
       "                      0.9339, 0.9302, 0.9397, 0.9351, 0.9256, 0.9306, 0.9338, 0.9262, 0.9331,\n",
       "                      0.9461, 0.9621, 0.9599, 0.9309, 0.9594, 0.9378, 0.9384, 0.9332, 0.9351,\n",
       "                      0.9305, 0.9394, 0.9388, 0.9361, 0.9316, 0.9286, 0.9329, 0.9436, 0.9320,\n",
       "                      0.9483, 0.9228, 0.9335, 0.9303, 0.9413, 0.9424, 0.9324, 0.9632, 0.9322,\n",
       "                      0.9318, 0.9436, 0.9380, 0.9254, 0.9380, 0.9302, 0.9427, 0.9424, 0.9285,\n",
       "                      0.9268, 0.9537, 0.9431, 0.9311, 0.9373, 0.9357, 0.9515, 0.9365, 0.9361,\n",
       "                      0.9419, 0.9506, 0.9296, 0.9360, 0.9296, 0.9366, 0.9528, 0.9430, 0.9302,\n",
       "                      0.9302, 0.9576, 0.9472, 0.9485, 0.9329, 0.9302, 0.9426, 0.9410, 0.9406,\n",
       "                      0.9345, 0.9319, 0.9575, 0.9276, 0.9654, 0.9373, 0.9303, 0.9300, 0.9395,\n",
       "                      0.9444, 0.9331, 0.9330, 0.9395, 0.9407, 0.9584, 0.9324, 0.9403, 0.9397,\n",
       "                      0.9366, 0.9321, 0.9552, 0.9304, 0.9384, 0.9346, 0.9416, 0.9408, 0.9416,\n",
       "                      0.9274, 0.9337, 0.9331, 0.9424, 0.9264, 0.9308, 0.9373, 0.9320, 0.9429,\n",
       "                      0.9327, 0.9290, 0.9514, 0.9584, 0.9442, 0.9315, 0.9352, 0.9417, 0.9322,\n",
       "                      0.9302, 0.9290, 0.9276, 0.9370, 0.9244, 0.9258, 0.9332, 0.9472, 0.9386,\n",
       "                      0.9417, 0.9450, 0.9261, 0.9307, 0.9287, 0.9306, 0.9445, 0.9398, 0.9479,\n",
       "                      0.9271, 0.9408, 0.9542, 0.9301, 0.9505, 0.9278, 0.9349, 0.9472, 0.9231,\n",
       "                      0.9291, 0.9444, 0.9378, 0.9331, 0.9443, 0.9543, 0.9271, 0.9376, 0.9385,\n",
       "                      0.9259, 0.9575, 0.9387, 0.9383, 0.9388, 0.9308, 0.9406, 0.9395])),\n",
       "             ('conv_block3.1.num_batches_tracked', tensor(1)),\n",
       "             ('conv_block3.3.weight',\n",
       "              tensor([[[[-2.6604e-03,  8.6772e-03,  1.2242e-02],\n",
       "                        [-3.8419e-03,  2.2458e-02, -1.3218e-02],\n",
       "                        [ 1.6070e-02,  2.1348e-02,  6.9980e-03]],\n",
       "              \n",
       "                       [[-2.5254e-02,  2.2437e-02,  8.0154e-04],\n",
       "                        [ 1.7472e-02,  7.1532e-03,  4.7639e-03],\n",
       "                        [ 1.0766e-02,  4.3669e-03,  1.8874e-02]],\n",
       "              \n",
       "                       [[-1.2534e-02, -1.7629e-02,  4.4831e-03],\n",
       "                        [-8.2104e-03,  1.9175e-02,  1.8008e-02],\n",
       "                        [ 2.1422e-02, -8.5697e-03,  1.7947e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-1.8967e-02, -9.5874e-03, -5.1335e-04],\n",
       "                        [ 3.8086e-03,  4.8248e-04, -5.5461e-03],\n",
       "                        [-2.3654e-02,  2.4097e-02,  2.7501e-03]],\n",
       "              \n",
       "                       [[ 8.7313e-03,  1.5239e-02,  2.0811e-02],\n",
       "                        [-8.1754e-03,  1.3874e-02,  1.7556e-02],\n",
       "                        [-1.7154e-02, -4.9310e-03, -8.8111e-03]],\n",
       "              \n",
       "                       [[ 1.6539e-02, -2.4244e-02,  1.6037e-02],\n",
       "                        [-2.1250e-03, -2.4799e-02, -2.6508e-03],\n",
       "                        [-1.9674e-02,  2.0752e-03,  1.8929e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[-1.9739e-02, -2.6953e-03,  1.2064e-02],\n",
       "                        [-2.1725e-02, -1.0342e-02, -8.2874e-03],\n",
       "                        [-1.2776e-03, -6.6581e-03, -1.0970e-02]],\n",
       "              \n",
       "                       [[-7.0014e-03, -1.4933e-02, -4.4335e-04],\n",
       "                        [-1.5977e-02, -1.6395e-03, -1.0338e-03],\n",
       "                        [ 1.6576e-02,  1.9454e-02, -1.7690e-02]],\n",
       "              \n",
       "                       [[-1.8790e-02, -1.8614e-02,  2.3533e-02],\n",
       "                        [-2.1660e-02, -1.0541e-02,  2.4202e-02],\n",
       "                        [-1.2505e-02, -1.7101e-02,  1.6358e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-1.1862e-02,  1.3153e-02, -6.3629e-03],\n",
       "                        [-8.2646e-03, -2.1756e-03,  1.6547e-02],\n",
       "                        [-7.6578e-03, -6.7924e-03, -1.2229e-02]],\n",
       "              \n",
       "                       [[-1.7316e-03,  4.3716e-03,  1.0029e-02],\n",
       "                        [ 1.0583e-02, -1.4007e-02, -1.6691e-03],\n",
       "                        [ 1.5004e-02,  2.0984e-02,  1.1834e-02]],\n",
       "              \n",
       "                       [[ 3.8824e-03, -1.6101e-02, -1.6563e-02],\n",
       "                        [ 1.8272e-02,  1.8440e-02,  8.5964e-03],\n",
       "                        [ 1.9140e-02,  2.3916e-02,  1.8843e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[-9.6784e-03,  1.4604e-02, -2.1051e-02],\n",
       "                        [ 8.8737e-03,  1.7001e-02,  2.2225e-02],\n",
       "                        [ 3.7245e-03, -4.4522e-03,  2.6634e-03]],\n",
       "              \n",
       "                       [[-7.4267e-03, -2.6087e-03, -6.6577e-03],\n",
       "                        [ 6.6698e-03, -1.1998e-04,  8.1814e-03],\n",
       "                        [ 1.9848e-02, -1.1188e-02,  2.5012e-02]],\n",
       "              \n",
       "                       [[-8.9267e-03, -1.4411e-02, -2.1008e-02],\n",
       "                        [-5.8333e-04, -2.4941e-02,  5.4566e-03],\n",
       "                        [ 1.0971e-02,  8.0209e-03, -2.0805e-03]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[-9.7331e-03, -2.1639e-02, -1.0539e-02],\n",
       "                        [ 7.8918e-03,  1.5965e-02, -1.4785e-02],\n",
       "                        [ 1.9247e-02, -2.1459e-02,  4.5944e-04]],\n",
       "              \n",
       "                       [[ 1.6978e-02,  2.4760e-02,  1.0666e-02],\n",
       "                        [ 2.0675e-02,  1.1489e-02,  5.5577e-03],\n",
       "                        [-1.0229e-02, -1.4788e-02, -1.5969e-02]],\n",
       "              \n",
       "                       [[-1.2863e-02,  1.5839e-02, -1.9258e-02],\n",
       "                        [-7.2141e-03,  9.1885e-03, -2.1679e-02],\n",
       "                        [ 1.7919e-02,  1.4881e-02, -1.9501e-02]]],\n",
       "              \n",
       "              \n",
       "                      ...,\n",
       "              \n",
       "              \n",
       "                      [[[ 7.3630e-03, -2.0005e-02, -8.6422e-03],\n",
       "                        [-2.1041e-02,  1.3725e-02, -9.6725e-03],\n",
       "                        [-1.8469e-02, -8.4605e-03,  1.6248e-02]],\n",
       "              \n",
       "                       [[-2.3266e-02, -8.4166e-03, -2.5808e-03],\n",
       "                        [-4.2150e-03, -8.2727e-03,  1.6960e-02],\n",
       "                        [-7.4781e-03, -9.5120e-03,  1.9219e-03]],\n",
       "              \n",
       "                       [[-1.5937e-02, -9.3501e-03,  1.9125e-02],\n",
       "                        [-1.7287e-02,  5.9892e-03, -5.6678e-03],\n",
       "                        [-1.1892e-02,  1.0340e-02, -9.1772e-03]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 2.2837e-02,  2.4755e-02, -2.1803e-02],\n",
       "                        [ 1.8132e-03,  2.1565e-02,  2.0028e-02],\n",
       "                        [ 2.3783e-02, -2.0371e-02,  2.3946e-02]],\n",
       "              \n",
       "                       [[ 1.5268e-02,  1.1521e-02,  2.2371e-03],\n",
       "                        [ 1.4420e-02,  2.3278e-02,  2.2435e-02],\n",
       "                        [-2.4144e-02,  3.0519e-03,  2.0771e-02]],\n",
       "              \n",
       "                       [[ 8.8367e-03,  1.8608e-02, -1.3057e-02],\n",
       "                        [-1.4286e-02,  1.9353e-02,  2.5156e-02],\n",
       "                        [ 2.9766e-03,  1.3782e-02,  7.5405e-04]]],\n",
       "              \n",
       "              \n",
       "                      [[[-1.7043e-02, -1.9450e-02,  1.1536e-03],\n",
       "                        [ 1.4387e-02, -1.9641e-02,  1.6249e-02],\n",
       "                        [-1.8719e-02,  1.0636e-02,  1.4277e-03]],\n",
       "              \n",
       "                       [[ 3.0112e-03, -1.4338e-02, -1.7267e-02],\n",
       "                        [-1.9342e-02, -1.8596e-02,  9.0424e-03],\n",
       "                        [ 3.7758e-03, -2.3949e-02,  2.5424e-02]],\n",
       "              \n",
       "                       [[-1.9121e-02,  2.3574e-02,  6.2864e-03],\n",
       "                        [-1.2864e-02, -4.1643e-03, -2.0559e-02],\n",
       "                        [ 2.1115e-02,  9.3234e-04, -1.2294e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 1.0737e-02,  1.4664e-03,  1.0137e-02],\n",
       "                        [-9.6328e-03,  2.4932e-02, -9.4316e-03],\n",
       "                        [ 1.9150e-02,  1.3914e-02, -9.7998e-03]],\n",
       "              \n",
       "                       [[ 1.8056e-02, -2.0134e-02, -1.4529e-02],\n",
       "                        [ 2.4028e-02,  4.9234e-03,  9.9492e-03],\n",
       "                        [ 8.8387e-03, -1.1480e-02,  1.9890e-02]],\n",
       "              \n",
       "                       [[ 6.4205e-03,  4.8392e-03,  1.2881e-03],\n",
       "                        [ 6.2791e-03, -2.1506e-02,  1.4537e-02],\n",
       "                        [ 2.3486e-02, -1.6466e-02,  1.2539e-02]]],\n",
       "              \n",
       "              \n",
       "                      [[[-1.3976e-02, -1.1183e-02, -2.2819e-02],\n",
       "                        [ 1.2056e-02,  1.1152e-02,  2.5771e-03],\n",
       "                        [ 2.1558e-02,  2.5220e-02, -1.3387e-02]],\n",
       "              \n",
       "                       [[ 2.1692e-02, -1.1144e-02,  2.7635e-03],\n",
       "                        [-2.4478e-02,  9.9464e-03,  2.4088e-02],\n",
       "                        [ 1.0706e-02,  4.9660e-03, -1.0350e-02]],\n",
       "              \n",
       "                       [[ 7.0408e-03, -4.6089e-03, -1.0565e-02],\n",
       "                        [ 2.1349e-02, -1.0449e-02, -2.5091e-02],\n",
       "                        [-1.7311e-02, -3.0379e-04,  2.3122e-02]],\n",
       "              \n",
       "                       ...,\n",
       "              \n",
       "                       [[ 9.3324e-03, -5.0768e-03,  9.5671e-03],\n",
       "                        [ 1.3243e-02,  4.1549e-03,  3.0894e-03],\n",
       "                        [-9.6194e-03, -9.5552e-03,  1.9312e-02]],\n",
       "              \n",
       "                       [[-3.8267e-05,  1.7124e-02, -1.6547e-02],\n",
       "                        [-1.2647e-02, -2.2097e-02, -1.8303e-02],\n",
       "                        [-1.0270e-02,  7.9962e-04, -1.3308e-02]],\n",
       "              \n",
       "                       [[ 1.9522e-02,  2.1003e-02, -1.8797e-02],\n",
       "                        [-2.1225e-02,  1.3463e-02,  2.5987e-03],\n",
       "                        [-2.3950e-02,  3.5670e-03, -3.7123e-04]]]])),\n",
       "             ('conv_block3.3.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block3.4.weight',\n",
       "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
       "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
       "             ('conv_block3.4.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
       "             ('conv_block3.4.running_mean',\n",
       "              tensor([-2.3403e-02,  2.4139e-02, -1.2604e-03, -1.5963e-02, -2.5160e-02,\n",
       "                      -3.0243e-02,  4.5972e-02, -2.5679e-02,  3.3627e-02,  3.8063e-02,\n",
       "                      -3.2314e-02,  4.3665e-02, -1.7901e-02, -2.5865e-02,  5.1025e-03,\n",
       "                      -4.8812e-02,  3.0781e-02, -5.4097e-02,  2.8306e-03,  1.4326e-02,\n",
       "                       6.1618e-02,  1.6750e-02, -1.6555e-02, -2.2929e-02, -2.3626e-02,\n",
       "                       3.0484e-02, -3.0941e-02, -7.0117e-03, -7.5982e-02, -2.5775e-04,\n",
       "                      -4.4268e-02, -4.3949e-02,  5.3621e-02, -2.2072e-02, -7.6692e-03,\n",
       "                       2.4993e-02, -2.6241e-03, -1.2113e-02, -1.4135e-02, -8.0844e-03,\n",
       "                       7.4674e-02, -1.1912e-02,  3.1021e-02,  3.1561e-02, -6.6172e-02,\n",
       "                       1.0154e-02,  7.8380e-03,  8.1849e-02, -7.8972e-03, -1.5719e-02,\n",
       "                       4.2811e-02,  9.5408e-03,  4.1607e-02,  1.7046e-02, -8.7318e-03,\n",
       "                       4.2189e-02, -5.2559e-02, -5.5273e-02, -3.2400e-02, -1.2861e-02,\n",
       "                       2.6647e-02, -5.8569e-03, -3.6768e-02,  4.7585e-02,  2.7160e-02,\n",
       "                      -3.8463e-03,  1.2885e-02,  2.8676e-02,  7.7267e-02, -2.9734e-02,\n",
       "                       8.1587e-02,  4.5288e-02,  3.6658e-02, -2.0722e-02, -8.0361e-02,\n",
       "                      -3.4539e-02,  2.7198e-02,  5.7264e-02,  3.4673e-02,  9.2045e-03,\n",
       "                      -3.1113e-02, -1.9879e-02,  1.7322e-02, -1.8221e-02, -7.4506e-02,\n",
       "                      -6.7461e-02,  1.1526e-01,  3.3437e-02, -4.5892e-02, -9.9543e-03,\n",
       "                      -6.2539e-02, -3.3452e-02, -2.6106e-02,  2.4252e-02, -2.7888e-02,\n",
       "                      -2.8089e-02,  1.6590e-02, -2.0094e-02, -3.6624e-02, -4.1363e-03,\n",
       "                       3.5046e-02,  3.8223e-02, -2.9115e-02,  3.5813e-02,  2.9664e-02,\n",
       "                       1.2054e-02, -6.9136e-02, -2.8290e-02,  2.2567e-02, -1.5092e-02,\n",
       "                       1.1275e-02, -2.0010e-02,  9.1300e-03, -4.2870e-02, -2.3731e-02,\n",
       "                      -3.8268e-02, -9.0044e-03,  7.8264e-03, -4.6011e-03,  4.1863e-02,\n",
       "                       2.5460e-02, -5.4645e-04,  3.2968e-02, -4.1624e-02,  4.0610e-02,\n",
       "                       8.8573e-03,  1.4416e-02,  1.1098e-02, -6.8866e-02,  2.6654e-02,\n",
       "                       4.2163e-03, -1.8799e-02, -2.4828e-02,  1.5583e-02,  1.5741e-02,\n",
       "                       3.3135e-02,  1.1871e-02,  1.1249e-02,  8.7920e-03, -5.8101e-02,\n",
       "                      -1.2124e-03,  4.0779e-02,  1.8346e-02,  6.0822e-02, -3.0466e-02,\n",
       "                       1.8072e-02,  9.8246e-03,  9.3839e-03, -4.2388e-02,  9.7393e-02,\n",
       "                       8.1442e-02, -8.1226e-03, -2.1061e-02,  5.8241e-04, -8.4418e-03,\n",
       "                      -1.3285e-02, -5.6527e-02,  2.9582e-02,  2.1796e-02,  2.6568e-02,\n",
       "                       5.0428e-02, -1.6875e-02,  4.1718e-02, -7.1548e-03,  2.8811e-03,\n",
       "                       4.2174e-02, -4.2095e-02,  4.1284e-02, -2.5519e-02, -5.6133e-02,\n",
       "                       1.7806e-02,  1.7213e-02,  1.6800e-02,  4.4488e-02, -1.6354e-02,\n",
       "                       4.3072e-02,  4.7127e-02, -3.3489e-02, -3.7254e-02, -9.0057e-03,\n",
       "                      -7.5176e-02,  2.4992e-02,  1.4230e-02, -1.1200e-02,  1.3964e-02,\n",
       "                       8.5414e-02, -1.2896e-02,  6.4657e-03,  8.5051e-03, -6.1093e-02,\n",
       "                      -2.7084e-02,  1.5054e-02,  2.4903e-02,  2.9436e-02, -2.9919e-03,\n",
       "                      -2.9321e-02, -3.3948e-02, -1.3067e-02,  1.4625e-02,  2.9663e-02,\n",
       "                      -3.0287e-02,  2.6955e-03,  3.3410e-03, -4.2774e-02, -1.9731e-02,\n",
       "                       8.5203e-03,  1.8760e-02,  8.9696e-02,  2.5978e-02,  1.8205e-02,\n",
       "                      -2.6340e-02, -3.0417e-02, -4.5100e-02, -1.8864e-02, -1.9062e-02,\n",
       "                       7.6999e-03,  2.3412e-02, -4.0600e-02, -1.6778e-02,  2.1488e-02,\n",
       "                      -1.9228e-02,  2.1543e-02, -3.9870e-02,  5.0134e-03,  4.7040e-03,\n",
       "                       6.2279e-02,  1.5925e-02, -2.8362e-03,  1.6000e-02,  1.2461e-02,\n",
       "                      -1.7015e-02, -6.6849e-02,  4.1117e-02,  4.8141e-02,  3.6255e-02,\n",
       "                      -4.4734e-02,  3.5321e-02, -3.1685e-02, -1.0294e-02,  6.8980e-02,\n",
       "                       4.3247e-02,  3.7649e-02,  5.9397e-02,  4.5517e-02, -1.1671e-03,\n",
       "                      -1.1961e-02,  5.5634e-02, -7.7889e-03,  6.9783e-03, -4.4990e-02,\n",
       "                      -1.8674e-02,  8.1129e-03, -2.2296e-02,  4.8493e-03, -1.6281e-02,\n",
       "                       2.3729e-02, -1.6135e-02,  6.4140e-03,  6.6366e-02,  3.1809e-02,\n",
       "                      -1.5495e-02, -1.8702e-02, -1.2957e-02,  4.4985e-02,  3.2650e-02,\n",
       "                       2.2188e-02,  2.9505e-02,  2.0172e-02, -1.3129e-03,  3.6024e-02,\n",
       "                      -5.0469e-02, -3.5785e-02,  3.1227e-02, -3.5676e-02, -8.6060e-03,\n",
       "                       3.8816e-02, -1.8786e-02,  4.1569e-03, -5.9919e-02, -3.5145e-02,\n",
       "                      -5.2606e-02, -8.9504e-03, -5.3252e-02,  2.7214e-02, -2.3680e-02,\n",
       "                       4.9956e-04,  2.0728e-02, -2.5779e-02,  3.3720e-05,  5.7408e-02,\n",
       "                       3.3836e-02,  6.7930e-03, -3.6046e-03, -7.5058e-02, -4.3060e-02,\n",
       "                       9.7783e-03,  7.8825e-03, -7.3300e-02,  2.0633e-02,  1.9164e-02,\n",
       "                      -5.2440e-02,  1.5060e-02, -4.0156e-02, -4.0595e-02,  2.5108e-02,\n",
       "                      -3.7302e-02,  5.0860e-03, -2.3332e-02, -7.3122e-03,  4.9057e-02,\n",
       "                      -2.3419e-02, -2.0008e-02, -2.2051e-02,  2.5901e-03,  2.9332e-02,\n",
       "                      -6.6551e-03,  6.4585e-03, -2.5566e-02,  2.3507e-02, -3.3708e-02,\n",
       "                       8.7325e-02,  2.7329e-02,  3.8781e-02, -3.1840e-02,  8.7655e-03,\n",
       "                      -1.5713e-02, -1.2795e-02, -1.3217e-04, -1.4042e-02,  2.4019e-02,\n",
       "                      -2.7642e-02, -2.0896e-02,  1.5069e-03, -1.1335e-02, -3.4462e-02,\n",
       "                       2.1913e-02,  3.1351e-02,  1.5801e-02,  2.6438e-02,  3.4509e-03,\n",
       "                       2.1040e-02,  5.8943e-03, -6.6306e-04, -1.7146e-02, -9.3626e-03,\n",
       "                      -6.7777e-02,  1.3434e-02,  9.3213e-03, -3.5201e-02,  8.0498e-03,\n",
       "                      -5.5928e-02, -4.1310e-02,  1.4588e-02,  7.8801e-03,  2.5068e-02,\n",
       "                       5.9731e-02,  2.7208e-02,  9.6218e-03, -3.8185e-02,  7.6180e-03,\n",
       "                       4.6250e-02, -4.4318e-02,  6.5064e-02,  2.6925e-03, -2.0373e-02,\n",
       "                       4.3078e-02,  4.0704e-03,  5.8240e-02,  4.4589e-02, -7.0906e-02,\n",
       "                       4.2746e-02, -3.1032e-02,  6.0927e-03, -2.3549e-02, -1.3576e-02,\n",
       "                       2.2701e-02, -2.6563e-02,  1.2889e-03,  2.9832e-02,  6.7199e-03,\n",
       "                       6.2454e-02, -1.9497e-02, -1.3611e-02,  6.4973e-03,  1.8110e-02,\n",
       "                      -6.3006e-03, -5.1195e-02,  2.7320e-02,  3.1534e-02,  5.1201e-02,\n",
       "                      -1.2477e-02,  4.0396e-02, -7.2006e-03,  4.9015e-02, -3.5495e-02,\n",
       "                      -3.3213e-02, -4.2789e-02,  3.0888e-02,  3.1199e-02,  5.0967e-02,\n",
       "                      -9.2313e-03, -1.4844e-02,  2.1399e-02, -1.9234e-02, -6.9110e-02,\n",
       "                      -3.0066e-02, -6.3985e-02, -1.8422e-02,  7.7262e-02, -2.4376e-02,\n",
       "                      -3.4040e-02,  8.2357e-02, -2.5837e-02, -6.1169e-02,  4.7719e-03,\n",
       "                       1.4743e-02, -3.1573e-02,  5.0817e-02,  2.7456e-02,  4.0166e-02,\n",
       "                      -3.6707e-02,  1.4107e-02, -2.1221e-02, -1.0940e-02,  7.9945e-02,\n",
       "                       2.7209e-02, -3.3171e-02, -1.2970e-02, -5.0313e-03, -8.7946e-03,\n",
       "                      -6.1544e-03, -1.6021e-02, -3.8913e-02,  3.1769e-02, -1.3591e-02,\n",
       "                      -1.0553e-02, -5.2747e-02,  3.3365e-02,  3.3581e-02, -2.0957e-02,\n",
       "                      -1.6654e-02,  7.0337e-02, -1.8454e-02,  8.2962e-02,  1.0946e-02,\n",
       "                       1.0094e-02,  9.8340e-02, -6.4923e-02,  3.0660e-02,  2.4679e-02,\n",
       "                       7.3955e-03, -1.5770e-02,  7.5012e-02,  3.4380e-02, -1.9881e-02,\n",
       "                      -5.3461e-03, -1.9905e-02,  1.5822e-02,  2.7138e-02,  6.8592e-03,\n",
       "                      -4.9422e-02,  3.2350e-03,  1.0635e-01, -3.7003e-02,  9.8262e-03,\n",
       "                       2.2124e-02, -1.8602e-02,  1.1463e-03, -5.7726e-02, -1.5134e-02,\n",
       "                      -6.8790e-03, -9.7030e-03,  3.0051e-02, -2.1641e-02, -1.3967e-02,\n",
       "                       2.3208e-02,  4.9080e-02,  1.9412e-02,  3.3669e-02,  2.4031e-02,\n",
       "                      -5.9370e-02, -1.8806e-02,  5.2329e-02, -6.7082e-03,  3.2781e-02,\n",
       "                      -4.9885e-02,  5.7193e-03, -1.9516e-02,  4.1698e-02,  2.7717e-03,\n",
       "                       2.9487e-02, -3.5807e-03, -3.5921e-02,  6.2358e-02, -2.1752e-02,\n",
       "                       1.3559e-02,  5.1287e-03, -1.8628e-02, -3.2208e-02, -8.8058e-03,\n",
       "                       2.3628e-02,  3.6533e-02, -6.5075e-03, -2.7625e-02,  9.1912e-03,\n",
       "                      -9.8106e-03, -6.3427e-02, -2.6121e-02,  4.1033e-03, -1.8817e-02,\n",
       "                      -2.8261e-03, -3.0936e-03])),\n",
       "             ('conv_block3.4.running_var',\n",
       "              tensor([0.9241, 0.9242, 0.9260, 0.9376, 0.9331, 0.9368, 0.9299, 0.9273, 0.9316,\n",
       "                      0.9318, 0.9298, 0.9348, 0.9302, 0.9241, 0.9283, 0.9467, 0.9303, 0.9328,\n",
       "                      0.9312, 0.9235, 0.9407, 0.9320, 0.9424, 0.9395, 0.9347, 0.9340, 0.9292,\n",
       "                      0.9252, 0.9318, 0.9284, 0.9316, 0.9317, 0.9303, 0.9315, 0.9325, 0.9316,\n",
       "                      0.9233, 0.9248, 0.9298, 0.9347, 0.9370, 0.9275, 0.9334, 0.9299, 0.9332,\n",
       "                      0.9307, 0.9389, 0.9357, 0.9278, 0.9354, 0.9329, 0.9319, 0.9328, 0.9278,\n",
       "                      0.9341, 0.9308, 0.9320, 0.9643, 0.9395, 0.9415, 0.9318, 0.9323, 0.9243,\n",
       "                      0.9449, 0.9284, 0.9308, 0.9260, 0.9407, 0.9335, 0.9296, 0.9329, 0.9308,\n",
       "                      0.9320, 0.9246, 0.9368, 0.9281, 0.9403, 0.9352, 0.9372, 0.9323, 0.9296,\n",
       "                      0.9340, 0.9281, 0.9337, 0.9510, 0.9338, 0.9354, 0.9304, 0.9308, 0.9264,\n",
       "                      0.9440, 0.9263, 0.9276, 0.9307, 0.9328, 0.9310, 0.9354, 0.9263, 0.9304,\n",
       "                      0.9309, 0.9359, 0.9262, 0.9325, 0.9344, 0.9344, 0.9340, 0.9337, 0.9300,\n",
       "                      0.9282, 0.9279, 0.9318, 0.9375, 0.9248, 0.9280, 0.9271, 0.9286, 0.9317,\n",
       "                      0.9286, 0.9261, 0.9318, 0.9353, 0.9281, 0.9282, 0.9278, 0.9286, 0.9363,\n",
       "                      0.9280, 0.9259, 0.9396, 0.9324, 0.9278, 0.9367, 0.9287, 0.9324, 0.9289,\n",
       "                      0.9234, 0.9343, 0.9275, 0.9288, 0.9345, 0.9312, 0.9265, 0.9322, 0.9336,\n",
       "                      0.9305, 0.9341, 0.9372, 0.9294, 0.9313, 0.9389, 0.9396, 0.9246, 0.9289,\n",
       "                      0.9316, 0.9280, 0.9302, 0.9312, 0.9392, 0.9267, 0.9262, 0.9328, 0.9322,\n",
       "                      0.9274, 0.9283, 0.9353, 0.9306, 0.9300, 0.9442, 0.9267, 0.9298, 0.9258,\n",
       "                      0.9317, 0.9374, 0.9300, 0.9311, 0.9298, 0.9285, 0.9342, 0.9301, 0.9240,\n",
       "                      0.9374, 0.9311, 0.9275, 0.9262, 0.9305, 0.9376, 0.9277, 0.9271, 0.9261,\n",
       "                      0.9294, 0.9229, 0.9237, 0.9459, 0.9364, 0.9353, 0.9306, 0.9307, 0.9338,\n",
       "                      0.9334, 0.9334, 0.9415, 0.9265, 0.9303, 0.9279, 0.9305, 0.9369, 0.9321,\n",
       "                      0.9341, 0.9394, 0.9298, 0.9326, 0.9291, 0.9258, 0.9334, 0.9304, 0.9304,\n",
       "                      0.9371, 0.9306, 0.9248, 0.9349, 0.9342, 0.9314, 0.9443, 0.9336, 0.9350,\n",
       "                      0.9296, 0.9293, 0.9237, 0.9283, 0.9289, 0.9311, 0.9330, 0.9306, 0.9390,\n",
       "                      0.9255, 0.9246, 0.9279, 0.9280, 0.9276, 0.9382, 0.9256, 0.9256, 0.9383,\n",
       "                      0.9349, 0.9288, 0.9333, 0.9336, 0.9319, 0.9247, 0.9411, 0.9310, 0.9275,\n",
       "                      0.9285, 0.9267, 0.9379, 0.9286, 0.9323, 0.9294, 0.9321, 0.9302, 0.9300,\n",
       "                      0.9353, 0.9356, 0.9269, 0.9274, 0.9335, 0.9365, 0.9332, 0.9287, 0.9254,\n",
       "                      0.9278, 0.9286, 0.9295, 0.9265, 0.9332, 0.9422, 0.9249, 0.9355, 0.9314,\n",
       "                      0.9285, 0.9305, 0.9311, 0.9314, 0.9330, 0.9392, 0.9260, 0.9275, 0.9278,\n",
       "                      0.9282, 0.9417, 0.9289, 0.9437, 0.9256, 0.9363, 0.9320, 0.9304, 0.9357,\n",
       "                      0.9273, 0.9317, 0.9348, 0.9289, 0.9292, 0.9343, 0.9362, 0.9296, 0.9295,\n",
       "                      0.9247, 0.9387, 0.9292, 0.9312, 0.9282, 0.9344, 0.9289, 0.9378, 0.9306,\n",
       "                      0.9300, 0.9328, 0.9315, 0.9308, 0.9326, 0.9575, 0.9301, 0.9256, 0.9264,\n",
       "                      0.9292, 0.9342, 0.9257, 0.9427, 0.9316, 0.9400, 0.9270, 0.9274, 0.9249,\n",
       "                      0.9240, 0.9343, 0.9293, 0.9254, 0.9335, 0.9299, 0.9274, 0.9358, 0.9293,\n",
       "                      0.9336, 0.9287, 0.9300, 0.9263, 0.9261, 0.9397, 0.9393, 0.9366, 0.9286,\n",
       "                      0.9355, 0.9296, 0.9305, 0.9385, 0.9349, 0.9285, 0.9305, 0.9461, 0.9365,\n",
       "                      0.9309, 0.9283, 0.9340, 0.9314, 0.9356, 0.9373, 0.9294, 0.9285, 0.9307,\n",
       "                      0.9507, 0.9316, 0.9319, 0.9339, 0.9272, 0.9282, 0.9295, 0.9345, 0.9345,\n",
       "                      0.9303, 0.9339, 0.9422, 0.9306, 0.9242, 0.9328, 0.9309, 0.9283, 0.9278,\n",
       "                      0.9256, 0.9288, 0.9355, 0.9322, 0.9292, 0.9290, 0.9302, 0.9443, 0.9282,\n",
       "                      0.9295, 0.9352, 0.9404, 0.9275, 0.9284, 0.9240, 0.9296, 0.9366, 0.9376,\n",
       "                      0.9228, 0.9392, 0.9304, 0.9356, 0.9230, 0.9264, 0.9312, 0.9352, 0.9322,\n",
       "                      0.9275, 0.9359, 0.9372, 0.9374, 0.9304, 0.9272, 0.9412, 0.9258, 0.9294,\n",
       "                      0.9314, 0.9371, 0.9261, 0.9311, 0.9396, 0.9281, 0.9274, 0.9293, 0.9294,\n",
       "                      0.9288, 0.9276, 0.9352, 0.9312, 0.9287, 0.9322, 0.9245, 0.9288, 0.9331,\n",
       "                      0.9451, 0.9331, 0.9359, 0.9281, 0.9293, 0.9399, 0.9350, 0.9371, 0.9468,\n",
       "                      0.9276, 0.9377, 0.9314, 0.9391, 0.9298, 0.9233, 0.9385, 0.9317, 0.9322,\n",
       "                      0.9330, 0.9328, 0.9386, 0.9330, 0.9340, 0.9342, 0.9331, 0.9287, 0.9288,\n",
       "                      0.9301, 0.9310, 0.9300, 0.9245, 0.9274, 0.9306, 0.9264, 0.9312, 0.9289,\n",
       "                      0.9318, 0.9354, 0.9255, 0.9464, 0.9298, 0.9398, 0.9271, 0.9268, 0.9310,\n",
       "                      0.9322, 0.9338, 0.9318, 0.9367, 0.9337, 0.9448, 0.9294, 0.9358, 0.9271,\n",
       "                      0.9335, 0.9271, 0.9307, 0.9254, 0.9353, 0.9357, 0.9298, 0.9282, 0.9287,\n",
       "                      0.9296, 0.9315, 0.9444, 0.9349, 0.9326, 0.9339, 0.9280, 0.9300])),\n",
       "             ('conv_block3.4.num_batches_tracked', tensor(1)),\n",
       "             ('classifier.0.weight',\n",
       "              tensor([[-7.3588e-03, -2.1914e-02, -1.9642e-02,  ...,  2.3221e-02,\n",
       "                        1.1789e-02,  1.6163e-02],\n",
       "                      [ 1.8375e-02,  6.5596e-03,  1.2149e-02,  ..., -2.1797e-02,\n",
       "                       -2.2720e-02,  1.0706e-02],\n",
       "                      [-1.3000e-02, -2.2607e-02,  1.3962e-02,  ...,  1.3345e-02,\n",
       "                       -2.2565e-02,  2.1443e-02],\n",
       "                      ...,\n",
       "                      [-1.4777e-02,  7.9751e-03,  1.7878e-02,  ..., -1.4035e-02,\n",
       "                       -2.2729e-02, -2.2604e-02],\n",
       "                      [ 6.1004e-03,  7.3328e-03,  1.5103e-02,  ..., -1.8911e-02,\n",
       "                       -1.0498e-02, -9.0639e-03],\n",
       "                      [-1.9866e-02, -4.1592e-05, -1.2398e-03,  ...,  3.8204e-04,\n",
       "                        1.5563e-02,  3.7677e-04]])),\n",
       "             ('classifier.0.bias', tensor([0., 0., 0.,  ..., 0., 0., 0.])),\n",
       "             ('classifier.2.weight',\n",
       "              tensor([[ 0.0378, -0.0263, -0.0602,  ...,  0.0664, -0.0454, -0.0696],\n",
       "                      [-0.0629, -0.0328, -0.0462,  ...,  0.0223, -0.0732,  0.0417],\n",
       "                      [-0.0699, -0.0434, -0.0149,  ...,  0.0341, -0.0069, -0.0267],\n",
       "                      ...,\n",
       "                      [-0.0538, -0.0657,  0.0518,  ..., -0.0171, -0.0640,  0.0382],\n",
       "                      [-0.0539, -0.0043,  0.0107,  ...,  0.0727, -0.0112,  0.0235],\n",
       "                      [ 0.0698, -0.0345, -0.0076,  ...,  0.0612,  0.0321, -0.0629]])),\n",
       "             ('classifier.2.bias',\n",
       "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]))])"
      ]
     },
     "metadata": {},
     "execution_count": 22
    }
   ],
   "source": [
    "model.state_dict()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "wHD02aNt4pNv"
   },
   "source": [
    "# 设置交叉熵损失函数，SGD优化器"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:40.023837Z",
     "start_time": "2025-06-26T01:43:40.019952Z"
    },
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "J1dvP3ES4pNv",
    "outputId": "b3bc7688-31a1-4fee-8a45-dca723eafa16"
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "损失函数: CrossEntropyLoss()\n"
     ]
    }
   ],
   "source": [
    "model = NeuralNetwork()\n",
    "# 定义损失函数和优化器\n",
    "loss_fn = nn.CrossEntropyLoss()  # 交叉熵损失函数，适用于多分类问题，里边会做softmax，还有会把0-9标签转换成one-hot编码\n",
    "\n",
    "print(\"损失函数:\", loss_fn)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:43:40.035848Z",
     "start_time": "2025-06-26T01:43:40.032419Z"
    },
    "id": "qUeLZMIE4pNv"
   },
   "outputs": [],
   "source": [
    "model = NeuralNetwork()\n",
    "\n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9)  # SGD优化器，学习率为0.001，动量为0.9"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.732814Z",
     "start_time": "2025-06-26T01:43:40.035848Z"
    },
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 123,
     "referenced_widgets": [
      "be06c2b352c14f5c9aee8a16d5b11e24",
      "32813af8951f4c2b8dd4171a5937c5a9",
      "9cb464da4e09479292889a917a7f436a",
      "d36666c817b841f99576af80feb4a7ee",
      "12492b59648c43e4bdbe100ddb3a3702",
      "1ee3d0c200c64a17a46ee92c07e0e1b7",
      "7ab573b1c9fd48a4bb67890514744bb0",
      "8a284f1425f343c7b02cf5e060519df4",
      "c97b171f55714254a3bd3d0d73882031",
      "6e4a4ef865da440d92b72bcf10f8b877",
      "e82290e7a17643cdb2eaf7df030265f1"
     ]
    },
    "id": "qI1L-GG94pNv",
    "outputId": "eec5f542-8a40-4900-9562-4aa58ba68cb9"
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "使用设备: cuda:0\n",
      "训练开始，共35200步\n"
     ]
    },
    {
     "output_type": "display_data",
     "data": {
      "text/plain": [
       "  0%|          | 0/35200 [00:00<?, ?it/s]"
      ],
      "application/vnd.jupyter.widget-view+json": {
       "version_major": 2,
       "version_minor": 0,
       "model_id": "be06c2b352c14f5c9aee8a16d5b11e24"
      }
     },
     "metadata": {}
    },
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "早停触发! 最佳验证准确率(如果是回归，这里是损失): 81.3600\n",
      "早停: 在10500 步\n"
     ]
    }
   ],
   "source": [
    "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
    "print(f\"使用设备: {device}\")\n",
    "model = model.to(device) #将模型移动到GPU\n",
    "early_stopping=EarlyStopping(patience=5, delta=0.001)\n",
    "model_saver=ModelSaver(save_dir='model_weights', save_best_only=True)\n",
    "\n",
    "\n",
    "model, history = train_classification_model(model, train_loader, val_loader, loss_fn, optimizer, device, num_epochs=50, early_stopping=early_stopping, model_saver=model_saver, tensorboard_logger=None)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.737721Z",
     "start_time": "2025-06-26T01:45:37.732814Z"
    },
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "pJWn5FRH4pNv",
    "outputId": "bef1e508-d9ed-4843-9f49-c57b2450496d"
   },
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "[{'loss': 0.003491724608466029, 'acc': 100.0, 'step': 10401},\n",
       " {'loss': 0.003816040698438883, 'acc': 100.0, 'step': 10402},\n",
       " {'loss': 0.0022664566058665514, 'acc': 100.0, 'step': 10403},\n",
       " {'loss': 0.0018923301249742508, 'acc': 100.0, 'step': 10404},\n",
       " {'loss': 0.0029820818454027176, 'acc': 100.0, 'step': 10405},\n",
       " {'loss': 0.004303900524973869, 'acc': 100.0, 'step': 10406},\n",
       " {'loss': 0.0022796019911766052, 'acc': 100.0, 'step': 10407},\n",
       " {'loss': 0.0030815221834927797, 'acc': 100.0, 'step': 10408},\n",
       " {'loss': 0.003537941724061966, 'acc': 100.0, 'step': 10409},\n",
       " {'loss': 0.002074974589049816, 'acc': 100.0, 'step': 10410},\n",
       " {'loss': 0.0021730950102210045, 'acc': 100.0, 'step': 10411},\n",
       " {'loss': 0.0026771053671836853, 'acc': 100.0, 'step': 10412},\n",
       " {'loss': 0.0021046821493655443, 'acc': 100.0, 'step': 10413},\n",
       " {'loss': 0.0016319683054462075, 'acc': 100.0, 'step': 10414},\n",
       " {'loss': 0.0023830805439502, 'acc': 100.0, 'step': 10415},\n",
       " {'loss': 0.0014363321242854, 'acc': 100.0, 'step': 10416},\n",
       " {'loss': 0.002298145554959774, 'acc': 100.0, 'step': 10417},\n",
       " {'loss': 0.002464946359395981, 'acc': 100.0, 'step': 10418},\n",
       " {'loss': 0.0033617469016462564, 'acc': 100.0, 'step': 10419},\n",
       " {'loss': 0.0027641693595796824, 'acc': 100.0, 'step': 10420},\n",
       " {'loss': 0.003921723924577236, 'acc': 100.0, 'step': 10421},\n",
       " {'loss': 0.0020040380768477917, 'acc': 100.0, 'step': 10422},\n",
       " {'loss': 0.0023411663714796305, 'acc': 100.0, 'step': 10423},\n",
       " {'loss': 0.0020229367073625326, 'acc': 100.0, 'step': 10424},\n",
       " {'loss': 0.0016176165081560612, 'acc': 100.0, 'step': 10425},\n",
       " {'loss': 0.0014053364284336567, 'acc': 100.0, 'step': 10426},\n",
       " {'loss': 0.0022467602975666523, 'acc': 100.0, 'step': 10427},\n",
       " {'loss': 0.0028117455076426268, 'acc': 100.0, 'step': 10428},\n",
       " {'loss': 0.0017628881614655256, 'acc': 100.0, 'step': 10429},\n",
       " {'loss': 0.0020254808478057384, 'acc': 100.0, 'step': 10430},\n",
       " {'loss': 0.002358924364671111, 'acc': 100.0, 'step': 10431},\n",
       " {'loss': 0.0019077338511124253, 'acc': 100.0, 'step': 10432},\n",
       " {'loss': 0.0017173555679619312, 'acc': 100.0, 'step': 10433},\n",
       " {'loss': 0.0028551078867167234, 'acc': 100.0, 'step': 10434},\n",
       " {'loss': 0.001715813996270299, 'acc': 100.0, 'step': 10435},\n",
       " {'loss': 0.0024141171015799046, 'acc': 100.0, 'step': 10436},\n",
       " {'loss': 0.0017758128233253956, 'acc': 100.0, 'step': 10437},\n",
       " {'loss': 0.0019963092636317015, 'acc': 100.0, 'step': 10438},\n",
       " {'loss': 0.002457571681588888, 'acc': 100.0, 'step': 10439},\n",
       " {'loss': 0.0023125477600842714, 'acc': 100.0, 'step': 10440},\n",
       " {'loss': 0.0020277928560972214, 'acc': 100.0, 'step': 10441},\n",
       " {'loss': 0.0026837456971406937, 'acc': 100.0, 'step': 10442},\n",
       " {'loss': 0.0026223028544336557, 'acc': 100.0, 'step': 10443},\n",
       " {'loss': 0.0015754913911223412, 'acc': 100.0, 'step': 10444},\n",
       " {'loss': 0.0015004329616203904, 'acc': 100.0, 'step': 10445},\n",
       " {'loss': 0.0014760923804715276, 'acc': 100.0, 'step': 10446},\n",
       " {'loss': 0.0030167759396135807, 'acc': 100.0, 'step': 10447},\n",
       " {'loss': 0.0025633256882429123, 'acc': 100.0, 'step': 10448},\n",
       " {'loss': 0.002551351673901081, 'acc': 100.0, 'step': 10449},\n",
       " {'loss': 0.0030058836564421654, 'acc': 100.0, 'step': 10450},\n",
       " {'loss': 0.0035295237321406603, 'acc': 100.0, 'step': 10451},\n",
       " {'loss': 0.0018499366706237197, 'acc': 100.0, 'step': 10452},\n",
       " {'loss': 0.0026926135178655386, 'acc': 100.0, 'step': 10453},\n",
       " {'loss': 0.001155345467850566, 'acc': 100.0, 'step': 10454},\n",
       " {'loss': 0.002534798113629222, 'acc': 100.0, 'step': 10455},\n",
       " {'loss': 0.00293498276732862, 'acc': 100.0, 'step': 10456},\n",
       " {'loss': 0.0024838484823703766, 'acc': 100.0, 'step': 10457},\n",
       " {'loss': 0.002520022913813591, 'acc': 100.0, 'step': 10458},\n",
       " {'loss': 0.0014982324792072177, 'acc': 100.0, 'step': 10459},\n",
       " {'loss': 0.0018371138721704483, 'acc': 100.0, 'step': 10460},\n",
       " {'loss': 0.0021160792093724012, 'acc': 100.0, 'step': 10461},\n",
       " {'loss': 0.0019232832128182054, 'acc': 100.0, 'step': 10462},\n",
       " {'loss': 0.0023400878999382257, 'acc': 100.0, 'step': 10463},\n",
       " {'loss': 0.002586106536909938, 'acc': 100.0, 'step': 10464},\n",
       " {'loss': 0.0019052830757573247, 'acc': 100.0, 'step': 10465},\n",
       " {'loss': 0.0027190411929041147, 'acc': 100.0, 'step': 10466},\n",
       " {'loss': 0.0024313766043633223, 'acc': 100.0, 'step': 10467},\n",
       " {'loss': 0.0016618946101516485, 'acc': 100.0, 'step': 10468},\n",
       " {'loss': 0.0013367269420996308, 'acc': 100.0, 'step': 10469},\n",
       " {'loss': 0.003011970315128565, 'acc': 100.0, 'step': 10470},\n",
       " {'loss': 0.0018634084844961762, 'acc': 100.0, 'step': 10471},\n",
       " {'loss': 0.003616468980908394, 'acc': 100.0, 'step': 10472},\n",
       " {'loss': 0.0023980180267244577, 'acc': 100.0, 'step': 10473},\n",
       " {'loss': 0.0022974475286900997, 'acc': 100.0, 'step': 10474},\n",
       " {'loss': 0.0017363885417580605, 'acc': 100.0, 'step': 10475},\n",
       " {'loss': 0.002192792249843478, 'acc': 100.0, 'step': 10476},\n",
       " {'loss': 0.0013506278628483415, 'acc': 100.0, 'step': 10477},\n",
       " {'loss': 0.0021264718379825354, 'acc': 100.0, 'step': 10478},\n",
       " {'loss': 0.0017523939022794366, 'acc': 100.0, 'step': 10479},\n",
       " {'loss': 0.0017105976585298777, 'acc': 100.0, 'step': 10480},\n",
       " {'loss': 0.0025257074739784002, 'acc': 100.0, 'step': 10481},\n",
       " {'loss': 0.002196722663938999, 'acc': 100.0, 'step': 10482},\n",
       " {'loss': 0.0029061215464025736, 'acc': 100.0, 'step': 10483},\n",
       " {'loss': 0.0017808187985792756, 'acc': 100.0, 'step': 10484},\n",
       " {'loss': 0.0015815834049135447, 'acc': 100.0, 'step': 10485},\n",
       " {'loss': 0.00326604675501585, 'acc': 100.0, 'step': 10486},\n",
       " {'loss': 0.002798824803903699, 'acc': 100.0, 'step': 10487},\n",
       " {'loss': 0.0021325622219592333, 'acc': 100.0, 'step': 10488},\n",
       " {'loss': 0.006164507009088993, 'acc': 100.0, 'step': 10489},\n",
       " {'loss': 0.0018026498146355152, 'acc': 100.0, 'step': 10490},\n",
       " {'loss': 0.0016174401389434934, 'acc': 100.0, 'step': 10491},\n",
       " {'loss': 0.0020800866186618805, 'acc': 100.0, 'step': 10492},\n",
       " {'loss': 0.002972046844661236, 'acc': 100.0, 'step': 10493},\n",
       " {'loss': 0.0030487366020679474, 'acc': 100.0, 'step': 10494},\n",
       " {'loss': 0.001615959801711142, 'acc': 100.0, 'step': 10495},\n",
       " {'loss': 0.0015539713203907013, 'acc': 100.0, 'step': 10496},\n",
       " {'loss': 0.002975056879222393, 'acc': 100.0, 'step': 10497},\n",
       " {'loss': 0.0023341104388237, 'acc': 100.0, 'step': 10498},\n",
       " {'loss': 0.0017738008173182607, 'acc': 100.0, 'step': 10499}]"
      ]
     },
     "metadata": {},
     "execution_count": 26
    }
   ],
   "source": [
    "history['train'][-100:-1]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.741226Z",
     "start_time": "2025-06-26T01:45:37.737721Z"
    },
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "pMjJdQ2l4pNw",
    "outputId": "61763ecc-04ee-4e3d-f4bf-5e8effa8a79c"
   },
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "[{'loss': 2.3045929691314697, 'acc': 9.12, 'step': 0},\n",
       " {'loss': 1.238996053314209, 'acc': 55.2, 'step': 500},\n",
       " {'loss': 0.8884034400939942, 'acc': 68.5, 'step': 1000},\n",
       " {'loss': 0.798821385192871, 'acc': 72.0, 'step': 1500},\n",
       " {'loss': 0.7209422531604767, 'acc': 74.56, 'step': 2000},\n",
       " {'loss': 0.716133192205429, 'acc': 76.2, 'step': 2500},\n",
       " {'loss': 0.7522935387611389, 'acc': 74.5, 'step': 3000},\n",
       " {'loss': 0.7045076326847076, 'acc': 76.7, 'step': 3500},\n",
       " {'loss': 0.7004623531341553, 'acc': 77.4, 'step': 4000},\n",
       " {'loss': 0.6616050040721894, 'acc': 79.54, 'step': 4500},\n",
       " {'loss': 0.655171839094162, 'acc': 80.06, 'step': 5000},\n",
       " {'loss': 0.6894081469178199, 'acc': 80.16, 'step': 5500},\n",
       " {'loss': 0.6748722618579864, 'acc': 81.16, 'step': 6000},\n",
       " {'loss': 0.6891608224391937, 'acc': 80.96, 'step': 6500},\n",
       " {'loss': 0.6997112890720367, 'acc': 80.96, 'step': 7000},\n",
       " {'loss': 0.7185570673942566, 'acc': 80.74, 'step': 7500},\n",
       " {'loss': 0.7153246128082276, 'acc': 81.36, 'step': 8000},\n",
       " {'loss': 0.7157424350500107, 'acc': 81.34, 'step': 8500},\n",
       " {'loss': 0.7296340623855591, 'acc': 81.0, 'step': 9000},\n",
       " {'loss': 0.7395104514598847, 'acc': 81.04, 'step': 9500},\n",
       " {'loss': 0.744106754732132, 'acc': 81.06, 'step': 10000}]"
      ]
     },
     "metadata": {},
     "execution_count": 27
    }
   ],
   "source": [
    "history['val'][-1000:-1]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "id": "NcujMCRC4pNw"
   },
   "source": [
    "# 绘制损失曲线和准确率曲线"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.816716Z",
     "start_time": "2025-06-26T01:45:37.744941Z"
    },
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 466
    },
    "id": "3xZ57j-C4pNw",
    "outputId": "cc0820e6-c0fd-477f-b44c-9e430c58e7d0"
   },
   "outputs": [
    {
     "output_type": "display_data",
     "data": {
      "text/plain": [
       "<Figure size 1000x500 with 2 Axes>"
      ],
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzoAAAHBCAYAAAChe85HAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAiDhJREFUeJzs3XeUVPX9//HnzOzsbK+wBVh67wiIgJ0mKAG7Br9KYokJmBiiJuRnCZZgTOyNGGONBCtoFJUVRATpRQHp0mEX2GV7mdmZ+/vj7g4sdfvdnXk9zrln5t655T27986d93yazTAMAxERERERkQBitzoAERERERGRuqZER0REREREAo4SHRERERERCThKdEREREREJOAo0RERERERkYCjREdERERERAKOEh0REREREQk4SnRERERERCTgKNEREREREZGAE2J1AFXh8/k4cOAA0dHR2Gw2q8MREQkahmGQn59PixYtsNv121gF3ZdERKxT5XuTUQ0vvfSS0atXLyM6OtqIjo42zjvvPGPu3Lln3Oa9994zunTpYrhcLqNnz57GZ599Vp1DGoZhGHv37jUATZo0adJk0bR3795qf3YHMt2XNGnSpMn66Wz3pmqV6LRq1YrHH3+cTp06YRgGb775JuPGjWPt2rX06NHjpPW/++47brzxRqZPn84VV1zBzJkzGT9+PGvWrKFnz55VPm50dDQAe/fuJSYmpjohA+DxeJg3bx4jR47E6XRWe3uRM9H5JfXNynMsLy+PtLQ0/+ewmHRfksZO55jUJ6vPr6rem6qV6IwdO7bS/GOPPcbLL7/MsmXLTpnoPPvss1x22WXce++9ADzyyCOkp6fzwgsvMGPGjCoft6JaQExMTI1vKBEREcTExOhilzqn80vqW2M4x1Q9qzLdl6Sx0zkm9amxnF9nuzfVuI2O1+vl/fffp7CwkMGDB59ynaVLlzJlypRKy0aNGsWcOXPOuO/S0lJKS0v983l5eYD5R/V4PNWOtWKbmmwrcjY6v6S+WXmO6bwWEZGmqtqJzvr16xk8eDAlJSVERUUxe/Zsunfvfsp1MzIySE5OrrQsOTmZjIyMMx5j+vTpTJs27aTl8+bNIyIioroh+6Wnp9d4W5Gz0fkl9c2Kc6yoqKjBjykiIlIXqp3odOnShXXr1pGbm8sHH3zALbfcwjfffHPaZKcmpk6dWqkkqKIe3siRI2tcRSA9PZ0RI0ao+FbqnM4vqW9WnmMVJeoiIiJNTbUTndDQUDp27AhA//79WblyJc8++yz//Oc/T1o3JSWFzMzMSssyMzNJSUk54zFcLhcul+uk5U6ns1Y3+dpuL3ImwXh+GYZBWVkZXq/X6lACmtfrJSQkBK/XW+ddPDscDkJCQk5bzznYzum6dKbrw+PxEBISQklJia6fRuhs14WINA21HkfH5/NVak9zvMGDBzN//nzuvvtu/7L09PTTtukRkabD7XZz8OBBVW1qAIZhkJKSwt69e+vli1dERASpqamEhobW+b6D1dmuj/r+n0rt6boQafqqlehMnTqV0aNH07p1a/Lz85k5cyYLFy7kyy+/BODmm2+mZcuWTJ8+HYDf/e53XHTRRTz55JNcfvnlzJo1i1WrVvHKK6/U/TsRkQbj8/nYuXMnDoeDFi1aEBoaqi9r9cjn81FQUEBUVFSdlugYhoHb7ebw4cPs3LmTTp06aVDQOlCV66O+/qdSe7ouRAJHtRKdQ4cOcfPNN3Pw4EFiY2Pp3bs3X375JSNGjABgz549lT4MhgwZwsyZM7n//vv585//TKdOnZgzZ061xtARkcbH7Xbj8/lIS0urVQchUjU+nw+3201YWFidf+EKDw/H6XSye/du/zGkdqpyfdTn/1RqT9eFSGCoVqLz73//+4yvL1y48KRl1157Lddee221ghKRpkFf0AKD/o/1Q3/Xpk3/P5GmT1exiIiIiIgEHCU6IiI11LZtW5555pk62dfC
hQux2Wzk5OTUyf5ErFaX14eISE0o0RGRoHLxxRdX6gmyNlauXMkdd9xRJ/uSqlu0aBFjx46lRYsW2Gw25syZU+l1wzB48MEHSU1NJTw8nOHDh7Nt27ZK62RnZzNhwgRiYmKIi4vj1ltvpaCgoAHfReOk60NEAokSHRGR41SMfVIVzZs3V2cMFigsLKRPnz68+OKLp3z9iSee4LnnnmPGjBksX76cyMhIRo0aRUlJiX+dCRMmsHHjRtLT0/n0009ZtGiRvpRXga4PEWlKlOiISNCYOHEi33zzDc8++yw2mw2bzcYbb7yBzWbj888/p3///rhcLhYvXsyOHTsYN24cycnJREVFMXDgQL766qtK+zuxao7NZuPVV1/lyiuvJCIigk6dOvHJJ5/UON4PP/yQHj16EB4eTu/evXnqqacqvf7SSy/RqVMnwsLCSE5O5pprrvG/9sEHH9CrVy/Cw8NJTExk+PDhFBYW1jiWxmT06NE8+uijXHnllSe9ZhgGzzzzDPfffz/jxo2jd+/evPXWWxw4cMBf8rNp0ya++OILXn31VQYNGsT555/P888/z6xZszhw4EADv5vGozFfH16vl1tvvZV27doRHh5Oly5dePbZZ09a77XXXqNHjx64XC5SU1OZPHmy/7WcnBx+9atfkZycTFhYGD179uTTTz+t2R9LRJqEWg8Y2th9u+0wf/1sE5FldsZYHYxIgDIMg2KPNaO7hzsdVR7D59lnn2Xr1q307NmThx9+GICNGzcC8Kc//Yl//OMftG/fnvj4ePbu3cuYMWN47LHHcLlcvPXWW4wdO5YtW7bQunXr0x5j2rRpPPHEE/z973/n+eefZ8KECezevZuEhIRqva/Vq1dz3XXX8Ze//IVrr72WBQsWcM8999CsWTMmTpzIqlWr+O1vf8vbb7/NkCFDyM7O5ttvvwXg4MGD3HjjjTzxxBNceeWV5Ofn8+2332IYRrViaIp27txJRkYGw4cP9y+LjY1l0KBBLF26lBtuuIGlS5cSFxfHgAED/OsMHz4cu93O8uXLT5lAlZaWVhocOy8vDwCPx4PH46m0rsfjwTAMfD4fPp8POPkaMQyDYrcXR6mn3segquo18vTTT7N161Z69OjBtGnTgMrXxxNPPFHp+rjssst45JFHcLlcvP3224wdO5ZNmzZVuj4q/g4Vpk2bxuOPP87f/vY3XnjhBSZMmMDOnTvPen2UlZXRsmVL3n33XRITE/nuu++48847SU5O5rrrrgPg5Zdf5p577mH69Olcdtll5Obm8t133/n/D6NHjyY/P5+33nqLDh068OOPP2Kz2SrFdzyfz4dhGHg8HhwOx1n/fo1NxXl54vl5JoZhsHZvLu+t3sfG/XkE/ieG1JRhGOQXOHhxx5Iaf4b1bBnD41fWbMiZqp7XAZ/olHkNNmXk0ypSgxmK1Jdij5fuD35pybF/fHgUEaFV+yiLjY0lNDSUiIgIUlJSANi8eTMADz/8sH9MMICEhAT69Onjn3/kkUeYPXs2n3zySaVfiU80ceJEbrzxRgD++te/8txzz7FixQouu+yyar2vp556imHDhvHAAw/g8/lISUlh586d/P3vf2fixIns2bOHyMhIrrjiCqKjo2nTpg39+vUDzESnrKyMq666ijZt2gDQq1evah2/qcrIyAAgOTm50vLk5GT/axkZGSQlJVV6PSQkhISEBP86J5o+fbr/y//x5s2bd1L1rJCQEFJSUigoKMDtdgNQ7PYy+KllNXtTtbR0ynmEh579i7rNZsNutxMSEuJ/TxXJ3R//+EcGDRrkX7ddu3a0a9fOP3/PPffw4Ycf8t577/mrAPp8PkpKSvxJIcANN9zA5Zdf7t/n888/z8KFCyslpqczZcoU//OxY8eyaNEi/vvf//qvrccee4xJkyYxceJEAFJSUujSpQt5eXksWLCAFStWsHz5cjp27AjAhRdeCFApvuO53W6Ki4tZtGhRlavrNUbp6elnXafQAyuP
2FiaaSejWN+XpKpsHCyqeU0Bb3E+c+fuqdG2RUVFVVov4BOduAgnYF7EIiKnc/yv+wAFBQX85S9/4bPPPvMnDsXFxezZc+YP5d69e/ufR0ZGEhMTw6FDh6odz6ZNmxg3blylZUOGDOHZZ5/F6/UyYsQI2rRpQ/v27bnsssu47LLL/FWC+vTpw7Bhw+jVqxejRo1i5MiRXHPNNcTHx1c7DjFNnTq10hftvLw80tLSGDlyJDExMZXWLSkpYe/evURFRfkHmgxxW/dFOTomuso/BoSEhBAaGup/TxUJzwUXXFDpfRYUFDBt2jTmzp1b6fo4fPiwfz273U5YWFil7QYMGOCfj4mJISYmhoKCgpP+hqfy0ksv8frrr7Nnzx6Ki4txu9307dvXf40dPHiQ0aNHn3Jf27Zto1WrVpxzzjlV+juA+X8MDw/nwgsvbJIDhno8HtLT0xkxYgROp/Ok1w3DYPnOo7y7ah9f/piJx2uW34Q57YzumcKo7kmEOZteSZY0jLKyMtasXsM5/c8hJKRm6URsmJOeLc9+7Z/K6X6gOFHAJzoJkaEAFDbdH2NEGr1wp4MfHx5l2bHrQmRkZKX5e+65h/T0dP7xj3/QsWNHwsPDueaaa/y/0J/OiV8ozlQ1pjaio6NZs2YNCxcuZN68eTz44IP85S9/YeXKlcTFxZGens53333HvHnzeP755/l//+//sXz58kq/wgeiipK6zMxMUlNT/cszMzPp27evf50Tk8+ysjKys7P925/I5XLhcrlOWu50Ok/6n3u9Xn/pSMWgk5EuZ6VrxOfzkZ+XT3RMdL0PTFmd6p1wrGQHjg2aGR1dOc777rvvlNeHx+OptN7x+wLz73ji68cf53RmzZrFvffey5NPPsngwYOJjo7m73//O8uXL8dut/uv3+P/5serSNiq87e22+3YbLZT/o+bkhPjP5xfyger9/Huyj3syjr2q3j31BhuPDeNn/VtSWx4032/0jA8Hg+FOwwu6pJsyfVR1WMGfKITX57ouH02SjzeJv1hJdJY2Wy2Kv9ibLXQ0FC83rO3J1qyZAkTJ070t9coKChg165d9RzdMd26dWPJkiWVln333Xd07tzZ314gJCSE4cOHM3z4cB566CHi4uJYsGABV111FTabjaFDhzJ06FAefPBB2rRpw+zZsyuVSgSidu3akZKSwvz58/2JTV5eHsuXL+fXv/41AIMHDyYnJ4fVq1fTv39/ABYsWIDP56tUPasunXiN+Hw+ykIdRISG1HuiUx2N9fpYsmQJQ4YM4Te/+Y1/2Y4dO/zPo6Ojadu2LfPnz+eSSy45afvevXuzb98+tm7dSufOnestzsbK6zP4dtthZq3Yy1ebMinzmaU3Ua4Qfta3BTcMTKNXy9h6by8m0tCaxjeTWoh2hRBit1HmMzha5CE6oukVP4tI3Wnbti3Lly9n165dREVFnba0pVOnTnz00UeMHTsWm83mbyvTUP7whz8wcOBAHnnkEa699lq+/vprXnzxRV566SUAPv30U3766ScuvPBC4uPjmTt3Lj6fjy5durB8+XLmz5/PyJEjSUpKYvny5Rw+fJhu3bo1WPz1qaCggO3bt/vnd+7cybp160hISKB169bcfffdPProo3Tq1Il27drxwAMP0KJFC8aPHw+YSeRll13G7bffzowZM/B4PEyePJkbbriBFi1aWPSuGofGen106tSJt956iy+//JJ27drx9ttvs3LlykollH/5y1+48847SUpK8nc8sGTJEu666y4uuugiLrzwQq6++mqeeuopOnbsyObNm7HZbNVuP9eU5JTC81/v4MM1B9ifU+xf3q91HDcObM3lvVOJdAX8V0EJYo3nZ6R6YrPZ/O10corUUEck2N1zzz04HA66d+9O8+bNT9vm5qmnniI+Pp4hQ4YwduxYRo0aVa36/bV1zjnn8N577zFr1ix69+7NX//6V6ZNm+ZvaB0XF8dHH33EpZdeSrdu3ZgxYwb//e9/
6dGjBzExMSxatIgxY8bQuXNn7r//fp588klGjx7dYPHXp1WrVtGvXz9/5wtTpkyhX79+PPjgg4BZrequu+7ijjvuYODAgRQUFPDFF19Uamfxzjvv0LVrV4YNG8aYMWM4//zzeeWVVyx5P41JY70+fvWrX3HVVVdx/fXXM2jQILKysiqV7gDccsstPPPMM7z00kv06NGDK664otJAsR9++CEDBw7kxhtvpHv37tx3331VKr1qasq8PuZtzOD2t9fwlzUOnluwg/05xcSGO5k4pC1f3H0Bs38zlOsGpinJkYBnM5pAf6N5eXnExsaSm5tbpQaLJxrx1EK2HSrkzYn9uajrqetfi9SUx+Nh7ty5jBkzJmiqRpaUlLBz507atWvXJBvpNjU+n4+8vDxiYmLqpZrTmf6ftf38DVRn+rtU5fqo7/+p1F5T+5zbk1XEu6v28P6qfRzKP9YV+rlt4/n5oDZc1jNFnQtInbH6u09V701BkcrHRYQChRxViY6IiIgECHeZj3k/ZjBrxV4Wbz/iX54YGcqV/VqQXLCdiVcPDJof4UROFBSJTry/6tqZe0sSEakvd955J//5z39O+dpNN93EjBkzGjgikcZD10f17DhcwKwVe/hwzX6yC83vNjYbnN+xGTee25rh3ZKxGV7mzt1+lj2JBLYgSXTMnteyVaIjIhZ5+OGHueeee075mqqESbDT9XF2JR4vc9cfZNaKvazYle1fnhzj4roBaVw3II20hGOD13o8gdf+SKS6giTRMUt0VHVNRKySlJREUlKS1WGINEq6Pk5v08E8Zq3Yw+y1+8krMQcFtNvgki5J3Hhuay7u0pwQh9p5iZxKUCU6qromIiIijV1haRmf/nCA/67Yy7q9Of7lLePCuX5gGtcOaEVqbLh1AYo0EUGS6JhV11SiIyIiIo2RYRj8sC+XWSv38Mm6AxS6zapnIXYbI7onc8O5rbmgYzPsdg3qKVJVQZHoxPmrrqlER0RERBqP3GIPH6/bz39X7GXTwTz/8nbNIrl+YBpXn9OK5tEuCyMUabqCItGJ14ChIiIi0kgYhsGq3Uf574o9zF1/kBKPD4DQEDuje6Zww8DWnNc+AZtNpTcitREciU6kqq6JiIiItbIL3Xy0Zh+zVu5l+6EC//LOyVHcMLA1V53TsnzsPxGpC8GR6ISbJTpFbi8lHq9GBhaRGmvbti133303d99991nXtdlszJ49m/Hjx9d7XCKNQXWuj2Dh8xks/SmL/67Yw7yNmbi9ZulNuNPBFb1TueHc1pzTOk6lNyL1ICgSneiwEOwY+LCRU+QhJVaJjoiIiNSvzRl5/Ort1ezOKvIv69kyhhsGtuZnfVsQE+a0MDqRwBcUiY7NZiPSCfkes9g4JTbM6pBEREQkwP3t883szioiyhXCuL4tuPHc1vRsGWt1WCJBI2hGmIosT+nU85pI8HrllVdo0aIFPp+v0vJx48bxy1/+kh07djBu3DiSk5OJiopi4MCBfPXVV3V2/PXr13PppZcSHh5OYmIid9xxBwUFx+rpL1y4kHPPPZfIyEji4uIYOnQou3fv9m87bNgwoqOjiYmJoX///qxatarOYhNp6OvjqaeeolevXkRGRpKWlsZvfvObStcDwJIlS7j44ouJiIggPj6eUaNGcfToUQB8Ph9PPPEEHTt2xOVy0bp1ax577LEax1PX8ks8LNmeBcCHvx7CY1f2UpIj0sCU6IhI7RkGuAutmQyjymFee+21ZGVl8fXXX/uXZWdn88UXXzBhwgQKCgoYM2YM8+fPZ+3atVx22WWMHTuWPXv21PpPVFhYyKhRo4iPj2flypW8//77fPXVV0yePBmAsrIyxo8fz0UXXcQPP/zA0qVLueOOO/z19u+44w5atmzJypUrWb16NX/6059wOlXtpck41TXiKWpU10hDXx92u53nnnuOjRs38uabb7JgwQLuu+8+/+vr1q1j2LBhdO/enaVLl7J48WLGjh2L12uOLzN1
6lQef/xxHnjgAX788UdmzpxJcnJyjWKpDwu3HMbt9dG+WSSdk6OsDkckKAVF1TWASKcB2DhaqERHpM55iuCvLaw59p8PQGhklVaNj49n9OjRzJw5k2HDhgHwwQcf0KxZMy655BLsdjt9+vTxr//II48we/ZsPvnkE39CUlMzZ86kpKSEt956i8hIM94XXniBsWPH8re//Q2n00lubi5XXHEFHTp0AKBbt26A+cv1/v37ue++++jatSsAnTp1qlU80sBOuEbsQFxDHbuK10hDXx/Hd1jQtm1bHn30Ue68805eeuklAJ544gkGDBjgnwfo0aMHAPn5+Tz77LO88MIL3HLLLQB06NCB888/v9px1JcvN2YAMLJHijoaELFI0JXoZBeqi2mRYDZhwgQ+/PBDSktLAXjnnXe44YYbsNvtFBQUcM8999CtWzfi4uKIiopi06ZNdVKis2nTJvr06eNPcgCGDh2Kz+djy5YtJCQkMHHiREaNGsXYsWN59tlnOXjwoH/d3/zmN9xxxx0MHz6cxx9/nB07dtQ6JpETNeT18dVXXzFs2DBatmxJdHQ0//d//0dWVhZFRWbD/YoSnVPZtGkTpaWlp33daqVlXhZuOQzAqB6Np5RJJNgEUYmO+aiqayL1wBlh/mps1bGrYezYsRiGwWeffcbAgQP59ttvefrppwG45557SE9P5x//+AcdO3YkPDyca665Bre7YT43Xn/9dX7729/yxRdf8O6773L//feTnp7Oueeey5/+9CcmTpzI559/zueff85DDz3ErFmzuPLKKxskNqmlE64Rn89HXn4+MdHR2O31/JtjNa6Rhro+du3axRVXXMGvf/1rHnvsMRISEli8eDG33norbrebiIgIwsPDT7v9mV5rDL7bnkVBaRnJMS76tIqzOhyRoBU0iU5UiFlHWYmOSD2w2apcfcxqYWFhXHXVVbzzzjts376dLl26cM455wBmw+eJEyf6k4eCggJ27dpVJ8ft1q0bb7zxBoWFhf5SnSVLlmC32+nSpYt/vX79+tGvXz+mTp3K4MGDmTlzJueeey4AnTt3pmvXrvz+97/nxhtv5PXXX1ei01SceI34fOD0msvqO9Gphoa6PlavXo3P5+PJJ5/0J3rvvfdepXV69+7N/PnzmTZt2knbd+rUifDwcObPn89tt91Woxjqk7/aWvcU7HZVWxOxSuP5dK1nx6quKdERCXYTJkzgs88+47XXXmPChAn+5Z06deKjjz5i3bp1fP/99/z85z8/qQeq2hwzLCyMW265hQ0bNvD1119z11138X//938kJyezc+dOpk6dytKlS9m9ezfz5s1j27ZtdOvWjeLiYu69914WLlzI7t27WbJkCStXrvS34RGpSw1xfXTs2BGPx8Pzzz/PTz/9xNtvv82MGTMqrTN16lRWrlzJb37zG3744Qc2b97Myy+/zJEjRwgLC+OPf/wj9913H2+99RY7duxg2bJl/Pvf/67Ve68LXp9B+o+ZAIzqkWJxNCLBLXgSnfKqazlFaqMjEuwuvfRSEhIS2LJlCz//+c/9y5966ini4+MZMmQIY8eOZdSoUf5fs2srIiKCL7/8kuzsbAYOHMg111zDsGHDeOGFF/yvb968mauvvprOnTtzxx13MGnSJH71q1/hcDjIzs5m4sSJdO7cmeuuu47Ro0ef8pdukdpqiOujT58+PPXUU/ztb3+jZ8+evPPOO0yfPr3SOp07d2bevHl8//33nHvuuQwePJiPP/6YkBDzl8sHHniAP/zhDzz44IN069aN66+/nkOHDtX8jdeR1buPklXoJjbcyaD2CVaHIxLUbIZRjb5ZLZKXl0dsbCy5ubnExMRUe3uPx8NL787l6Q0htIwLZ8mfLq2HKCVYeTwe5s6dy5gxY4Kmu9+SkhJ27txJu3btCAvTALz1zefzkZeXR0xMTL205zjT/7O2n7+B6kx/l6pcH/X9P5Xaq+nn3MP/+5HXluzkqn4teer6vvUX
4FkE471JGo7V51dV701B8+mqcXRERESkPhmGUalbaRGxVvAkOuXJZpHbS4nHa20wItLkvfPOO0RFRZ1yqhjrQyRYBev1sfFAHvtziglz2rmoc3OrwxEJekHT61q4Axx2G16fQU6Rh5RYh9UhiUgT9rOf/YxBgwad8jVVE5FgF6zXx7zy0pyLOjcnPFTfM0SsFjSJjs0GceFOsgrdZBe6SYlVuwIRqbno6Giio6OtDkOkUQrW6+PLjeptTaQxCZqqawDxEeavSDlqpyMiIiJ1aNeRQrZk5hNitzGsa7LV4YgIwZboRIYCkK1ER6RONIFOG6UK9H+sH/q7Nm3V/f9VdEJwXvtEYiMCt3qeSFMSVIlOXLj5wXNUg4aK1EpFHfuioiKLI5G6UPF/DOS2Ew1J10dgqO51UZHojOqh0hyRxiJo2ugAJJR3vXZUg4aK1IrD4SAuLs4/OF9ERAQ2m83iqAKXz+fD7XZTUlJSp2OuGIZBUVERhw4dIi4uDodDjafrQlWuj/r6n0rt1eS6OJRXwpo9OQCM6K72OSKNRVAlOvER5VXXVKIjUmspKebNvDGMRB7oDMOguLiY8PDwekko4+Li/P9PqRtnuz7q+38qtVed62Lej2YnBH3T4tTZkUgjElSJTlxERYmOEh2R2rLZbKSmppKUlITHo1LS+uTxeFi0aBEXXnhhnVcvczqdKsmpB2e7Purzfyq1V93r4li1Nf1gINKYBFWiEx+hqmsidc3hcOiLcj1zOByUlZURFhamL8VNzOmuD/1PA0dukYelO7IAtc8RaWyCqmJwRdU1dUYgIiIidWHBlkzKfAadk6No3zzK6nBE5DhBlehUVF1TGx0RERGpC19u0CChIo1VUCU6CeUlOhowVERERGqrxOPlm62HASU6Io1RUCU6FSU6hW4vJR6vxdGIiIhIU7Zo62GKPV5axoXTo0WM1eGIyAmqlehMnz6dgQMHEh0dTVJSEuPHj2fLli1n3OaNN97AZrNVmsLCrOl6MdoVgsNuduOZow4JREREpBa+3GhWWxvZI1ndhIs0QtVKdL755hsmTZrEsmXLSE9Px+PxMHLkSAoLC8+4XUxMDAcPHvRPu3fvrlXQNWW3247reU3V10RERKRmyrw+5m9W+xyRxqxa3Ut/8cUXlebfeOMNkpKSWL16NRdeeOFpt7PZbI1mMLq4iFCOFLjV85qIiIjU2Iqd2eQUeUiIDGVg2wSrwxGRU6hVG53c3FwAEhLOfIEXFBTQpk0b0tLSGDduHBs3bqzNYWulokOCbJXoiIiISA1VDBI6vFuSv1q8iDQuNR4w1OfzcffddzN06FB69ux52vW6dOnCa6+9Ru/evcnNzeUf//gHQ4YMYePGjbRq1eqU25SWllJaWuqfz8vLA8yRpGsyAnvFNh6Ph9hw8y0fyS/RaO5SJ44/v0Tqg5XnmM5raep8PoN/ffsTEa4QbhrUuk7a0vh8hr99jqqtiTReNU50Jk2axIYNG1i8ePEZ1xs8eDCDBw/2zw8ZMoRu3brxz3/+k0ceeeSU20yfPp1p06adtHzevHlERETUNGTS09MpyLIDdlas20D8kfU13pfIidLT060OQQKcFedYUVFRgx9TpC49mb6FF7/eAUCpx8ttF7Sv9T5/2J9LRl4JkaEOhnZsVuv9iUj9qFGiM3nyZD799FMWLVp02lKZ03E6nfTr14/t27efdp2pU6cyZcoU/3xeXh5paWmMHDmSmJjqd9/o8XhIT09nxIgR/Biyi6WHdtKsZTvGjOla7X2JnOj488vpdFodjgQgK8+xihJ1kabog9X7/EkOwGNzN9E2MZLh3ZNrtd+KamsXd00izOmo1b5EpP5UK9ExDIO77rqL2bNns3DhQtq1a1ftA3q9XtavX8+YMWNOu47L5cLlcp203Ol01uom73Q6aRZtdm2dV1KmL6VSp2p7foqcjRXnmM5paapW7Mxm6kc/ADDpkg5kF3r474o9/HbWWj64cwjdazHu
TUWio2prIo1btRKdSZMmMXPmTD7++GOio6PJyDAv9NjYWMLDwwG4+eabadmyJdOnTwfg4Ycf5rzzzqNjx47k5OTw97//nd27d3PbbbfV8VupmopBQ7M1jo6IiEhA2p1VyK/eXoXHazCmVwp/GNEFr2GwO6uQ73ZkcdubK5kzeShJ0dUf12/7oXx+OlxIqMPOJV2a10P0IlJXqtXr2ssvv0xubi4XX3wxqamp/undd9/1r7Nnzx4OHjzonz969Ci333473bp1Y8yYMeTl5fHdd9/RvXv3unsX1ZAQafa6pu6lRUREAk9usYdfvrGSo0UeereK5clr+2K323A67Lw8oT/tm0VyILeE299aTYnHW+39V3RCMKRjItFhKvEUacyqXXXtbBYuXFhp/umnn+bpp5+uVlD1Kb4i0VH30iIiIgHF4/Ux6Z017DhcSGpsGK/ePIDw0GNtaGIjnLw2cSDjX1rC93tz+MN73/P8jf2wV6N7aFVbE2k6ajWOTlMUH6ESHRERkUBjGAYPfbKRxduPEBHq4NVbBpAUc3LVtLbNIplxU3+cDhufrT/IM19trfIxDuQU88O+XGw2GN6tdh0aiEj9C7pEp2LA0EK3l9Ky6hdZi4iISOPz+pJdzFy+B5sNnr2hHz1axJ523fPaJ/LYlb0AeG7Bduas3V+lY8wrL80Z0Cae5tEnd5okIo1L0CU60WEh/hGMc9QhgYiISJO3YHMmj372IwB/Ht2NEVXoPvq6AWnceVEHAO774AdW7co+6zYaJFSkaQm6RMdutxEXXt7zmqqviYiINGmbDuZx18y1+Ay4YWAat11Q9aEv7hvVhVE9knF7fdzx9mr2ZJ1+gNzsQjcrypMhJToiTUPQJTpwXIcESnRERESarEP5Jdz25ioK3V6GdEjkkfE9sdmq3rGA3W7j6ev70rNlDNmFbm59cyV5Jaeu7fHVpky8PoPuqTGkJUTU1VsQkXoUlIlORTudo6q6JiIi0iSVeLzc8dZq9ucU075ZJC9P6I/TUf2vNRGhIbx680CSY1xsO1TA5JlrKfP6TlpvnnpbE2lygjLROTZoqEp0REREmhrDMLjn/e9ZtzeH2HAn/544kNiImo9pkxIbxr9vGUi408GirYd5+NMfK71eWFrGom1HABjVU72tiTQVQZnoaNBQERGRpuvpr7bx6Q8HcTpszLipP+2aRdZ6nz1bxvL09X2x2eCtpbt587td/te+2XoYd5mPNokRdEmOrvWxRKRhBGWio0FDRUREmqY5a/fz3PxtADx2ZS8Gd0iss31f1jOFP17WFYBp/9vIwi2HgMqDhFanDZCIWCs4E53y4m2V6IiIiDQdq3dnc98HPwDwq4vac92AtDo/xq8ubM+1/VvhM2DyzLVs2J/Lgs1mwjOqh6qtiTQlQZromCU62eqMQEREpEnYm13EHW+txu31MbJ7Mn8c1bVejmOz2Xjsyl6c2y6BgtIyrv/nUvJLymge7aJfWny9HFNE6keI1QFYoaKNTo6qromIiDSInw4X8K9vf8JdZtRo+1W7s8kqdNOjRQzP3NAXu73+qpCFhtj55039ufKlJewqH1tnRPfkej2miNS9oEx04ipKdFR1TUREpN55vD4mzVzLpoN5tdpPcoyLf98ykIjQ+v/6Eh8Zyr8nDuTKF5eQV1LG5b1S6/2YIlK3gjLRUa9rIiIiDeffi3ey6WAecRFO7ryoAzUpFwlx2BnTK4WU2LA6j+90OjSPYvakoWzNyGdox2YNdlwRqRvBmeiUl+gUur2UlnlxhTgsjkhERCQw7c4q5JmvtgJw/+XduaZ/K4sjqp4OzaPo0DzK6jBEpAaCsjOC6LAQKqrZ5qhDAhERkXphGAb3z9lAicfHkA6JXH1OS6tDEpEgEviJztFd2Na8SYujy/yL7HbbsZ7XVH1NRESkXsxZt59vtx3BFWLnr1f20hg0ErjK3OApMR+9HvCWgc8LRs0635C6EfhV1w6sJeTzP9A+shPwsH9x
fGQoWYVuDRoqIiJSD7IL3Tzy6SYAfjusE22bRVockTSI4qOQ+SOEuCA0ClxR5mNoFDia+NdOw4C8/XBkKxzZVv5Y/jz/YBV2YAObHWy28ucV83YIi4XIZhDZ/Lip2amfO8Pr+53WjmGYyZ7PA153edLnKV9WVp4IusEZAc061msoTfyMq4JYczCxCHdWpcXHBg1V1TUREZG69uhnP5Jd6KZrSjR3XNje6nCkvpTkwu6lsOtb2LkIMtYDpynFCAmH0Mjy5Cf6WBLkf4w2H+21+HoaEmruxxVzwuNxz0NCz7wPTwlk7zhFQrMdPIU1jw0DDO+p/zyeoiomS5h/oxMTH58XDJ85+Z97Ky/3v+Y9YT2jPLaKR99Zn4f4fIwoLiJk6z3lScxxyYzhrdr7aHsBTPy0auvWUNAkOmGeo5R5PeA0E5xjg4aqREdEJJB4vV7+8pe/8J///IeMjAxatGjBxIkTuf/++/1VpwzD4KGHHuJf//oXOTk5DB06lJdffplOnTpZHH1gWLztCB+t2Y/NBtOv6oXTEfg15YNGaf6xxGbXt3Dwe/ML8PFiW5uP7nwoLTC/AAOUFZtT0ZGGjflEIWEnJz+uGDPOI1vh6G5Om6zZQyChPTTrXHlKaFeeoB2fEFClpAGfF0pyoPAIFB4+bjpy8nOvG9wF5nR0VwP8sU7NBkQAVLW8wB4Cdic4nOZzhxPC4+otvgqBn+hENsdwhGLzus1MOawDcNygoWqjIyISUP72t7/x8ssv8+abb9KjRw9WrVrFL37xC2JjY/ntb38LwBNPPMFzzz3Hm2++Sbt27XjggQcYNWoUP/74I2FhDdd9cSAqdnv58+z1ANwyuC39WsdbHJHUSmkB7F0GO7+FXYvhwNqTf7FP6ABtz4d2F5qP0SmVXy8rNffjzgd34bHnpeVf2CvNF1a9ROBEhmEmAqV5ZkJWUv5YMVWUxpSVmFPh4dPvyxULzSsSmU7HEpr4tuaXdCsYhvneCo8clwQdMtsF2exgL68GZ3OUzzuOzZ/2tVNUpavC87IyL0u++44hF1yE0xVROYFxhB57XpHcWNQ+L/ATHbsdYlrC0Z3Y8vZBczPRiVOJjohIQPruu+8YN24cl19+OQBt27blv//9LytWrADM0pxnnnmG+++/n3HjxgHw1ltvkZyczJw5c7jhhhssiz0QPLdgG3uyi0iNDeOeUV2sDid4GT4c3lKzzUyJAd7SY20jyiqel5a3ofCUL3Mfm3L2mInN/tVmu4rjxbc1qx21vcBMbGLP0pteiMucIhPr7e1WibesPKHKPyERKn+EY0lNZHPLvpyfls1mtuUJi4XEDpaGYng85ERmQEovf22pxijwEx3AiG2F7ehOyN3nX5YQWdFGR4mOiEggGTJkCK+88gpbt26lc+fOfP/99yxevJinnnoKgJ07d5KRkcHw4cP928TGxjJo0CCWLl16ykSntLSU0tJS/3xeXh4AHo8Hj6f6bT0rtqnJto3Z5ox8Xln0EwAPXd4Vl90IuPfYqJTkQc5ubDm7seXsgpw92I7uxpa7m5CcvVzhLYUfan8YIzYNo835+Nqcj9FmKMSeMBZSU/ofh0SZU2TqmdcrKzvz60HO6s+wqh43KBIdYswL0pa337+ooo3OUY2jIyISUP70pz+Rl5dH165dcTgceL1eHnvsMSZMmABARkYGAMnJyZW2S05O9r92ounTpzNt2rSTls+bN4+IiIgax5qenl7jbRsbnwFPr3fg9dnom+CjdOcq5u60OqqmzWaUEeHOIqL0MBHuQ0SWHibCfZjI0kNEuA8T6q1aw3gDGz5biDnZQ4499y9z4rM58Nmc/nXcjkiyo7pwOKobxa7m5o72Aft+oE6yJwkIVn2GFRUVVWm9oEh0jJjyItXcvf5lxxIdleiIiASS9957j3feeYeZM2fSo0cP1q1bx913302LFi245ZZbarTPqVOnMmXKFP98Xl4eaWlpjBw5kpiYmGrvz+PxkJ6ezogR
I3A24mof1fHm0t3sKdxCdFgIL942lKRol9UhNV0lediXvYB9xT+xnaWXLyOiGUZcG4hrjRHXFiPefO6JasX8pd9z6cjLcIaG+ath2an6IIotavcuJIBZ/RlWUap+NsGR6JQXsdpyjyvRiVSiIyISiO69917+9Kc/+aug9erVi927dzN9+nRuueUWUlLMhtKZmZmkph6rvpKZmUnfvn1PuU+Xy4XLdfIXd6fTWaubfG23byz25xTz1FfbAfjT6K60TIiyOKImqqwUVr0G3zwBxdnmspAws01MXBvzMb4txLfxL7O5ojhVSxLD48Hr2IzTFR4Q55g0TlZ9hlX1mEGR6FR0MW3LO76NTnmio3F0REQCSlFREXZ75d+sHQ4HPp/ZBW67du1ISUlh/vz5/sQmLy+P5cuX8+tf/7qhw23yDMPgwTkbKHJ7GdAmnhsHtrY6pKbH54MNH8CCR8xOAAASO8Hwh6DL5WbHSiJSbUGR6ByrurbP7JrPZvMPGFpQWoa7zEdoiD5EREQCwdixY3nsscdo3bo1PXr0YO3atTz11FP88pe/BMBms3H33Xfz6KOP0qlTJ3/30i1atGD8+PHWBt8Efb4hg/mbD+F02Jh+VS/s9kbWU1Vjt30+fPVQ+UCbQFQKXPwn6Pd/4AiKr2ki9SY4rqDyRMfmKTS7WYxIICbMid1mNp7MKXKTFKNxE0REAsHzzz/PAw88wG9+8xsOHTpEixYt+NWvfsWDDz7oX+e+++6jsLCQO+64g5ycHM4//3y++OILjaFTTblFHh76ZCMAv7m4I52Soy2OqAk5sBa++gv8tNCcd8XA0N/Beb+G0EgrIxMJGMGR6DjDKQ2JxlWWb5bqRCRgt9uIjwglq9BNthIdEZGAER0dzTPPPMMzzzxz2nVsNhsPP/wwDz/8cMMFFoAe/2Izh/NLad88kt9cYu24Hk1G9k+w4FHY8KE5b3fCubfDBfdYP86MSIAJjkQHKAptdizRSe0NQFyE00x0NJaOiIhItazYmc1/V5jtSaZf2QtXiMPiiBq5gsOw6O9mZwM+D2CD3tfBJX82OxYQkToXNIlOsTOReHZW6mI6ITKUHYcLydFYOiIiIlVWWuZl6kfmWCo3npvGoPYqiTit0gJY+iJ89xy4C8xlHYbB8L/4f3gVkfoRPIlOaPmH8CnG0lGJjoiISNW9vHAHOw4X0izKxZ8u62Z1OI2HYUB+BmRuhEMbIfNH2DEfCg+br6f2hRHToP3FVkYpEjSCJtEp8ic6x7qY9g8aqkRHRESkSrYfyuelr3cAMO1nPYiNCNIxWkoL4NCmYwlNRXJTfPTkdePbwrAHofuV6ipapAEFTaJTfKpExz9oqKquiYiInI3PZzD1o/W4vT6GdU1iTK8Uq0Oqfz4fZG0vT2jKk5pDG+HorlOvb7NDYkdI6g7JPSCll1lVLSS0QcMWkWBMdHKOb6Nj/gp1tEglOiIiImcza+VeVu46SkSog4fH98RmC+AxcwwDtn8F8x6Aw5tOvU5U8rGEJrmH+bx5F3CGN2ysInJKQZPoFDmbmU8KMqCsFEJcxKmNjoiISJUUlJbx9y83A3DPyC60jGvgL/PFRyE0umEG0Tz4A6Q/cGyMm5BwSOpWOaFJ7gGRzeo/FhGpsaBJdNwh0RghYdjKSiDvACS0I6E80clRiY6IiMgZvfndLo4WeWjXLJKbB7dpmIN6PbD5U1j5b9j1LUSnwoBbof9EiGpe98fLO2COcbNuJmCAIxQG/Qou+AOEx9f98USkXgVNooPNBjEtIXuH2fNaQjt/G51sJToiIiKnlV/i4ZVFPwHwu2GdCHHUc4P63P2w5k1Y/QYUZB4XyEH4+lFY9AT0vAYG3QEt+tX+eKX5sORZ+O4FKCs2l/W82uxAQGPciDRZwZPoAEZsK2zZO/wdEsSX9xRztFCdEYiIiJzO60t2kVvsoUPzSMb2aVE/BzEM2PkNrHwVNs8Fw2suj0wyS3D6/hz2rYLlM2D/Kvh+
pjmlnWeWunQbC45q9gDnLYO1b8PXf4XCQ+aytPNg1GPQakCdvj0RaXhBlegQ08p8LE90EspLdApKy3CX+QgNUZePIiIix8st9vCvb83SnLuHd8Zhr+MOCIpz4Pv/mtXTsrYdW97mfBh4K3S94liPZQntoPe15QnPP2HjbNi7zJyiW8DAX0L/X5y97YxhwLZ0sx3O4c3l+24Pw6eZCVMgd7IgEkSCKtExYisSHbPntZgwJ3Yb+AyznU5STJiF0YmIiDQ+ry3eSX5JGZ2To7i8V2rd7fjAOlj1b/jh/WPVxUKjoc8NZoKTdIaBSFsNMKeRj8Cq12HVa5Bf3r7mm79Dr2vMUp7UPidve2JHA+HxcNEfzbY/6gJaJKAEV6JTUaJT3sW03W4jLiKU7EI32Up0REREKskpcvPa4p2AWZpjr21pjqcEfpxjVk/bt/LY8qTuMPA26H0duKKrvr/oFLhkKlwwBTbOgeUvw4G1sO4dc2o9GM69wyylKTysjgZEgkxQJTrEtjQfjx80NMJJdqFb7XRERERO8Oq3O8kvLaNrSjSX9ajF4KA+H6z8Fyx8HIqzzWV2J3T/mZngtB5cu+piIS7oc72ZKFW04/lxDuxZak7RqWYVOXU0IBJUgirRMWLTzCe5+8z6uTYbCZGh7DhcqEFDRUREjpNd6Ob1JWZpzu9H1KI05/AW+OQu2LvcnI9pBQMmQr+bITq5boKtYLNB2kBzynsUVldUaztovq6OBkSCSlAlOkSX9xRTVgxF2RCZqEFDRURETuFf3/5EodtLjxYxjOxeg4SkzG122bzoCfC6ITQKhv/F7CygIQb9jEmFS/5sVk3bNg9CI6H9JepoQCSIBFeiE+KCqGSzT/7cPRCZqEFDRURETnCkoJQ3v9sFwO+Hd8ZW3eRg/2r4+C44tNGc7zQSLn8K4tLqNtCqCHGZbXREJOgEV6IDENuqPNHZBy36HRs0VG10REREAHhl0U8Uub30bhXLsG5JVd/QXQRfPwbLXgLDB+EJMPoJsxc0laSISAMLwkQnzfyl6cRBQ1WiIyIiwqH8Et5augsw2+ZUuTTnp2/gf7+Fo+a29LoWLnv87GPaiIjUkyBMdCp3MV1RoqNER0REBP75zU+UeHz0TYvj4s7Nz75BcY45Ls2at8z5mJZwxdPQeVS9xikicjb26qw8ffp0Bg4cSHR0NElJSYwfP54tW7acdbv333+frl27EhYWRq9evZg7d26NA641f89rZqJT0UbnqDojEBGRIJeZV8J/lu0GYEpVSnM2fQovDjqW5Ay4FX6zTEmOiDQK1Up0vvnmGyZNmsSyZctIT0/H4/EwcuRICgsLT7vNd999x4033sitt97K2rVrGT9+POPHj2fDhg21Dr5G4o7rYhqIjzSrrmWrREdERILcywt3UFrmY0CbeC7odIYqZ/mZ8N7N8O4EKMiAxI4wcS5c8RSExTRcwCIiZ1CtqmtffPFFpfk33niDpKQkVq9ezYUXXnjKbZ599lkuu+wy7r33XgAeeeQR0tPTeeGFF5gxY0YNw66Fiqpr/jY65b2uqTMCEREJYgdzi5m5fA9whtIcw4B1M+HLP0NJDtgcMPR3cNEfwRnWsAGLiJxFrdro5ObmApCQkHDadZYuXcqUKVMqLRs1ahRz5sw57TalpaWUlpb65/Py8gDweDx4PNVPSCq28Xg8EJGCE6DwEJ7ifKJDHQDkl5ZRWFxKaEi1CrlEKp9fIvXAynNM53XweOnrHbi9Ps5tl8DgDoknr+Augo9uh82fmvMpvWHcC5Dap2EDFRGpohonOj6fj7vvvpuhQ4fSs2fP066XkZFBcnLlgcaSk5PJyMg47TbTp09n2rRpJy2fN28eERERNQ2Z9PR0MAwut4cS4nPzzSczyXelYMOBgY0PP/2C2NAa716CXHp6utUhSICz4hwrKipq8GNKw9ufU8yslWcozSk4DP+93uy11OGCS6bC4LsaZuBPEZEaqvEn1KRJk9iwYQOL
Fy+uy3gAmDp1aqVSoLy8PNLS0hg5ciQxMdWv++vxeEhPT2fEiBE4nU4c+9rCka1c3K8jRrsLefiHrzla5KH/4AvonBxdh+9EgsGJ55dIXbPyHKsoUZfA9sKC7Xi8BkM6JHJe+xNKc7J2wH+uhqM7ITwebpwFrc+zJlARkWqoUaIzefJkPv30UxYtWkSrVq3OuG5KSgqZmZmVlmVmZpKSknLabVwuFy6X66TlTqezVjd5//axaXBkKyGFB8HpJCEylKNFHvJKDX1RlRqr7fkpcjZWnGM6pwPf3uwi3l9l9kT6+xGdK7+4bxXMvA6KsiCuNdz0ETTrZEGUIiLVV60GKYZhMHnyZGbPns2CBQto167dWbcZPHgw8+fPr7QsPT2dwYMHVy/SunTiWDoRGktHRESC0wsLtlPmM7igUzMGtj2uze3mufDGFWaSk9oXbv1KSY6INCnVKtGZNGkSM2fO5OOPPyY6OtrfziY2Npbw8HAAbr75Zlq2bMn06dMB+N3vfsdFF13Ek08+yeWXX86sWbNYtWoVr7zySh2/lWqIPbGLaSU6IiISfHZnFfLBGvNeePfw40pzVv4b5t4Dhg86Dodr3wRXlEVRiojUTLVKdF5++WVyc3O5+OKLSU1N9U/vvvuuf509e/Zw8OBB//yQIUOYOXMmr7zyCn369OGDDz5gzpw5Z+zAoN7FadBQERGR5+Zvx+szuLhLc/q3iTe7j57/MHw2xUxy+v2f2SZHSY6INEHVKtExDOOs6yxcuPCkZddeey3XXnttdQ5Vv/xj6ZiJTlzFoKEaS0dERILET4cLmL3WLM35/fDOUOaGT+6CH2aZK1z8Z7joPjjVeDoiIk1AcPYL6U909oPP5y/RyVHVNRERCRLPzd+Gz4BhXZPo09wOM6+Fnxaag4D+7Dnod5PVIYqI1EpwJjoxLQEbeEuh6Ii/M4JsJToiIhIEth/K55PvDwBw75AYeH00ZG4AZyRc9xZ0Gm5xhCIitVetNjoBw+GE6FTzee7eY50RqI2OiIgEgWfnb8dnwC0di+n66VVmkhOZBL+YqyRHRAJGcCY6UKmL6YTyNjpHi9RGR0REAtuWjHw+/eEAg2ybePDQ7yFvHyR2gtu+ghZ9rQ5PRKTOKNHJ3Uecel0TEZEg8ez8rVxuW8p/XI/jcOdB2nlw6zyIb2N1aCIidSp4E524Y2PpVHRGkF9ahrvMZ2FQIiIi9WdvdhHJP77OC6HP48QD3X4GN8+BiISzbisi0tQEb6ITe2wsnZhwJ/by3jNzilWqIyIigSl96WoeCPmPOTPo13DtG+AMtzQmEZH6EsSJzrGxdBx2G7Hh5e10NJaOiIgEIMMwyPvhf9htBkfi+8Hox8HusDosEZF6o0Qn1xwsraLntWy10xERkQD0/b5c+hQtAyC6788sjkZEpP4FcaJTXnWtKAvcRRo0VEREAtqnK7cxxP4jAK5uYyyORkSk/gVvohMWC6HR5vPjel7ToKEiIhJoPF4fRzak47J5KI5sBc27WB2SiEi9C95Ex2ar1E7HP5aOqq6JiEiAWbT1MIPcKwBwdR9j3gNFRAJc8CY6UKmL6Yo2Oho0VEREAs3sNXu51LEWAHvX0RZHIyLSMII70TmuQ4J4DRoqIiIBKK/Ew4FNy0m25eB1RkKboVaHJCLSIJTogFl1TW10REQkAH2xPoMLjNUA2DteCiEuiyMSEWkYQZ7oqOqaiIgEto/W7mOYYw0Ats6XWRyNiEjDUaIDkLuX+Ah1RiAiIoFlf04xP/20g972nRjYoNNIq0MSEWkwQZ7oVFRd2098RAigREdERALHx+v2c4ljHQC2lv0hKsnagEREGlBwJzrRqWCzg89DopEDQH5pGR6vz9q4REREaskwDGav2c8wu1ltDVVbE5EgE9yJjiMEolsAEF2a4R9W4Kg6JBARkSZu44E89hzK5nz7BnNB51HWBiQi0sCCO9EB/1g6jrx9xIVXtNNRhwQiItK0zV67n8H2H4mw
lZo/6qX0sjokEZEGpUTnuC6mj/W8phIdERFpusq8Pj5ed4BL7eYgoXQehb/agohIkFCio0FDRUQkwCzefoQjBSWMCClPdLqMtjYgERELKNE5fiwdDRoqIiIBYM7a/XSx7SWVIxASDu0utDokEZEGp0TnuLF0EiLNNjo5GjRURESaqMLSMr7cmMmwimpr7S8CZ7i1QYmIWECJTkXVtZy9x0p0VHVNRESaqC82ZFDs8TLGtc5coN7WRCRIKdGpSHRKckhymSU5aqMjIiJN1Zx1+0kgjx6+reaCTkp0RCQ4KdEJi4GwWABa2rIB9bomIiJNU2ZeCUu2H+Fi+zpsGJDSG2JbWh2WiIgllOiAv51Oc+MQANlqoyMiIk3Qx+v24zPgmuiN5oLOl1kbkIiIhZTogL/6WmKZmeio6pqIiDRFs9cewEkZA8rWmAuU6IhIEFOiA/4SnVh3BqCqayIi0vRszshj08E8hoRsIdRbCJFJ0KKf1WGJiFhGiQ74S3Qiis1EJ7+kDI/XZ2VEIiIi1TJ7zX4A/i9xs7mg80iw6zYvIsFLn4DgT3RCC/Zhs5mLVKojIiJNhddn8PG6A4DB4LKV5kJVWxORIKdEB/xV12y5+4kL16ChIiLStCz7KYuMvBJ6hx0msnAPOEKh/cVWhyUiYiklOgBxZqJD3n4Swx2ABg0VEZGm46Pyamt3ppaPndP2fHBFWxiRiIj1lOgARCWDPQQML+3CCwD1vCYi0pTt37+fm266icTERMLDw+nVqxerVq3yv24YBg8++CCpqamEh4czfPhwtm3bZmHENVfs9vLFhoMAnO9bbS5UtTURESU6ANgdENMCgA7OowAcVdU1EZEm6ejRowwdOhSn08nnn3/Ojz/+yJNPPkl8fLx/nSeeeILnnnuOGTNmsHz5ciIjIxk1ahQlJSUWRl4z837MoNDtpVu8l+hD5clc51HWBiUi0giEWB1AoxGbBjl7SHNkAc3VGYGISBP1t7/9jbS0NF5//XX/snbt2vmfG4bBM888w/3338+4ceMAeOutt0hOTmbOnDnccMMNDR5zbcxea1Zbm5y2G9tWLzTvBvFtrQ1KRKQRUKJTobxDghYcAdRGR0Skqfrkk08YNWoU1157Ld988w0tW7bkN7/5DbfffjsAO3fuJCMjg+HDh/u3iY2NZdCgQSxduvSUiU5paSmlpaX++by8PAA8Hg8eT/VrAFRsU5Ntj3ekoJRvt5n3rQt8ZmmOt+MIfLXcrzR9dXWOiZyK1edXVY+rRKdCeRfTzY3DgLqXFhFpqn766SdefvllpkyZwp///GdWrlzJb3/7W0JDQ7nlllvIyDDHTEtOTq60XXJysv+1E02fPp1p06adtHzevHlERETUONb09PQabwuw8KANr89B+8gywnaa+/ruSDTZc+fWar8SOGp7jomciVXnV1FRUZXWU6JToTzRSfBkAuqMQESkqfL5fAwYMIC//vWvAPTr148NGzYwY8YMbrnllhrtc+rUqUyZMsU/n5eXR1paGiNHjiQmJqba+/N4PKSnpzNixAicTmeNYgL418vLgDz+0MdN6JpCjPB4zrvmLrODHQlqdXWOiZyK1edXRan62eiTsEJ5F9PRpeavednqjEBEpElKTU2le/fulZZ169aNDz/8EICUlBQAMjMzSU1N9a+TmZlJ3759T7lPl8uFy+U6abnT6azVTb42228/lM+GA3mE2G1c6lgHgK3TSJyu8BrHI4GntueoyJlYdX5V9Zjqda1CeRud8CKzi84cVV0TEWmShg4dypYtWyot27p1K23atAHMjglSUlKYP3++//W8vDyWL1/O4MGDGzTW2qjohOCizs0J3/WVuVC9rYmI+CnRqRDTEoAQTz7RFKkzAhGRJur3v/89y5Yt469//Svbt29n5syZvPLKK0yaNAkAm83G3XffzaOPPsonn3zC+vXrufnmm2nRogXjx4+3Nvgq8vkM5qw9AMCELj44vBlsDugwzOLIREQa
D1Vdq+CKgvB4KD5KC9sRtpRE4PH6cDqUC4qINCUDBw5k9uzZTJ06lYcffph27drxzDPPMGHCBP869913H4WFhdxxxx3k5ORw/vnn88UXXxAWFmZh5FW3Ylc2+3OKiXaFcIFRPkhomyEQHmdpXCIijYkSnePFpkHxUVras9jibU1OkYfm0SfXyRYRkcbtiiuu4Iorrjjt6zabjYcffpiHH364AaOqO3PKq62N7pWCc/vL5kJVWxMRqUTFFccrb6fTIfQooC6mRUSk8SnxePlsvdme9OqecbBrsflC59HWBSUi0ggp0TleeRfT7UOyAQ0aKiIijc/SHVnkl5SRGhvGQO868HkgoQM062h1aCIijYoSneOVJzqtHGaio57XRESksdmfUwxAjxax2Ld9aS7sfJmFEYmINE7VTnQWLVrE2LFjadGiBTabjTlz5pxx/YULF2Kz2U6aTjf6tKXKx9JJMQ4DkF2osXRERKRxqaht0CwiBLZWJDpqnyMicqJqJzqFhYX06dOHF198sVrbbdmyhYMHD/qnpKSk6h66/pW30WnmMxMdtdEREZHGpiLR6c52KDoCrhho3XTG/xERaSjV7nVt9OjRjB5d/QaPSUlJxMXFVXu7BlVedS3Wc4QQyjiqNjoiItLIVCQ6vQqXmgs6DoOQUAsjEhFpnBqsjU7fvn1JTU1lxIgRLFmypKEOWz2RSeAIxY6PZI6SrRIdERFpZCoSnXbZ35oL1D5HROSU6n0cndTUVGbMmMGAAQMoLS3l1Vdf5eKLL2b58uWcc845p9ymtLSU0tJS/3xeXh4AHo8Hj6f67WYqtqnKtiExLbEd3UkLWxZZBaU1Op4El+qcXyI1YeU5pvO68ckqdJNKFnF5WwAbdBxhdUgiIo1SvSc6Xbp0oUuXLv75IUOGsGPHDp5++mnefvvtU24zffp0pk2bdtLyefPmERERUeNY0tPTz7rOEE8YzYGWtiOsPXCYuXPn1vh4Elyqcn6J1IYV51hRUVGDH1POLLuwlOGOteZM2rkQmWhtQCIijVS9Jzqncu6557J48eLTvj516lSmTJnin8/LyyMtLY2RI0cSExNT7eN5PB7S09MZMWIETqfzjOs6/jcXfthEC9sR1oRGMGbMBdU+ngSX6pxfIjVh5TlWUaIujYNhGGQXurnUXp7oqLc1EZHTsiTRWbduHampqad93eVy4XK5TlrudDprdZOv0vbxbQBoacviaJFHX1ylymp7foqcjRXnmM7pxiW/tAyHt4ShIRvMBZ2r3zmQiEiwqHaiU1BQwPbt2/3zO3fuZN26dSQkJNC6dWumTp3K/v37eeuttwB45plnaNeuHT169KCkpIRXX32VBQsWMG/evLp7F3WpvOe1FrYj5JeU4fH6cDo0rqqIiFgvu8DNufbNhNk8ENsakrpZHZKISKNV7URn1apVXHLJJf75iipmt9xyC2+88QYHDx5kz549/tfdbjd/+MMf2L9/PxEREfTu3Zuvvvqq0j4alfJEp6XtCAA5RR6aR59cuiQiItLQsgrd/vsTKT3BZrM2IBGRRqzaic7FF1+MYRinff2NN96oNH/fffdx3333VTswy8S2BqClPQswOFrkVqIjIiKNQnahm0TK201FNrM2GBGRRk51sk4U2xKASEqIoVCDhoqISKORXVhKoq0i0WlubTAiIo2cEp0TOcMhwvyVzOyQQImOiIg0DlmFbprZcs0ZJToiImekROdUjmunk12owfJERKRxyC44vuqaEh0RkTNRonMqcWmA2fOaSnRERKSxyC5yH1d1TW10RETORInOqcRWJDpZaqMjIiKNRnahW210RESqSInOqZRXXWtlO8LRIlVdExGRxiGnoJgE8s0ZJToiImekROdUjhs0VFXXRESksfAUZGG3GRjYIDzB6nBERBo1JTqnclzVtWxVXRMRkUbCUWQOFuoLiwdHtYfCExEJKkp0TqU80Ukih4LCQouDERERgWK3l0hvjjkTpWprIiJn
o0TnVCKb4XOEYbcZOIsyrI5GRESErMJSmmGOoWNXoiMiclZKdE7FZsOIaQlArDuTMq/P4oBERCTYHd/jmk0dEYiInJUSndOwVYylwxFyitXzmoiIWCtLXUuLiFSLEp3TsMdV9LymsXRERMR62QVuEsurrinRERE5OyU6p1PeIUFL2xH1vCYiIpbLLnTTzF+i08zaYEREmgAlOqdzXKKjQUNFRMRqqromIlI9SnROJ/a4qmsaNFRERCyWXVhKIkp0RESqSonO6ZQnOi1tR8guKLU4GBERCXbZhR6V6IiIVIMSndMp71463OamNO+wxcGIiEiwKyjII9pWbM6ojY6IyFkp0TkdZxiFoYnm89x91sYiIiJBz1dwxHy0h4IrxuJoREQaPyU6Z1AS3gIAZ8EBiyMREZFgZys2Ex1veCLYbBZHIyLS+CnROQNPtJnoRBTvtzgSEREJZu4yH+HubABsUWqfIyJSFUp0zsCIMbuYjirJsDgSEREJZkeLjo2h44hKsjgaEZGmQYnOGYQkmIlOfNkhiyMREZFgllXg9nctrRIdEZGqUaJzBq7ENgAk+Q5T7PZaHI2IiASr7EqDharHNRGRqlCicwbRyW0BcyydZT9lWRuMiIgErazCUhJtueaMxtAREakSJTpnYIszS3Sa23JZvn6LxdGIiEiwyi500wwNFioiUh1KdM4kIoHchF4AuLbMwTAMiwMSEZFgVLnqmhIdEZGqUKJzFuH9bwTgEvfXbDtUYHE0IiISjLLURkdEpNqU6JxFaJ9r8WKnr30Hq1avtDocEREJQtn5pSSiNjoiItWhROdsopI42GwIAM6NH1gcjIiIBKOSwqOE2sp7/4xQiY6ISFUo0amCiupr5xakk1votjgaEREJNkbBYQDKnFHgDLM4GhGRpkGJThUk9r+SYsJoYzvED8vTrQ5HRESCjKP4CAC+cJXmiIhUlRKdqgiNZHviJQDY1r9ncTAiIhJMvD6D0NLysdyi1D5HRKSqlOhUUUi/GwDocXQ+Xk+pxdGIiEiwyClyk1g+hk5IdJLF0YiINB1KdKqo46AxHCaOePLZuewTq8MREZEgkV14LNGxq0RHRKTKlOhUkdMZyg9xwwEoWzfL4mhERCRYmGPoqGtpEZHqUqJTHb2uB6Bd1iIoybU4GBERCQbZlQYLVaIjIlJVSnSqoe+5F7LN1xIXbnLXfGR1OCIiEgSyCt008yc66nVNRKSqlOhUQ2J0GMujzOprxav/a3E0IiISDLILjrXRUYmOiEjVKdGpJk+PawBIyloBufstjkZERAJddmGp2uiIiNSAEp1qGti3D8t9XbFjUPaDxtQREZH6dbSgmARbgTmjREdEpMqU6FRTjxYxfOW8GICS1ep9TURE6pcn/zAABnYIj7c4GhGRpkOJTjXZbDY8nX9GqRFCVM5myNhgdUgiIhLICs1ExxMWD3aHxcGIiDQdSnRq4LweHVjg6weA8cO7FkcjIiKBzF50BABfuHpcExGpDiU6NXB+p2b8z7gAAO/374HPa3FEIiISiAzDwFWaDYAtSu1zRESqQ4lODUS5Qihqcym5RgQhhRmwa7HVIYmISADKKykjzjB7XAuJTrI4GhGRpkWJTg1d2K0Vn3nPM2fU+5qIiNSD7EK3v2tphxIdEZFqUaJTQ5d2TWK293wAjB/ngKfY2oBERCTgZBeWHjdYqNroiIhUhxKdGmrbLJLshH7sM5phcxfAls+tDklERAJMVoGbRFtFoqM2OiIi1aFEpxYu7pbCHO9Qc0bV10REpI5lF7pppkRHRKRGqp3oLFq0iLFjx9KiRQtsNhtz5sw56zYLFy7knHPOweVy0bFjR954440ahNr4XNo1yZ/oGNvToTDL4ohERCSQZBW6ScRso6NER0Skeqqd6BQWFtKnTx9efPHFKq2/c+dOLr/8ci655BLWrVvH3XffzW233caXX35Z7WAbm4FtE8gIbct6X1tsvjLY+JHVIYmISAAxOyNQGx0R
kZqodqIzevRoHn30Ua688soqrT9jxgzatWvHk08+Sbdu3Zg8eTLXXHMNTz/9dLWDbWxCQ+yc37EZc8o7JVD1NRGRxufxxx/HZrNx9913+5eVlJQwadIkEhMTiYqK4uqrryYzM9O6IE+jID+XSFupOaMSHRGRagmp7wMsXbqU4cOHV1o2atSoSjecE5WWllJaWuqfz8szf83yeDx4PJ5qx1CxTU22PZsLOyXy9MbB/Nk5E8e+FXgObYX4dnV+HGm86vP8EgFrz7Gmfl6vXLmSf/7zn/Tu3bvS8t///vd89tlnvP/++8TGxjJ58mSuuuoqlixZYlGkp+bJPwyA1+7CERplcTQiIk1LvSc6GRkZJCcnV1qWnJxMXl4excXFhIeHn7TN9OnTmTZt2knL582bR0RERI1jSU9Pr/G2p+N1w2HiWeLtwYWO9Wz/aDpbU8fX+XGk8auP80vkeFacY0VFRQ1+zLpSUFDAhAkT+Ne//sWjjz7qX56bm8u///1vZs6cyaWXXgrA66+/Trdu3Vi2bBnnnXeeVSGfrOAQAJ6wRBw2m8XBiIg0LfWe6NTE1KlTmTJlin8+Ly+PtLQ0Ro4cSUxMTLX35/F4SE9PZ8SIETidzroMFYB3Dy5jdsb5XOhYT1f3OjqO/ifohhQ06vv8ErHyHKsoUW+KJk2axOWXX87w4cMrJTqrV6/G4/FUqm3QtWtXWrduzdKlS0+Z6FhV08BeZHZy4w1PbPKla9KwVNtA6pPV51dVj1vviU5KSspJ9Z4zMzOJiYk5ZWkOgMvlwuVynbTc6XTW6iZf2+1P59Juybx6YCClttdxZf+E89B6aNW/zo8jjVt9nV8iFaw4x5rqOT1r1izWrFnDypUrT3otIyOD0NBQ4uLiKi1PTk4mIyPjlPuzqqaBs+QwhEB2iZ01c+fW+DgSvFTbQOqTVedXVWsb1HuiM3jwYOae8OGcnp7O4MGD6/vQDebSrkk8Nz+MdN8ArrAthh/eVaIjImKRvXv38rvf/Y709HTCwsLqZJ9W1DQocpexeYV5/0xq25UxY8bULHgJSqptIPXJ6vOrqrUNqp3oFBQUsH37dv/8zp07WbduHQkJCbRu3ZqpU6eyf/9+3nrrLQDuvPNOXnjhBe677z5++ctfsmDBAt577z0+++yz6h660erdMpZmUaF8UDSEK0IXw4YPYdRj4NAHi4hIQ1u9ejWHDh3inHPO8S/zer0sWrSIF154gS+//BK3201OTk6lUp3MzExSUlJOuU8rahrk5XtItJlj6ITGpmDTl1WpAdU2kPpk1flV1WNWu3vpVatW0a9fP/r16wfAlClT6NevHw8++CAABw8eZM+ePf7127Vrx2effUZ6ejp9+vThySef5NVXX2XUqFHVPXSjZbfbuKhzEt/6elEYEg9FR2DH11aHJSISlIYNG8b69etZt26dfxowYAATJkzwP3c6ncyfP9+/zZYtW9izZ0+jqm1w/Bg6tih1LS0iUl3VLtG5+OKLMQzjtK+/8cYbp9xm7dq11T1Uk3Jp1yQ+XLOPL2xDuZpPzeprnUdaHZaISNCJjo6mZ8+elZZFRkaSmJjoX37rrbcyZcoUEhISiImJ4a677mLw4MGNqse17EI3iVQMFqpER0SkuqpdoiOndkHnZoTYbbxZMMhcsPkzKM23NigRETmlp59+miuuuIKrr76aCy+8kJSUFD766COrw6okq9BNM1tFotPM2mBERJqgRtm9dFMUE+ZkQNt4lv3UntyINsQW7YZNn0LfG60OTUQk6C1cuLDSfFhYGC+++CIvvviiNQFVQXZhqb+Njkp0RESqTyU6dejSrkmAjfSQi8wFP8yyNB4REWm6sgpKSKC8ZoASHRGRalOiU4cu7ZoMwMtZ5V1L//QN5B20MCIREWmqinOzcdq85kyEqq6JiFSXEp061KF5JK0TItjhbc7RxH6AARs+sDosERFpgrz5hwBwh0RDSKjF0YiIND1KdOqQzWYrr74GC8Mu
NRf+8K6FEYmISFNlFB4GwBOWaHEkIiJNkxKdOnZJeaLz8qHeGHYnZKyHla/CGbrkFhEROZGjOAsAn6qtiYjUiBKdOjaoXQLhTgdb850c7XKdufCzP8C7N0FhlrXBiYhIk+EqNe8Z9qgkiyMREWmalOjUsTCng6EdzV/fZjb7HYx8FOxO2PwpvDwEdiywOEIREWnsSsu8RHmPAhASo0RHRKQmlOjUg4p2OvO3HIEhd8Ht86FZZyjIgLevhC//H5SVWhyliIg0VkcLPSRiDhYaqkRHRKRGlOjUg0u6muMdrNubQ1ZBKaT2gTu+gQG3missfQH+dSkc2mRhlCIi0lhlFZaSaDMTHZuqromI1IgSnXqQGhtOt9QYDAO+2Wr2mkNoBFzxFNw4CyISIXMDvHIxrPiXOioQEZFKsgvd/kSHSHVGICJSE0p06sml5aU6CzYfqvxCl9Hw66XQcTiUlcDce2DmdVBw6BR7ERGRYJRd6KYZueZMZHNrgxERaaKU6NSTinY6i7Yepszrq/xidDJM+ABGPwEOF2ybZ3ZUsHWeBZGKiEhjk1VwfImOEh0RkZpQolNP+qbFEx/hJK+kjMXbj5y8gs0Gg34FdyyEpB5QeBhmXgtz7wVPcYPHKyIijUdOfiFxtkJzRomOiEiNKNGpJw67jXF9WwLw4tfbMU7XDie5O9y+AM77jTm/4hWz7U7G+oYJVEREGp2SPLN9pw8HhMVZG4yISBOlRKce3XlRB0IddlbuOsqyn7JPv6IzDC6bDjd9CFHJcHiz2Svb0hfB5zv9diIiEpC8+ZkAlITGg123ahGRmtCnZz1KiQ3jhnPTAHh2/tazb9BxOPz6O+gyBrxu+PLP8PpoyNxYz5GKiEijUmiW6JSFJ1ociIhI06VEp57deVEHnA4by37KZvlPWWffILIZ3DATrngaQqNg7zL454WQ/iC4C+s/YBERsZyj2Lxf+MLVtbSISE0p0alnLeLCuW6AWarz3IJtVdvIZoMBv4RJy6HbWPCVwZJn4cVBsOXzeoxWREQag9BSM9GxR6sjAhGRmlKi0wB+fXEHQuw2lmzPYtWuM7TVOVFsK7j+P3DjuxDbGnL3wn9vgFkTIHdf/QUsIiKWKfP6iPQcBcAZk2xxNCIiTZcSnQbQKj6Ca/q3AuC5Bdurv4Mul8GkZTD0brCHwOZP4YVz4bsXwFtWt8GKiIilcoo9JGKOoRMak2RxNCIiTZcSnQYy6ZKOOOw2Fm09zNo9R6u/g9BIGDENfvUttB4MnkKY9//Mrqj3rqzzeGsk+ydY/gp8PV3dY4uI1FB24bHBQh3RSnRERGoqxOoAgkVaQgRX9WvJ+6v38dz8bbz+i3NrtqPk7jBxLqx7B9IfgMz18O8R0H8iDH8IwuPrNO4z8pTA7sWw7SvYng5Zx5VWffM4pPaBvjdBr2sgIqHh4hJpbAwDPEXmYMAVU1mxeQ15iqCs5BTLjz23uwvps2cXjs/SwW47ed/HZk75FICxz4JDH/lNQVaBm0RbrjmjwUJFRGpMd70GNPnSjny0dj9fbznM93tz6JMWV7Md2e1wzv+Z3VCnP2AmPatfN6u0jfor9LrW7NCgPhzdBdvSzWnXt+aXNH9cIWZpU1gsbP0SDn5vTvP+nxlrv5ugw6Vgd9RPbCINxVMCxdlQlHXclA2FR05eVvHcW1rjwzmAtgBV6LjxtK54uhYbS0PKLnTTp7xER4mOiEjNKdFpQG0SIxnXtwUfrdnP8wu28eotA2u3w8hEGP8S9P05fDoFjmyBj26HtW/D5U9Bs061D7qsFHZ/ZyY229PhyAnjAUWnmuP/dBoJ7S+GsBhzeWEWrH8f1v3HrMb24xxzik6FPjeYJT3NOtY+PpG65C2DgkzI218+HSif9kPufijIMJMXd0HNj+EIBWc4hISbgwU7IyAkzFzmDD/hubmO1x7K1u0/0blzZxx2B1T6HaN8ptKPG8c9r1huU03lpiK7oMTfRodIdS8tIlJTSnQa
2ORLOjJn7X6+2nSIDftz6dkytvY7bXs+3LkYlj4P3zwBOxfBy0OgxTnHvkhV+uIUXnlZxfzxX7AObTKTm52LzPZAFWwOaH1eeXIzApJ7nrr0KDIRzrvTnA7+YJY6/fAe5B+ExU+bU9og6DsBelx5LEGSwFWUbVZvzNoOOXvNkklHKDhc4HBCiOuE587y+VAICT22bkjosX0aBmCc8Him1wwoya2cxOTuO/a8IAMMX9Xej80BEYnHTQknzJdPkeWP4fHmdVaDEk2fx8PWgrl0PH8MDqez2ttL05KXn0u4zW3OqERHRKTGlOg0sPbNo/hZnxbMWXeA5+Zv45WbB9TNjkNC4YI/QI+rYO69ZunL3mV1s++oZOg4wkxs2l8M4XHV2z61tzmNeBi2fgFr3ymPb7k5ffEn6D7OTHraDDW/AEvT5Ck2O6XI2g5HtkHWjmPJTXE1ula3kj0EoltAzHFTbCvzMTr1WAITFlt/VUQlqLlzMwHw2MNwhkZaHI2ISNOlRMcCky/tyMffH2Dej5n8eCCP7i3qsDQjoR1MeB/2r4H8A+UNnIuOeyyp3Ci6rPiEdUrMEpyo5ONKbXrVTfIR4jITmu7jID8Dvp9llvQc2Qrf/9ecYsq/UIa4zBKmEFd5SVTFfNgJz4+bd4aZHSDEt619rHJqPm95W5TD5vl1ZPuxRCZrhznW00mt4I8T0xISOxz7H5W5wXvcVFYKXo/ZnsXrPsXr7uPauhxfZct28qN/lRNeC408dp4dn8TEtDDji0xSsi2WKss/DEBJaAIqvxMRqTklOhbomBTNFb1b8L/vD/D8gm28fFP/uj2AzQat+gN1vN+6FJ0C598NQ38H+1aZ7Yo2fAR5+8ypNpJ6QJfR0HUMpPbTl9Yz8Xmh+Gh5I/ojZgJTeOSE+SzzseiImeScKZEBs6QjsRMkdiyfOpjtxRLam0mGiJyRrdBMdMrCEy2ORESkaVOiY5G7Lu3I/74/wOcbMtiSkU+XlGirQ7KGzQZpA83pssdh3wpwF5Z3t1tiPpaVHvdYfML8ceuV5Jq9vB3aaE7f/sOsatRlNHS5HNpdYJb+WM3nhYPrYMcC2LHQfB6RYJYyxLaC2Jblj2lmCUNsq5pVk/J5zZKzvP3l7VDKG9Tn7St/3G8mMFVtk+JnM9ubRCWbSUxiRzORqUhsIhJVpUukFhzFRwDwRagjAhGR2lCiY5HOydGM6ZXC3PUZPL9gGy/8/ByrQ7JeaITZBqg2irLNThS2fAbb55udH6x6zZxCo6HjMOh6uVklryHHHDq6G376GnZ8DT8thJKcyq+7CyBnz+m3D40qr2JVkQSVT9Gp5rYnJjC5+833bnirFl94PEQ0Mxs+RyaajxHNzB6fIpsd91ozCE/QeCwi9Si01GzPZo/SYKEiIrWhbysWuuvSTsxdn8Fn6w/yu8x8OiUHaalOXYpIgD7Xm1NZqdlr3ObPYMvnZo9aFd1c20OgzRCzpKfLaIhvU7dxlOSZ4wzt+NosucneUfl1Vwy0uxA6XGKOPVRaYLZv8Ze87Ds2FZd3Z3x4szlVR0XD+tiW5UlSy/KSo/L56FTzb+ZQSwCRxsDnM4jwZIMDnDFKdEREakOJjoW6pcYwqkcyX27M5IWvt/PsDf2sDimwhLjMkptOI8xxhQ6sNUt6Ns+Fw5vMJGjnIvjij2Y32a0GgCvaLPlxRZU/P/4xqvJ8iMtfRctmeLHtWwm7vzUTm30rK5em2BzQaqCZ2HS41Oz6+6RSkUGnfh/uovLkZ69ZUpNb3o4pdx/kHTTjqijpOTGZiUrWAK0iTUheiYeE8jF0wuKSLY5GRKRpU6Jjsd8O68SXGzP53/cH+O2wTnRoHmV1SIHJbjc7aGjVH4Y9aHaBvOVzM+nZ8x1kbjCnau0zBEKjCAmNYnRhNiHriiq/ntDhWGLT9nyznU1NhEaYbWDqYgBYEWnUsgvdJJILQEi0Eh0RkdpQomOxHi1iGd4tma82ZfLigu08dX1fq0MK
DgntYfAkcyrKhm3z4OguKM03J3eBWZ3MXXDyvKc8ofGVQUkOtpIcnIARFout/cXQ/hIzwVE31yJSTdmFbhJtZokOkeqMQESkNpToNAK/G9aJrzZlMmfdfn47rBNtm6kL3gYVkQB9bqj6+j5vpcSnrDCHxUuXMfSqX+F0hdVfnCIS8LIK3bTxJzrNrQ1GRKSJ0wAjjUCvVrFc2jUJnwEvfL3d6nDkbOwOsxpabEto3gWj5TnkRrRVWxgRqbXsghJ/Gx0lOiIitaNEp5H47TCz/cXstfvZk1V0lrVFRCQQFeUexmErH5Q3QgOGiojUhhKdRqJvWhwXdW6O12fwokp1RESCUmnOIQCKQ2LU7buISC0p0WlEKkp1Plyzj73ZNSvVOVJQyuy1+5j60XoWbM6sy/BERKSe+fLNz+2S0ASLIxERafrUGUEj0r9NPBd0asa3247w8jc7+OuVvc66jcfrY+2eHL7ZeohFW4+wfn+u/7X3Vu3l5QnnMLJHSn2GLSIidcQoOgJAWZh6XBMRqS0lOo3Mb4d14tttR3h/1V4mXdKRlnHhJ62z72gRi7Ye4Zuth/huexb5pWWVXu/RIoaYMCdLf8pi8sy1vHrLAC7srEatIiKNXUixmegY6lpaRKTWlOg0MgPbJjC4fSJLf8pixsIdPDK+JyUeL8t3ZvPNlsN8s/UQOw4XVtomPsLJhZ2bc1Hn5pzfqRlJ0WGUeX3c9d+1fL4hgzveXsVbvxzEue1UFUJEpDFzlWYDYI/Sj1MiIrWlRKcR+t3wTix9JYt3V+5lV1YhK3ZmU1rm87/usNvoV955wYWdm9OzZSwOu63SPkIcdp69oR8lb6/i6y2H+eUbK/nPbYPomxbXwO9GRESqwjAMwj1HwQ7OmGSrwxERafKU6DRC57VP5Nx2CazYmc2328xqDKmxYVxUXmozpGMzYsPP3htPaIidl2/qzy9eX8nSn7K45bUVzLrjPLqlxtT3WxARkWoqdHuJN8x2luFxSnRERGpLiU4jNf2qXrz09Q66pUZzUefmdEyKwmaznX3DE4Q5Hbx6ywD+79/LWbMnh//793Le/dVgOjSPqoeoRUSkprIL3CTazEQnNFaJjohIbal76UaqQ/MonryuD7dd0J5OydE1SnIqRLpCeP0X59I9NYYjBW4m/Gt5jbuvFhGR+pFd5CaRPHMmUm10RERqS4lOkIgNd/L2refSKSmKjLwSfv7qMjJyS6wOS0REymUXltLMpkRHRKSu1CjRefHFF2nbti1hYWEMGjSIFStWnHbdN954A5vNVmkKCwurccBSc4lRLv5z2yDaJEawN7uYCa8u40hBqdVhiYgIcDS3gBhbeWm7upcWEam1aic67777LlOmTOGhhx5izZo19OnTh1GjRnHo0KHTbhMTE8PBgwf90+7du2sVtNRcckwY79w2iBaxYew4XMj//XsFuUUeq8MSEQl6xTmZAHhxQFictcGIiASAaic6Tz31FLfffju/+MUv6N69OzNmzCAiIoLXXnvttNvYbDZSUlL8U3KyGllaqVV8BP+5bRDNolxsOpjHLa+voOCEQUdFRKRhufPMRKfQGQ+1aJcpIiKmaiU6breb1atXM3z48GM7sNsZPnw4S5cuPe12BQUFtGnThrS0NMaNG8fGjRtrHrHUifbNo/jPbecSF+Fk3d4cbn1jJcVur9VhiYgELW/+YQBKXYkWRyIiEhiq1b30kSNH8Hq9J5XIJCcns3nz5lNu06VLF1577TV69+5Nbm4u//jHPxgyZAgbN26kVatWp9ymtLSU0tJjbUfy8szGmR6PB4+n+tWsKrapybaBrENiOK/dfA43v76a5Tuz+dXbK3np5/1whaiPiurQ+SX1zcpzTOd1w7EVmYlOWZgSHRGRulDv4+gMHjyYwYMH++eHDBlCt27d+Oc//8kjjzxyym2mT5/OtGnTTlo+b948IiIiahxLenp6jbcNZL/sCDM2OVi0LYufPz+PiZ19OFRrotp0
fkl9s+IcKypSV/QNxVGcBYARoY4IRETqQrUSnWbNmuFwOMjMzKy0PDMzk5SUlCrtw+l00q9fP7Zv337adaZOncqUKVP883l5eaSlpTFy5EhiYmKqEzJg/iKZnp7OiBEjcDqd1d4+GJyzI4vb317DD9l2Fha15O9X98RuV7ZTFTq/pL5ZeY5VlKhL/XOVmomOI1pdS4uI1IVqJTqhoaH079+f+fPnM378eAB8Ph/z589n8uTJVdqH1+tl/fr1jBkz5rTruFwuXC7XScudTmetbvK13T6QXdw1hZcn9OfO/6zmkx8OEhbq4LEre+F0qBpbVen8kvpmxTmmc7rhRHiOgg2cMeqwR0SkLlT7W+yUKVP417/+xZtvvsmmTZv49a9/TWFhIb/4xS8AuPnmm5k6dap//Ycffph58+bx008/sWbNGm666SZ2797NbbfdVnfvQurE8O7JPH19X2w2eG/VPn7+r2UcytOgoiIi9a3E4yXWlwNAWFzVakiIiMiZVbuNzvXXX8/hw4d58MEHycjIoG/fvnzxxRf+Dgr27NmD3X4sfzp69Ci33347GRkZxMfH079/f7777ju6d+9ed+9C6szYPi1whdj5w3vfs3LXUS5/fjEvTTiHgW0TrA5NRCRgZRe6SbSZ1QTD41SiIyJSF2rUGcHkyZNPW1Vt4cKFleaffvppnn766ZocRiwyskcKH0+O4s7/rGZrZgE3vrKMP4/pxi+GtsWmsR1EROrc8YmOLUptdERE6oIaYMgptW8exZxJQxnbpwVlPoOHP/2R381aR5FbA4uKiNS17IJSmlHe8UOkEh0RkbqgREdOKyI0hOdu6MtDY7sTYrfxyfcHuPLF7/jpcIHVoYmIBJS83KO4bOVjFql7aRGROqFER87IZrPxi6Ht+O8d59E82sWWzHzGvbCEeRszrA5NRCRgFOWYn6kltnAIrfl4cSIicowSHamSgW0T+Oyu8xnYNp780jLueHs1T3yxGa/PsDo0EZEmz5Nnjk9X5Iy3OBIRkcChREeqLCkmjJm3n8cvh7YD4KWFO7jltRVkFZRaHJmISNPmyz8EQKkr0eJIREQChxIdqRanw86DY7vz3I39CHc6WLz9CGOfX8y6vTlWhyYi0nQVHgGgLFyJjohIXVGiIzXysz4t+HjyUNo3i+RAbgnXzVjKzOV7MAxVZRMRqa6Q4iwAjAj1uCYiUleU6EiNdU6OZs7koYzsnozb6+PPs9fzxw9/oMTjtTo0EZEmxeU2Ex1HtBIdEZG6okRHaiUmzMk//68/f7ysK3YbvLdqH1e99B3Lf8qyOjQRCVLTp09n4MCBREdHk5SUxPjx49myZUuldUpKSpg0aRKJiYlERUVx9dVXk5mZaVHEEOE5CkBoTJJlMYiIBBolOlJrNpuNX1/cgbdvHURCZCg/Hszj+leWcdOry1mz56jV4YlIkPnmm2+YNGkSy5YtIz09HY/Hw8iRIyksLPSv8/vf/57//e9/vP/++3zzzTccOHCAq666ypJ4PV4fMd4cAMLiUiyJQUQkEIVYHYAEjqEdm/HF7y7guQXbeHflXhZvP8Li7Ue4pEtzpozoQq9WsVaHKCJB4Isvvqg0/8Ybb5CUlMTq1au58MILyc3N5d///jczZ87k0ksvBeD111+nW7duLFu2jPPOO69B4z1a5CbRlgdARHxqgx5bRCSQKdGROpUUE8aj43vxqws78PyCbXy4Zj9fbznM11sOM7J7Mr8f0ZluqTFWhykiQSQ3NxeAhIQEAFavXo3H42H48OH+dbp27Urr1q1ZunTpKROd0tJSSkuPdaWfl2cmJh6PB4/HU+2YKrbxeDwcyikhpTzR8YXH46vB/kROdPw5JlLXrD6/qnpcJTpSL9ISInjimj785uKOPDt/G3PW7Wfej5nM+zGTy3un8vvhneiYFG11mCIS4Hw+H3fffTdDhw6lZ8+eAGRkZBAaGkpcXFyldZOTk8nIyDjlfqZPn860adNOWj5v3jwi
IiJqHF96ejrbcgymkA/A/KXfU+rcVeP9iZwoPT3d6hAkgFl1fhUVFVVpPSU6Uq/aNovk6ev7MumSDjz91TY+++Egn/1wkM/XH2Rc35b8blgn2jaLtDpMEQlQkyZNYsOGDSxevLhW+5k6dSpTpkzxz+fl5ZGWlsbIkSOJial+KbXH4yE9PZ0RI0YQ+v1W7DsNfNgYNvZasOvWLLV3/DnmdDqtDkcCjNXnV0Wp+tno01QaRMekaF78+TlMviSPp9O3Mu/HTGav3c8n3x/g6nNactelnUhLqPmvoiIiJ5o8eTKffvopixYtolWrVv7lKSkpuN1ucnJyKpXqZGZmkpJy6s4AXC4XLpfrpOVOp7NWN3mn04kn/zAAhfYYol3hNd6XyKnU9hwVOROrzq+qHlO9rkmD6pYawys3D+B/k8/nki7N8foM3lu1j0ufXMj9c9aTkVtidYgi0sQZhsHkyZOZPXs2CxYsoF27dpVe79+/P06nk/nz5/uXbdmyhT179jB48OCGDhd37iEAipzxDX5sEZFAphIdsUSvVrG8/otzWb37KE+nb2Xx9iP8Z9ke3lu1j5d+fg7DuydbHaKINFGTJk1i5syZfPzxx0RHR/vb3cTGxhIeHk5sbCy33norU6ZMISEhgZiYGO666y4GDx7c4D2uAfgKzBIdtyuxwY8tIhLIVKIjlurfJp7/3DaId+84jwFt4nGX+fjTR+vJLVYvMSJSMy+//DK5ublcfPHFpKam+qd3333Xv87TTz/NFVdcwdVXX82FF15ISkoKH330kSXx2orMRKcsXImOiEhdUqIjjcKg9om8c/sgOjSP5EhBKU98sdnqkESkiTIM45TTxIkT/euEhYXx4osvkp2dTWFhIR999NFp2+fUN0fxETPuiGaWHF9EJFAp0ZFGwxXi4LErewHwzvI9rN591OKIRETqX1hpNgCO6CSLIxERCSxKdKRROa99Itf2N3tH+n+z1+Px+iyOSESkfkWUmT/qOGPUNlFEpC4p0ZFGZ+qYbsRHONmckc9ri3daHY6ISL3x+QxivDkARMRbU3VORCRQKdGRRichMpQ/j+kGwNNfbWVvdtVGvxURaWpySzwkYA58F5mgREdEpC4p0ZFG6Zr+rRjULoESj48HP96AYRhWhyQiUueyCz0k2sxER1XXRETqlhIdaZRsNhuPXdkLp8PG11sO8/mGDKtDEhGpczm5eUTbis2ZSPW6JiJSl5ToSKPVMSmKX1/cEYC/fLKRvBKNrSMigaXgqPkjjocQcMVYHI2ISGBRoiON2m8u7kC7ZpEcyi/lyS+3WB2OiEidKs09BEC+Ix5sNoujEREJLEp0pFELczp4dHxPAN5atpvv9+ZYG5CISB0qyzMTnSJnvMWRiIgEHiU60ugN7diMK/u1xDBg6kfrKdPYOiISIHyFhwFwuxItjkREJPAo0ZEm4f9d3o3YcCc/Hszjje92WR2OiEidsBUdAaAsXImOiEhdU6IjTUKzKBdTR3cF4Kn0rRzIKbY4IhGR2nOWZJlPIptbG4iISABSoiNNxnUD0hjYNp4it5eHPtlodTgiIrXmKs0GwBGtREdEpK4p0ZEmw243x9YJsdtI/zGTLzdqbB0Radoiy44CEKrBQkVE6pwSHWlSOidH86uL2gPm2DoFpWUWRyQiUjOGATG+HADC45XoiIjUNSU60uTcdWknWidEcDC3hKfTt1odjohIjZR6IYE8AKISWlgcjYhI4FGiI01OmNPBI+Vj67y+ZCcb9udaHJGISPUVeAwSMT+/wuJUoiMiUteU6EiTdFHn5ozt0wKfAX+evR6vz7A6JBGRanGXFhFq85ozEc2sDUZEJAAp0ZEm64EruhEdFsIP+3L5z7LdVocjIlI9pfkAFNoiwBlmcTAiIoFHiY40WUnRYfzxMnNsnb9/uYWM3BKLIxIRqTqb22yfk++IszYQEZEApURHmrSfn9uafq3jKCgt4+FPNbaOiDQdIeWJTrEzweJIREQC
kxIdadLsdht/vbIXDruNueszeH7+NhZtPcz2QwUUu71WhyciclqhZWbVNbdLiY6ISH0IsToAkdrqlhrDbee345+LfuLJE7qbTogMpUVcGC1iw2kRF07LuPLH+HBaxIXRLNKF3W6zKHIRCWauMrNEpyxcHRGIiNQHJToSEH4/ojPhoQ6+35vDgZwS9ucUU1BaRnahm+xCNxv2551yu1CHndTyRKh980j6pMXRNy2ODs2jcCgBEpF6FOE1P5dskc0tjkREJDAp0ZGAEOZ0cPfwzv55wzDIKynjQE6xf9qfU1L+aM5n5pXg9vrYnVXE7qwilv6UxTvL9wAQ5QqhV8tYf+LTNy2OlFj1iiQidSfSZyY6jmglOiIi9UGJjgQkm81GbLiT2HAn3VJjTrmOx+sjM6+kvASoiM0H81m3N4f1+3MpKC1j6U9ZLP0py79+coyLPq3i6Ns6jr6t4ujVKpboMGdDvSURCTAxvjywQWisBgsVEakPSnQkaDkddlrFR9AqPgJIgH7m8jKvj+2HC/h+bw7r9uawbm8uWzLyyMwrZd6Pmcz7MRMAmw06No+iV8sYQnNtDC5ykxSrxEdEqiaOXADC41MsjkREJDAp0RE5QYjDTteUGLqmxHD9wNYAFLnL2LA/z0x+9uWwbk8O+3OK2XaogG2HCgAHHz7xDRd1TuKqc1pyadckwpwOa9+IiDRaJR4vCZhV16ISUi2ORkQkMCnREamCiNAQzm2XwLntjnUDezi/lB/25bB6VzafrNrBvkL4alMmX23KJDoshMt7pTK+X0vObZugnt1EpJLs/CLa2AoAiFSJjohIvVCiI1JDzaNdDOuWzIUdE+ji3kqn/hfyvw2ZfLx2PwdyS5i1ci+zVu6lZVw44/q24Mp+LemUHG112CLSCORlHQLAix1HhMbRERGpD0p0ROpIp+Qo/tgqnntHdmH5zmzmrN3P3PUH2Z9TzEsLd/DSwh30bBnD+L4t+VnfFiRFqxc3kWBVlJMBQJ4thni7qrmKiNQHe002evHFF2nbti1hYWEMGjSIFStWnHH9999/n65duxIWFkavXr2YO3dujYIVaQrsdhuDOyTyt2t6s/L+4bz483MY3i2JELuNDfvzePSzTZz31/nc/NoK5qzdT5G7zOqQRaSBleSanZrkO+KsDUREJIBVu0Tn3XffZcqUKcyYMYNBgwbxzDPPMGrUKLZs2UJSUtJJ63/33XfceOONTJ8+nSuuuIKZM2cyfvx41qxZQ8+ePevkTYg0VmFOB5f3TuXy3qlkFZTy2fqDzF67n7V7cli09TCLth4mNMROfISTcKeD8NAQwp12wkMdhDtDCA91EOF0EB7qIMzpICLUQbjTQdhxy2PCnMRFVEyhRIY6sNnUJkikMSvLM6uuFTvjLY5ERCRwVTvReeqpp7j99tv5xS9+AcCMGTP47LPPeO211/jTn/500vrPPvssl112Gffeey8AjzzyCOnp6bzwwgvMmDGjluGLNB2JUS5uHtyWmwe3ZeeRQuas3c+cdfvZnVVEZl5pnR3H6bARGx5KfHnyc/zzuIhQ87F8WVioA7vNhg3MR5vZbbbdZvPP223muETHr2MvT6QMA3yGgdcwMAwDX/m8z1f+eNwy/+s+8zHEYcNhtxFir3i0++dDHJXnHY7K63m8PkrLfJSWeSn1HPe8zFc+X/68zEep57jnZV5s2HCF2HE57YSFOCo9ukIchJU/ukLshDkrP6pTCakrRuFhAEpdap8jIlJfqpXouN1uVq9ezdSpU/3L7HY7w4cPZ+nSpafcZunSpUyZMqXSslGjRjFnzpzTHqe0tJTS0mNf/PLyzC44PR4PHo+nOiH7tzv+UaQu1eT8ahUbyuSL2zHporbsyS6moLSMYo/XnNxeij2+8kcvJf5lJy8vcnvJK/GQW1zG0SI3Hq+Bx2twpKCUIwV1lzyJyWG3YfcnghWJ3/HPT3zt2LytfB7AxvHP4VgB3OmWQ36+g+e2LwZsGAaA
mTAaBhgY5Y9AeUJpAIZhlC+Dr/9wAU5H9Wsr63OzftiLjgDgDW9mcSQiIoGrWonOkSNH8Hq9JCdXHsU5OTmZzZs3n3KbjIyMU66fkZFx2uNMnz6dadOmnbR83rx5REREVCfkStLT02u8rcjZ1OX55SyfYk73wikYBrh9UFQGhWVQVGY77jkUeWzHnpeZzz2+ii/E5Y/HPz9hGYDvuOeGwbESIMqfc9x8xfOK5SesY5b0gNc49txngJfjXztzCYoNgxA7OG2Yj/bjHm3gtBuV5kPKv+eX+cz37vFBmWE79txn/g3LfOAxzGW+42Lw+gy85rs/Y1z1wwbFRTXe+vPPv/C//+ooKqr5MeX0+jcrgyOQ1irN6lBERAJWo+x1berUqZVKgfLy8khLS2PkyJHExJz01e+sPB4P6enpjBgxAqdTI9dL3dL5Vb98PoMyn4G3/LHM5yPEbscVYsfpsNV7e6Qyrw+310eJx3w0DPzV8PxV9nwV1fM4obre8VX7zP35S1k4VhpT8ZzTLPeUlbFmzRoG9O+PMyTEn2BWlAyZyaPtWDJ5/PPydbqlRNeo6l1FibrUrYhL7+U7TxsGDrjG6lBERAJWtRKdZs2a4XA4yMzMrLQ8MzOTlJRTD3iWkpJSrfUBXC4XLpfrpOVOp7NWXyRru73Imej8qj8nfxo0HKcTwoFYC2PweDwU7jC4oHNSg59jOqfrSXxbDsf0hIT2VkciIhKwqlWRITQ0lP79+zN//nz/Mp/Px/z58xk8ePAptxk8eHCl9cGs4nO69UVERERERGqr2lXXpkyZwi233MKAAQM499xzeeaZZygsLPT3wnbzzTfTsmVLpk+fDsDvfvc7LrroIp588kkuv/xyZs2axapVq3jllVfq9p2IiIiIiIiUq3aic/3113P48GEefPBBMjIy6Nu3L1988YW/w4E9e/Zgtx8rKBoyZAgzZ87k/vvv589//jOdOnVizpw5GkNHRERERETqTY06I5g8eTKTJ08+5WsLFy48adm1117LtddeW5NDiYiIiIiIVFsNOhsVERERERFp3JToiIiIiIhIwFGiIyIiIiIiAUeJjoiIiIiIBBwlOiIiIiIiEnCU6IiIiIiISMBRoiMiIiIiIgFHiY6IiIiIiAQcJToiIiIiIhJwlOiIiIiIiEjACbE6gKowDAOAvLy8Gm3v8XgoKioiLy8Pp9NZl6GJ6PySemflOVbxuVvxOSwm3ZeksdM5JvXJ6vOrqvemJpHo5OfnA5CWlmZxJCIiwSk/P5/Y2Firw2g0dF8SEbHe2e5NNqMJ/Ezn8/k4cOAA0dHR2Gy2am+fl5dHWloae/fuJSYmph4ilGCm80vqm5XnmGEY5Ofn06JFC+x21XauoPuSNHY6x6Q+WX1+VfXe1CRKdOx2O61atar1fmJiYnSxS73R+SX1zapzTCU5J9N9SZoKnWNSn6w8v6pyb9LPcyIiIiIiEnCU6IiIiIiISMAJikTH5XLx0EMP4XK5rA5FApDOL6lvOscCj/6nUt90jkl9airnV5PojEBERERERKQ6gqJER0REREREgosSHRERERERCThBlejYbDbmzJljdRgSoHR+SUPatWsXNpuNdevWWR2K1JI+O6Q+6fyShtTY7k0Bl+i8+OKLtG3blrCwMAYNGsSKFSusDkkCxF/+8hdsNlulqWvXrlaHJU3UokWLGDt2LC1atDjlFxHDMHjwwQdJTU0lPDyc4cOHs23bNmuClVrTvUnqi+5NUpcC7d4UUInOu+++y5QpU3jooYdYs2YNffr0YdSoURw6dMjq0CRA9OjRg4MHD/qnxYsXWx2SNFGFhYX06dOHF1988ZSvP/HEEzz33HPMmDGD5cuXExkZyahRoygpKWngSKW2dG+S+qZ7k9SVQLs3BVSi89RTT3H77bfzi1/8gu7duzNjxgwiIiJ47bXXTrn+Qw89RGpqKj/88EMDRypNVUhICCkpKf6pWbNmp11X55ecyejRo3n00Ue5
8sorT3rNMAyeeeYZ7r//fsaNG0fv3r156623OHDgwGmroHi9Xn75y1/StWtX9uzZU8/RS3Xo3iT1TfcmqSuBdm8KmETH7XazevVqhg8f7l9mt9sZPnw4S5curbSuYRjcddddvPXWW3z77bf07t27ocOVJmrbtm20aNGC9u3bM2HChFNetDq/pLZ27txJRkZGpc+z2NhYBg0adNLnGUBpaSnXXnst69at49tvv6V169YNGa6cge5N0hB0b5KG0BTvTSENfsR6cuTIEbxeL8nJyZWWJycns3nzZv98WVkZN910E2vXrmXx4sW0bNmyoUOVJmrQoEG88cYbdOnShYMHDzJt2jQuuOACNmzYQHR0NKDzS+pGRkYGwCk/zypeq1BQUMDll19OaWkpX3/9NbGxsQ0Wp5yd7k1S33RvkobSFO9NAZPoVNXvf/97XC4Xy5YtO2PRrsiJRo8e7X/eu3dvBg0aRJs2bXjvvfe49dZbAZ1f0vBuvPFGWrVqxYIFCwgPD7c6HKkhfXZITeneJI1RY7k3BUzVtWbNmuFwOMjMzKy0PDMzk5SUFP/8iBEj2L9/P19++WVDhygBJi4ujs6dO7N9+3b/Mp1fUhcqPrPO9nkGMGbMGH744YdTVhsQ6+neJA1N9yapL03x3hQwiU5oaCj9+/dn/vz5/mU+n4/58+czePBg/7Kf/exnzJw5k9tuu41Zs2ZZEaoEiIKCAnbs2EFqaqp/mc4vqQvt2rUjJSWl0udZXl4ey5cvr/R5BvDrX/+axx9/nJ/97Gd88803DR2qnIXuTdLQdG+S+tIk701GAJk1a5bhcrmMN954w/jxxx+NO+64w4iLizMyMjIMwzAMwJg9e7ZhGIbx/vvvG2FhYcb7779vYcTSlPzhD38wFi5caOzcudNYsmSJMXz4cKNZs2bGoUOHDMPQ+SXVk5+fb6xdu9ZYu3atARhPPfWUsXbtWmP37t2GYRjG448/bsTFxRkff/yx8cMPPxjjxo0z2rVrZxQXFxuGYRg7d+40AGPt2rWGYRjG008/bURFRRnffvutVW9JTkP3JqlPujdJXQq0e1NAJTqGYRjPP/+80bp1ayM0NNQ499xzjWXLlvlfO/5iNwzDePfdd42wsDDjww8/tCBSaWquv/56IzU11QgNDTVatmxpXH/99cb27dv9r+v8kur4+uuvDeCk6ZZbbjEMwzB8Pp/xwAMPGMnJyYbL5TKGDRtmbNmyxb/9iTcTwzCMJ5980oiOjjaWLFnSwO9Gzkb3JqkvujdJXQq0e5PNMAyj4cqPRERERERE6l/AtNERERERERGpoERHREREREQCjhIdEREREREJOEp0REREREQk4CjRERERERGRgKNER0REREREAo4SHRERERERCThKdEREREREJOAo0RERERERkYCjREekjkycOJHx48dbHYaIiAig+5KIEh0REREREQk4SnREqumDDz6gV69ehIeHk5iYyPDhw7n33nt58803+fjjj7HZbNhsNhYuXAjA3r17ue6664iLiyMhIYFx48axa9cu//4qfnGbNm0azZs3JyYmhjvvvBO3223NGxQRkSZF9yWRUwuxOgCRpuTgwYPceOONPPHEE1x55ZXk5+fz7bffcvPNN7Nnzx7y8vJ4/fXXAUhISMDj8TBq1CgGDx7Mt9/+/3btJxS2MIzj+G8uncx0TPIvJR0bC2lSzEZTUqfESk1ZyIZsZScLhfzbKWVJbFjYYGVBcpopmbKZWShFhLKgiFOMv3endM2tWch17vezO52nt/etU0+/87xx5ebmanx8XC0tLUqlUjIMQ5K0tbWlvLw8OY6jk5MTdXd3q6ioSBMTE995XADAP46+BGRG0AGycHFxoefnZ0WjUVmWJUkKhUKSJL/fr3Q6rbKysvf6xcVFvb6+am5uTj6fT5K0sLCggoICOY6j5uZmSZJhGJqfn1cgEFBNTY1GR0fV39+vsbEx/frF4BUA8Dn6EpAZXyqQhdraWtm2
rVAopPb2ds3Ozur6+jpjfTKZ1OHhofLz82WapkzTVGFhoR4eHnR0dPRh3UAg8P7c0NAg13V1dnb2pecBAPxs9CUgMyY6QBZycnK0ubmpnZ0dbWxsaGZmRoODg0okEp/Wu66r+vp6LS0t/fGupKTkq7cLAPA4+hKQGUEHyJLP51MkElEkEtHQ0JAsy9Lq6qoMw9DLy8uH2rq6Oi0vL6u0tFTBYDDjmslkUvf39/L7/ZKk3d1dmaapioqKLz0LAODnoy8Bn+PqGpCFRCKhyclJ7e3t6fT0VCsrK7q8vFR1dbUqKyuVSqV0cHCgq6srPT09qbOzU8XFxWpra1M8Htfx8bEcx1FfX5/Oz8/f1318fFRPT4/29/e1vr6u4eFh9fb2cg8aAPBX9CUgMyY6QBaCwaBisZimp6d1e3sry7I0NTWl1tZWhcNhOY6jcDgs13W1vb2tpqYmxWIxDQwMKBqN6u7uTuXl5bJt+8OfNNu2VVVVpcbGRqXTaXV0dGhkZOT7DgoA+BHoS0Bmvre3t7fv3gTwP+vq6tLNzY3W1ta+eysAANCX4BnMHwEAAAB4DkEHAAAAgOdwdQ0AAACA5zDRAQAAAOA5BB0AAAAAnkPQAQAAAOA5BB0AAAAAnkPQAQAAAOA5BB0AAAAAnkPQAQAAAOA5BB0AAAAAnkPQAQAAAOA5vwHlwqiQDxlTdQAAAABJRU5ErkJggg==\n"
     },
     "metadata": {}
    }
   ],
   "source": [
    "plot_learning_curves(history, sample_step=500)  # x-axis is steps"
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "# Extract the 7z archive of test images into the CIFAR-10 data directory.\n",
    "# A context manager guarantees the archive handle is closed even if\n",
    "# extraction raises (the original open/extractall/close leaked on error).\n",
    "with py7zr.SevenZipFile(r'./test.7z', 'r') as archive:\n",
    "    archive.extractall(path=r'./competitions/cifar-10/')"
   ],
   "metadata": {
    "id": "e0JxP6FE9L8Y"
   },
   "execution_count": 30,
   "outputs": []
  },
  {
   "cell_type": "code",
   "source": [
    "!ls competitions/cifar-10/test|wc -l"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "KSDxPyov9i-y",
    "outputId": "e7941401-cdbf-49cb-d88f-e8e33c45e61b"
   },
   "execution_count": 33,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "300000\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "!ls"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "H39dl-23-cil",
    "outputId": "6c61e8c5-e344-4074-8609-d508f496f584"
   },
   "execution_count": 34,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "competitions   sample_data\t     trainLabels.csv\n",
      "kaggle.json    sampleSubmission.csv  wangdao_deeplearning_train.py\n",
      "model_weights  test.7z\n",
      "__pycache__    train.7z\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-06-26T01:45:37.818553Z",
     "start_time": "2025-06-26T01:45:37.816716Z"
    },
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "Yvx48aMb4pNw",
    "outputId": "1d98cf1d-e767-4981-c780-625760d02dc4"
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stderr",
     "text": [
      "/usr/local/lib/python3.11/dist-packages/torch/utils/data/dataloader.py:624: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
      "  warnings.warn(\n"
     ]
    },
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "正在预测测试集...\n"
     ]
    },
    {
     "output_type": "stream",
     "name": "stderr",
     "text": [
      "\r预测进度:   0%|          | 0/2344 [00:00<?, ?it/s]/usr/local/lib/python3.11/dist-packages/torch/utils/data/dataloader.py:624: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
      "  warnings.warn(\n",
      "预测进度: 100%|██████████| 2344/2344 [01:59<00:00, 19.64it/s]\n"
     ]
    },
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "id列是否有重复值: False\n",
      "预测完成，结果已保存至 cifar10_submission.csv\n"
     ]
    }
   ],
   "source": [
    "# Libraries used by this cell\n",
    "import os\n",
    "import pandas as pd\n",
    "from PIL import Image\n",
    "import torch\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "from torchvision import transforms\n",
    "import tqdm\n",
    "\n",
    "class CIFAR10TestDataset(Dataset):\n",
    "    \"\"\"Dataset over a directory of CIFAR-10 test images named <id>.png.\"\"\"\n",
    "\n",
    "    def __init__(self, img_dir, transform=None):\n",
    "        \"\"\"\n",
    "        Args:\n",
    "            img_dir: directory containing the test images (*.png)\n",
    "            transform: optional image preprocessing transform\n",
    "        \"\"\"\n",
    "        self.img_dir = img_dir\n",
    "        self.transform = transform\n",
    "        self.img_files = [f for f in os.listdir(img_dir) if f.endswith('.png')]\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.img_files)\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        img_path = os.path.join(self.img_dir, self.img_files[idx])\n",
    "        image = Image.open(img_path).convert('RGB')\n",
    "\n",
    "        if self.transform:\n",
    "            image = self.transform(image)\n",
    "\n",
    "        # Image id is the file name without its extension ('123.png' -> 123)\n",
    "        img_id = int(os.path.splitext(self.img_files[idx])[0])\n",
    "\n",
    "        return image, img_id\n",
    "\n",
    "def predict_test_set(model, img_dir, labels_file, device, batch_size=64, num_workers=2):\n",
    "    \"\"\"\n",
    "    Predict the test set and write a Kaggle submission CSV.\n",
    "\n",
    "    Args:\n",
    "        model: trained classification model\n",
    "        img_dir: directory with the test images\n",
    "        labels_file: kept for backward compatibility; the submission\n",
    "            DataFrame is built from scratch, so the template is not read\n",
    "            (the previous pd.read_csv(labels_file) result was immediately\n",
    "            shadowed and never used)\n",
    "        device: computation device\n",
    "        batch_size: batch size for the test DataLoader\n",
    "        num_workers: DataLoader worker count; default lowered to 2 because\n",
    "            the runtime warned this system supports at most 2 workers\n",
    "    \"\"\"\n",
    "    # Same normalization statistics as the training set\n",
    "    transform = transforms.Compose([\n",
    "        transforms.ToTensor(),\n",
    "        transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
    "    ])\n",
    "\n",
    "    # Test data: order is irrelevant for prediction, so no shuffling\n",
    "    test_dataset = CIFAR10TestDataset(img_dir, transform=transform)\n",
    "    test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=num_workers)\n",
    "\n",
    "    # Evaluation mode disables dropout / uses running batch-norm stats\n",
    "    model.eval()\n",
    "\n",
    "    predictions = {}\n",
    "\n",
    "    print(\"正在预测测试集...\")\n",
    "    with torch.no_grad():\n",
    "        for images, img_ids in tqdm.tqdm(test_loader, desc=\"预测进度\"):\n",
    "            images = images.to(device)\n",
    "            outputs = model(images)\n",
    "            # argmax over the class dimension gives the predicted label index\n",
    "            _, predicted = torch.max(outputs, 1)\n",
    "\n",
    "            # Record the prediction for every image in the batch\n",
    "            for i, img_id in enumerate(img_ids):\n",
    "                predictions[img_id.item()] = predicted[i].item()\n",
    "\n",
    "    # CIFAR-10 class names, indexed by the model's label index\n",
    "    class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n",
    "\n",
    "    # Map numeric labels to class-name strings\n",
    "    labeled_predictions = {img_id: class_names[pred] for img_id, pred in predictions.items()}\n",
    "\n",
    "    # Build the submission frame directly from the predictions\n",
    "    submission_df = pd.DataFrame({\n",
    "        'id': list(labeled_predictions.keys()),\n",
    "        'label': list(labeled_predictions.values())\n",
    "    })\n",
    "    submission_df = submission_df.sort_values(by='id')\n",
    "\n",
    "    # Sanity check: every test image should appear exactly once\n",
    "    has_duplicates = submission_df['id'].duplicated().any()\n",
    "    print(f\"id列是否有重复值: {has_duplicates}\")\n",
    "    output_file = 'cifar10_submission.csv'\n",
    "    submission_df.to_csv(output_file, index=False)\n",
    "    print(f\"预测完成，结果已保存至 {output_file}\")\n",
    "\n",
    "# Run test-set prediction\n",
    "img_dir = r\"competitions/cifar-10/test\"\n",
    "labels_file = r\"./sampleSubmission.csv\"\n",
    "predict_test_set(model, img_dir, labels_file, device, batch_size=128)\n"
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "!head -10 cifar10_submission.csv"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "f90sLQwP_o3I",
    "outputId": "bc0d41e4-2fe6-4211-d9ab-6de98d54831d"
   },
   "execution_count": 39,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "id,label\n",
      "1,deer\n",
      "2,airplane\n",
      "3,automobile\n",
      "4,ship\n",
      "5,bird\n",
      "6,cat\n",
      "7,airplane\n",
      "8,deer\n",
      "9,bird\n"
     ]
    }
   ]
  },
  {
   "cell_type": "code",
   "source": [
    "!wc -l cifar10_submission.csv"
   ],
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/"
    },
    "id": "R1w9Z1W-AOgY",
    "outputId": "1d1fbcba-704f-4949-82a9-3b1a028aa06c"
   },
   "execution_count": 40,
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "300001 cifar10_submission.csv\n"
     ]
    }
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.3"
  },
  "colab": {
   "provenance": [],
   "gpuType": "T4"
  },
  "accelerator": "GPU",
  "widgets": {
   "application/vnd.jupyter.widget-state+json": {
    "be06c2b352c14f5c9aee8a16d5b11e24": {
     "model_module": "@jupyter-widgets/controls",
     "model_name": "HBoxModel",
     "model_module_version": "1.5.0",
     "state": {
      "_dom_classes": [],
      "_model_module": "@jupyter-widgets/controls",
      "_model_module_version": "1.5.0",
      "_model_name": "HBoxModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/controls",
      "_view_module_version": "1.5.0",
      "_view_name": "HBoxView",
      "box_style": "",
      "children": [
       "IPY_MODEL_32813af8951f4c2b8dd4171a5937c5a9",
       "IPY_MODEL_9cb464da4e09479292889a917a7f436a",
       "IPY_MODEL_d36666c817b841f99576af80feb4a7ee"
      ],
      "layout": "IPY_MODEL_12492b59648c43e4bdbe100ddb3a3702"
     }
    },
    "32813af8951f4c2b8dd4171a5937c5a9": {
     "model_module": "@jupyter-widgets/controls",
     "model_name": "HTMLModel",
     "model_module_version": "1.5.0",
     "state": {
      "_dom_classes": [],
      "_model_module": "@jupyter-widgets/controls",
      "_model_module_version": "1.5.0",
      "_model_name": "HTMLModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/controls",
      "_view_module_version": "1.5.0",
      "_view_name": "HTMLView",
      "description": "",
      "description_tooltip": null,
      "layout": "IPY_MODEL_1ee3d0c200c64a17a46ee92c07e0e1b7",
      "placeholder": "​",
      "style": "IPY_MODEL_7ab573b1c9fd48a4bb67890514744bb0",
      "value": " 30%"
     }
    },
    "9cb464da4e09479292889a917a7f436a": {
     "model_module": "@jupyter-widgets/controls",
     "model_name": "FloatProgressModel",
     "model_module_version": "1.5.0",
     "state": {
      "_dom_classes": [],
      "_model_module": "@jupyter-widgets/controls",
      "_model_module_version": "1.5.0",
      "_model_name": "FloatProgressModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/controls",
      "_view_module_version": "1.5.0",
      "_view_name": "ProgressView",
      "bar_style": "danger",
      "description": "",
      "description_tooltip": null,
      "layout": "IPY_MODEL_8a284f1425f343c7b02cf5e060519df4",
      "max": 35200,
      "min": 0,
      "orientation": "horizontal",
      "style": "IPY_MODEL_c97b171f55714254a3bd3d0d73882031",
      "value": 10500
     }
    },
    "d36666c817b841f99576af80feb4a7ee": {
     "model_module": "@jupyter-widgets/controls",
     "model_name": "HTMLModel",
     "model_module_version": "1.5.0",
     "state": {
      "_dom_classes": [],
      "_model_module": "@jupyter-widgets/controls",
      "_model_module_version": "1.5.0",
      "_model_name": "HTMLModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/controls",
      "_view_module_version": "1.5.0",
      "_view_name": "HTMLView",
      "description": "",
      "description_tooltip": null,
      "layout": "IPY_MODEL_6e4a4ef865da440d92b72bcf10f8b877",
      "placeholder": "​",
      "style": "IPY_MODEL_e82290e7a17643cdb2eaf7df030265f1",
      "value": " 10500/35200 [12:04&lt;24:59, 16.48it/s, epoch=14, loss=0.0018, acc=100.00%]"
     }
    },
    "12492b59648c43e4bdbe100ddb3a3702": {
     "model_module": "@jupyter-widgets/base",
     "model_name": "LayoutModel",
     "model_module_version": "1.2.0",
     "state": {
      "_model_module": "@jupyter-widgets/base",
      "_model_module_version": "1.2.0",
      "_model_name": "LayoutModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/base",
      "_view_module_version": "1.2.0",
      "_view_name": "LayoutView",
      "align_content": null,
      "align_items": null,
      "align_self": null,
      "border": null,
      "bottom": null,
      "display": null,
      "flex": null,
      "flex_flow": null,
      "grid_area": null,
      "grid_auto_columns": null,
      "grid_auto_flow": null,
      "grid_auto_rows": null,
      "grid_column": null,
      "grid_gap": null,
      "grid_row": null,
      "grid_template_areas": null,
      "grid_template_columns": null,
      "grid_template_rows": null,
      "height": null,
      "justify_content": null,
      "justify_items": null,
      "left": null,
      "margin": null,
      "max_height": null,
      "max_width": null,
      "min_height": null,
      "min_width": null,
      "object_fit": null,
      "object_position": null,
      "order": null,
      "overflow": null,
      "overflow_x": null,
      "overflow_y": null,
      "padding": null,
      "right": null,
      "top": null,
      "visibility": null,
      "width": null
     }
    },
    "1ee3d0c200c64a17a46ee92c07e0e1b7": {
     "model_module": "@jupyter-widgets/base",
     "model_name": "LayoutModel",
     "model_module_version": "1.2.0",
     "state": {
      "_model_module": "@jupyter-widgets/base",
      "_model_module_version": "1.2.0",
      "_model_name": "LayoutModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/base",
      "_view_module_version": "1.2.0",
      "_view_name": "LayoutView",
      "align_content": null,
      "align_items": null,
      "align_self": null,
      "border": null,
      "bottom": null,
      "display": null,
      "flex": null,
      "flex_flow": null,
      "grid_area": null,
      "grid_auto_columns": null,
      "grid_auto_flow": null,
      "grid_auto_rows": null,
      "grid_column": null,
      "grid_gap": null,
      "grid_row": null,
      "grid_template_areas": null,
      "grid_template_columns": null,
      "grid_template_rows": null,
      "height": null,
      "justify_content": null,
      "justify_items": null,
      "left": null,
      "margin": null,
      "max_height": null,
      "max_width": null,
      "min_height": null,
      "min_width": null,
      "object_fit": null,
      "object_position": null,
      "order": null,
      "overflow": null,
      "overflow_x": null,
      "overflow_y": null,
      "padding": null,
      "right": null,
      "top": null,
      "visibility": null,
      "width": null
     }
    },
    "7ab573b1c9fd48a4bb67890514744bb0": {
     "model_module": "@jupyter-widgets/controls",
     "model_name": "DescriptionStyleModel",
     "model_module_version": "1.5.0",
     "state": {
      "_model_module": "@jupyter-widgets/controls",
      "_model_module_version": "1.5.0",
      "_model_name": "DescriptionStyleModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/base",
      "_view_module_version": "1.2.0",
      "_view_name": "StyleView",
      "description_width": ""
     }
    },
    "8a284f1425f343c7b02cf5e060519df4": {
     "model_module": "@jupyter-widgets/base",
     "model_name": "LayoutModel",
     "model_module_version": "1.2.0",
     "state": {
      "_model_module": "@jupyter-widgets/base",
      "_model_module_version": "1.2.0",
      "_model_name": "LayoutModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/base",
      "_view_module_version": "1.2.0",
      "_view_name": "LayoutView",
      "align_content": null,
      "align_items": null,
      "align_self": null,
      "border": null,
      "bottom": null,
      "display": null,
      "flex": null,
      "flex_flow": null,
      "grid_area": null,
      "grid_auto_columns": null,
      "grid_auto_flow": null,
      "grid_auto_rows": null,
      "grid_column": null,
      "grid_gap": null,
      "grid_row": null,
      "grid_template_areas": null,
      "grid_template_columns": null,
      "grid_template_rows": null,
      "height": null,
      "justify_content": null,
      "justify_items": null,
      "left": null,
      "margin": null,
      "max_height": null,
      "max_width": null,
      "min_height": null,
      "min_width": null,
      "object_fit": null,
      "object_position": null,
      "order": null,
      "overflow": null,
      "overflow_x": null,
      "overflow_y": null,
      "padding": null,
      "right": null,
      "top": null,
      "visibility": null,
      "width": null
     }
    },
    "c97b171f55714254a3bd3d0d73882031": {
     "model_module": "@jupyter-widgets/controls",
     "model_name": "ProgressStyleModel",
     "model_module_version": "1.5.0",
     "state": {
      "_model_module": "@jupyter-widgets/controls",
      "_model_module_version": "1.5.0",
      "_model_name": "ProgressStyleModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/base",
      "_view_module_version": "1.2.0",
      "_view_name": "StyleView",
      "bar_color": null,
      "description_width": ""
     }
    },
    "6e4a4ef865da440d92b72bcf10f8b877": {
     "model_module": "@jupyter-widgets/base",
     "model_name": "LayoutModel",
     "model_module_version": "1.2.0",
     "state": {
      "_model_module": "@jupyter-widgets/base",
      "_model_module_version": "1.2.0",
      "_model_name": "LayoutModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/base",
      "_view_module_version": "1.2.0",
      "_view_name": "LayoutView",
      "align_content": null,
      "align_items": null,
      "align_self": null,
      "border": null,
      "bottom": null,
      "display": null,
      "flex": null,
      "flex_flow": null,
      "grid_area": null,
      "grid_auto_columns": null,
      "grid_auto_flow": null,
      "grid_auto_rows": null,
      "grid_column": null,
      "grid_gap": null,
      "grid_row": null,
      "grid_template_areas": null,
      "grid_template_columns": null,
      "grid_template_rows": null,
      "height": null,
      "justify_content": null,
      "justify_items": null,
      "left": null,
      "margin": null,
      "max_height": null,
      "max_width": null,
      "min_height": null,
      "min_width": null,
      "object_fit": null,
      "object_position": null,
      "order": null,
      "overflow": null,
      "overflow_x": null,
      "overflow_y": null,
      "padding": null,
      "right": null,
      "top": null,
      "visibility": null,
      "width": null
     }
    },
    "e82290e7a17643cdb2eaf7df030265f1": {
     "model_module": "@jupyter-widgets/controls",
     "model_name": "DescriptionStyleModel",
     "model_module_version": "1.5.0",
     "state": {
      "_model_module": "@jupyter-widgets/controls",
      "_model_module_version": "1.5.0",
      "_model_name": "DescriptionStyleModel",
      "_view_count": null,
      "_view_module": "@jupyter-widgets/base",
      "_view_module_version": "1.2.0",
      "_view_name": "StyleView",
      "description_width": ""
     }
    }
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}
