{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "uGV2VjXF4pNs"
      },
      "source": [
        "# CIFAR-10 数据下载与VGG11模型搭建"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:32.363026Z",
          "start_time": "2025-06-26T01:43:29.447990Z"
        },
        "id": "3djTfPq64pNt"
      },
      "outputs": [],
      "source": [
        "import torch\n",
        "import torchvision\n",
        "import numpy as np\n",
        "import matplotlib.pyplot as plt\n",
        "from torchvision import datasets, transforms\n",
        "from deeplearning_func import EarlyStopping, ModelSaver,train_classification_model,plot_learning_curves\n",
        "from deeplearning_func import evaluate_classification_model as evaluate_model\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 3,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "ua9T8MLp3ODM",
        "outputId": "7cdec783-6212-433a-bb62-49a148e2f212"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "输入张量的形状: torch.Size([1, 3, 6, 6])\n",
            "输出张量的形状: torch.Size([1, 3, 2, 2])\n"
          ]
        }
      ],
      "source": [
        "import torch\n",
        "import torch.nn as nn\n",
        "import matplotlib.pyplot as plt\n",
        "\n",
        "# Create a random input tensor simulating a feature map:\n",
        "# shape [1, 3, 6, 6] = 1 sample, 3 channels, 6x6 spatial size\n",
        "input_tensor = torch.randn(1, 3, 6, 6)\n",
        "print(\"输入张量的形状:\", input_tensor.shape)\n",
        "\n",
        "# AdaptiveAvgPool2d pools to a fixed 2x2 output regardless of input size\n",
        "adaptive_pool = nn.AdaptiveAvgPool2d(output_size=(2, 2))\n",
        "\n",
        "# apply adaptive average pooling\n",
        "output_tensor = adaptive_pool(input_tensor)\n",
        "print(\"输出张量的形状:\", output_tensor.shape)\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 4,
      "metadata": {
        "id": "Fi46_oyAY6qD"
      },
      "outputs": [],
      "source": [
        "import json\n",
        "import os\n",
        "# SECURITY FIX: never hardcode Kaggle API credentials in a notebook — they\n",
        "# leak through version control and shared outputs. Read them from the\n",
        "# environment instead (set KAGGLE_USERNAME / KAGGLE_KEY before running).\n",
        "token = {\"username\": os.environ[\"KAGGLE_USERNAME\"], \"key\": os.environ[\"KAGGLE_KEY\"]}\n",
        "with open('/content/kaggle.json', 'w') as file:\n",
        "  json.dump(token, file)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 5,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "FBunE0OvY6ZY",
        "outputId": "cab43f6e-ea68-48c3-fab8-08d1405d4b90"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "{\"username\": \"<REDACTED>\", \"key\": \"<REDACTED>\"}"
          ]
        }
      ],
      "source": [
        "!cat /content/kaggle.json"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 6,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "qXgB8rdbZIDU",
        "outputId": "5895d065-2de9-4c1e-d9ce-6550da5aac03"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "- path is now set to: /content\n"
          ]
        }
      ],
      "source": [
        "!mkdir -p ~/.kaggle\n",
        "!cp /content/kaggle.json ~/.kaggle/\n",
        "!chmod 600 ~/.kaggle/kaggle.json\n",
        "!kaggle config set -n path -v /content"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 7,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "4feg3Y3_2IJC",
        "outputId": "ad1387e6-6abf-4de9-bb3c-951c9438bcd3"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Downloading cifar-10.zip to /content/competitions/cifar-10\n",
            "100% 715M/715M [00:07<00:00, 45.3MB/s]\n",
            "100% 715M/715M [00:07<00:00, 105MB/s] \n"
          ]
        }
      ],
      "source": [
        "!kaggle competitions download -c cifar-10"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 8,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "QDeB7tM12b9K",
        "outputId": "b0ac5f4b-21a7-41b6-beaa-a2d7d379b20f"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Archive:  /content/competitions/cifar-10/cifar-10.zip\n",
            "  inflating: sampleSubmission.csv    \n",
            "  inflating: test.7z                 \n",
            "  inflating: train.7z                \n",
            "  inflating: trainLabels.csv         \n"
          ]
        }
      ],
      "source": [
        "!unzip /content/competitions/cifar-10/cifar-10.zip"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 9,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "NK7iEl7I2bRK",
        "outputId": "dfac54c0-45e6-48ef-8dc6-099301fd3a5d"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "Collecting py7zr\n",
            "  Downloading py7zr-1.0.0-py3-none-any.whl.metadata (17 kB)\n",
            "Collecting texttable (from py7zr)\n",
            "  Downloading texttable-1.7.0-py2.py3-none-any.whl.metadata (9.8 kB)\n",
            "Requirement already satisfied: pycryptodomex>=3.20.0 in /usr/local/lib/python3.11/dist-packages (from py7zr) (3.23.0)\n",
            "Collecting brotli>=1.1.0 (from py7zr)\n",
            "  Downloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.5 kB)\n",
            "Requirement already satisfied: psutil in /usr/local/lib/python3.11/dist-packages (from py7zr) (5.9.5)\n",
            "Collecting pyzstd>=0.16.1 (from py7zr)\n",
            "  Downloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.5 kB)\n",
            "Collecting pyppmd<1.3.0,>=1.1.0 (from py7zr)\n",
            "  Downloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.4 kB)\n",
            "Collecting pybcj<1.1.0,>=1.0.0 (from py7zr)\n",
            "  Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.7 kB)\n",
            "Collecting multivolumefile>=0.2.3 (from py7zr)\n",
            "  Downloading multivolumefile-0.2.3-py3-none-any.whl.metadata (6.3 kB)\n",
            "Collecting inflate64<1.1.0,>=1.0.0 (from py7zr)\n",
            "  Downloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (4.4 kB)\n",
            "Requirement already satisfied: typing-extensions>=4.13.2 in /usr/local/lib/python3.11/dist-packages (from pyzstd>=0.16.1->py7zr) (4.14.0)\n",
            "Downloading py7zr-1.0.0-py3-none-any.whl (69 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m69.7/69.7 kB\u001b[0m \u001b[31m3.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.9 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.9/2.9 MB\u001b[0m \u001b[31m63.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (96 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m96.4/96.4 kB\u001b[0m \u001b[31m10.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading multivolumefile-0.2.3-py3-none-any.whl (17 kB)\n",
            "Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (50 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.7/50.7 kB\u001b[0m \u001b[31m4.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (141 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m141.3/141.3 kB\u001b[0m \u001b[31m14.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (412 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m412.9/412.9 kB\u001b[0m \u001b[31m36.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading texttable-1.7.0-py2.py3-none-any.whl (10 kB)\n",
            "Installing collected packages: texttable, brotli, pyzstd, pyppmd, pybcj, multivolumefile, inflate64, py7zr\n",
            "Successfully installed brotli-1.1.0 inflate64-1.0.3 multivolumefile-0.2.3 py7zr-1.0.0 pybcj-1.0.6 pyppmd-1.2.0 pyzstd-0.17.0 texttable-1.7.0\n"
          ]
        }
      ],
      "source": [
        "%pip install py7zr\n",
        "import py7zr\n",
        "# FIX: use a context manager so the archive handle is closed even if\n",
        "# extraction raises (the original leaked the handle on failure)\n",
        "with py7zr.SevenZipFile(r'./train.7z', 'r') as archive:\n",
        "    archive.extractall(path=r'./competitions/cifar-10/')"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 10,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "rI5JDfji59q-",
        "outputId": "fb66f021-a8e4-418e-cb1e-9bceab4f141c"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "50000\n"
          ]
        }
      ],
      "source": [
        "!ls competitions/cifar-10/train|wc -l"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "vk4EQTiM4pNt"
      },
      "source": [
        "# 加载数据并处理为tensor"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 11,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:32.407799Z",
          "start_time": "2025-06-26T01:43:32.363026Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "nvguuJLl4pNt",
        "outputId": "6105f992-8211-4da7-a52d-39dc56fab167"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "完整数据集大小: 50000\n",
            "训练集大小: 45000\n",
            "验证集大小: 5000\n"
          ]
        }
      ],
      "source": [
        "# 加载CIFAR-10数据集\n",
        "import os\n",
        "import pandas as pd\n",
        "from PIL import Image\n",
        "from torch.utils.data import Dataset\n",
        "\n",
        "# CIFAR-10 dataset: PNG images named '<id>.png' in img_dir, labels from a CSV\n",
        "class CIFAR10Dataset(Dataset):\n",
        "    \"\"\"Map-style dataset returning (image, label_id) pairs for CIFAR-10.\"\"\"\n",
        "    def __init__(self, img_dir, labels_file, transform=None):\n",
        "        self.img_dir = img_dir\n",
        "        self.transform = transform\n",
        "\n",
        "        # read the label file; read_csv treats the first row as the header\n",
        "        self.labels_df = pd.read_csv(labels_file)\n",
        "        self.img_names = self.labels_df.iloc[:, 0].values.astype(str)  # first column: image ids, forced to str\n",
        "\n",
        "        # class-name -> id mapping; a dict gives O(1) lookups\n",
        "        self.class_names_dict = {'airplane': 0, 'automobile': 1, 'bird': 2, 'cat': 3,\n",
        "                                 'deer': 4, 'dog': 5, 'frog': 6, 'horse': 7, 'ship': 8, 'truck': 9}\n",
        "        # convert text labels to numeric ids once, up front\n",
        "        self.labels = [self.class_names_dict[label] for label in self.labels_df.iloc[:, 1].values]\n",
        "\n",
        "    def __len__(self):\n",
        "        return len(self.labels)\n",
        "\n",
        "    def __getitem__(self, idx):\n",
        "        img_path = os.path.join(self.img_dir, self.img_names[idx] + '.png')  # image path\n",
        "        image = Image.open(img_path).convert('RGB')  # force 3 channels for robustness\n",
        "        label = self.labels[idx]\n",
        "\n",
        "        # BUG FIX: the original only assigned image_tensor inside the if-branch,\n",
        "        # raising NameError when transform is None; fall back to the raw image\n",
        "        if self.transform:\n",
        "            image = self.transform(image)\n",
        "\n",
        "        return image, label\n",
        "\n",
        "# define the preprocessing pipeline (normalization stats are per-channel\n",
        "# mean/std precomputed over the training images)\n",
        "transform = transforms.Compose([\n",
        "    transforms.ToTensor(),\n",
        "    transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
        "])\n",
        "\n",
        "# Colab paths for the extracted CIFAR-10 data\n",
        "img_dir = r\"competitions/cifar-10/train\"\n",
        "labels_file = r\"./trainLabels.csv\"\n",
        "\n",
        "# img_dir = r\"D:\\cifar-10\\train\\train\"\n",
        "# labels_file = r\"D:\\cifar-10\\trainLabels.csv\"\n",
        "full_dataset = CIFAR10Dataset(img_dir=img_dir, labels_file=labels_file, transform=transform)\n",
        "\n",
        "# class names in label-id order\n",
        "class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n",
        "\n",
        "# split into train / validation sets (fixed seed for a reproducible split)\n",
        "train_size = 45000\n",
        "val_size = 5000\n",
        "generator = torch.Generator().manual_seed(42)\n",
        "train_dataset, val_dataset = torch.utils.data.random_split(\n",
        "    full_dataset,\n",
        "    [train_size, val_size],\n",
        "    generator=generator\n",
        ")\n",
        "\n",
        "# basic dataset statistics\n",
        "print(f\"完整数据集大小: {len(full_dataset)}\")\n",
        "print(f\"训练集大小: {len(train_dataset)}\")\n",
        "print(f\"验证集大小: {len(val_dataset)}\")\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 12,
      "metadata": {
        "id": "1akKUts84pNu"
      },
      "outputs": [],
      "source": [
        "def cal_mean_std(ds):\n",
        "    \"\"\"Return the per-channel mean and std, averaged over every image in ds.\"\"\"\n",
        "    mean_total = 0.\n",
        "    std_total = 0.\n",
        "    for img, _ in ds:\n",
        "        # reduce over the spatial dims (H, W), keeping one value per channel\n",
        "        mean_total = mean_total + img.mean(dim=(1, 2))\n",
        "        std_total = std_total + img.std(dim=(1, 2))\n",
        "    n_images = len(ds)\n",
        "    return mean_total / n_images, std_total / n_images\n",
        "# cal_mean_std(train_dataset)"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "HrTSD6iw4pNu"
      },
      "source": [
        "# 把数据集划分为训练集45000和验证集5000，并给DataLoader"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 13,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.144223Z",
          "start_time": "2025-06-26T01:43:33.135368Z"
        },
        "id": "qK_zQ__r4pNu"
      },
      "outputs": [],
      "source": [
        "\n",
        "# create the data loaders\n",
        "batch_size = 64\n",
        "train_loader = torch.utils.data.DataLoader(\n",
        "    train_dataset,\n",
        "    batch_size=batch_size,\n",
        "    shuffle=True # reshuffle the training set at the start of every epoch\n",
        ")\n",
        "\n",
        "val_loader = torch.utils.data.DataLoader(\n",
        "    val_dataset,\n",
        "    batch_size=batch_size,\n",
        "    shuffle=False\n",
        ")\n",
        "\n",
        "\n"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "KUyAkERd4pNu"
      },
      "source": [
        "# 搭建模型"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 14,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "j17TXWWx4pNu",
        "outputId": "c7ebea5d-4e77-4186-c24a-b9221a1f27f1"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "torch.Size([20, 100])\n"
          ]
        }
      ],
      "source": [
        "# understand each API by writing a small standalone example\n",
        "import torch.nn as nn\n",
        "m=nn.BatchNorm1d(100)\n",
        "x=torch.randn(20,100)\n",
        "print(m(x).shape)"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "cFvbdkKd4pNu"
      },
      "source": [
        "# 复现VGG11简单版"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 15,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.152657Z",
          "start_time": "2025-06-26T01:43:33.148120Z"
        },
        "id": "UOfee2qW4pNu"
      },
      "outputs": [],
      "source": [
        "import torch.nn as nn\n",
        "import torch.nn.functional as F\n",
        "\n",
        "class VGG11(nn.Module):\n",
        "    \"\"\"Simplified VGG11 (8 conv + 3 FC layers, with BatchNorm) for CIFAR-10.\"\"\"\n",
        "    def __init__(self):\n",
        "        super().__init__()\n",
        "\n",
        "        # convolutional-layer configuration of the VGG11 architecture\n",
        "        # conv block 1 - 1 conv layer\n",
        "        self.conv_block1 = nn.Sequential(\n",
        "            nn.Conv2d(3, 64, kernel_size=3, padding=1),\n",
        "            nn.BatchNorm2d(64),\n",
        "            nn.ReLU(),\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)\n",
        "        )\n",
        "\n",
        "        # conv block 2 - 1 conv layer\n",
        "        self.conv_block2 = nn.Sequential(\n",
        "            nn.Conv2d(64, 128, kernel_size=3, padding=1),\n",
        "            nn.BatchNorm2d(128),\n",
        "            nn.ReLU(),\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)\n",
        "        )\n",
        "\n",
        "        # conv block 3 - 2 conv layers\n",
        "        self.conv_block3 = nn.Sequential(\n",
        "            nn.Conv2d(128, 256, kernel_size=3, padding=1),\n",
        "            nn.BatchNorm2d(256),\n",
        "            nn.ReLU(),\n",
        "            nn.Conv2d(256, 256, kernel_size=3, padding=1),\n",
        "            nn.BatchNorm2d(256),\n",
        "            nn.ReLU(),\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)\n",
        "        )\n",
        "\n",
        "        # conv block 4 - 2 conv layers\n",
        "        self.conv_block4 = nn.Sequential(\n",
        "            nn.Conv2d(256, 512, kernel_size=3, padding=1),\n",
        "            nn.BatchNorm2d(512),\n",
        "            nn.ReLU(),\n",
        "            nn.Conv2d(512, 512, kernel_size=3, padding=1),\n",
        "            nn.BatchNorm2d(512),\n",
        "            nn.ReLU(),\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)\n",
        "        )\n",
        "\n",
        "        # conv block 5 - 2 conv layers\n",
        "        self.conv_block5 = nn.Sequential(\n",
        "            nn.Conv2d(512, 512, kernel_size=3, padding=1),\n",
        "            nn.BatchNorm2d(512),\n",
        "            nn.ReLU(),\n",
        "            nn.Conv2d(512, 512, kernel_size=3, padding=1),\n",
        "            nn.BatchNorm2d(512),\n",
        "            nn.ReLU(),\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)\n",
        "        )\n",
        "\n",
        "        # fully connected head - adapted for the 10 CIFAR-10 classes\n",
        "        self.classifier = nn.Sequential(\n",
        "            nn.Linear(512, 4096),\n",
        "            nn.ReLU(),\n",
        "            nn.Dropout(0.5),\n",
        "            nn.Linear(4096, 1024),\n",
        "            nn.ReLU(),\n",
        "            nn.Dropout(0.5),\n",
        "            nn.Linear(1024, 10)  # 10-way classification\n",
        "        )\n",
        "\n",
        "        # initialize weights\n",
        "        self.init_weights()\n",
        "\n",
        "    def init_weights(self):\n",
        "        \"\"\"Initialize conv/linear weights with Xavier uniform; zero the biases.\"\"\"\n",
        "        for m in self.modules():\n",
        "            if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear):\n",
        "                nn.init.xavier_uniform_(m.weight)\n",
        "                if m.bias is not None:\n",
        "                    nn.init.zeros_(m.bias)\n",
        "\n",
        "    def forward(self, x):\n",
        "        \"\"\"Return class logits for a batch of images.\"\"\"\n",
        "        # forward through the five conv blocks\n",
        "        x = self.conv_block1(x)\n",
        "        x = self.conv_block2(x)\n",
        "        x = self.conv_block3(x)\n",
        "        x = self.conv_block4(x)\n",
        "        x = self.conv_block5(x)\n",
        "\n",
        "        # flatten - a 32x32 CIFAR-10 input is 1x1 after 5 stride-2 max-pools\n",
        "        x = x.view(x.size(0), -1)\n",
        "\n",
        "        # classifier head\n",
        "        x = self.classifier(x)\n",
        "\n",
        "        return x\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 16,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.185031Z",
          "start_time": "2025-06-26T01:43:33.152657Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "5Ll8FXqD4pNv",
        "outputId": "6e8980fc-20c8-4c69-8f41-31dfcb15316f"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "批次图像形状: torch.Size([64, 3, 32, 32])\n",
            "批次标签形状: torch.Size([64])\n",
            "----------------------------------------------------------------------------------------------------\n",
            "torch.Size([64, 10])\n"
          ]
        }
      ],
      "source": [
        "# instantiate the model\n",
        "model = VGG11()\n",
        "\n",
        "# grab the first batch from train_loader\n",
        "dataiter = iter(train_loader)\n",
        "images, labels = next(dataiter)\n",
        "\n",
        "# inspect the batch shapes\n",
        "print(\"批次图像形状:\", images.shape)\n",
        "print(\"批次标签形状:\", labels.shape)\n",
        "\n",
        "\n",
        "print('-'*100)\n",
        "# run a forward pass\n",
        "with torch.no_grad():  # gradients not needed for a shape check\n",
        "    outputs = model(images)\n",
        "\n",
        "\n",
        "print(outputs.shape)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 17,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.203053Z",
          "start_time": "2025-06-26T01:43:33.199532Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "V8zEsAla4pNv",
        "outputId": "a8e25a52-093b-494f-ca19-637a03ade36e"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "需要求梯度的参数总量: 15532810\n",
            "模型总参数量: 15532810\n",
            "\n",
            "各层参数量明细:\n",
            "conv_block1.0.weight: 1728 参数\n",
            "conv_block1.0.bias: 64 参数\n",
            "conv_block1.1.weight: 64 参数\n",
            "conv_block1.1.bias: 64 参数\n",
            "conv_block2.0.weight: 73728 参数\n",
            "conv_block2.0.bias: 128 参数\n",
            "conv_block2.1.weight: 128 参数\n",
            "conv_block2.1.bias: 128 参数\n",
            "conv_block3.0.weight: 294912 参数\n",
            "conv_block3.0.bias: 256 参数\n",
            "conv_block3.1.weight: 256 参数\n",
            "conv_block3.1.bias: 256 参数\n",
            "conv_block3.3.weight: 589824 参数\n",
            "conv_block3.3.bias: 256 参数\n",
            "conv_block3.4.weight: 256 参数\n",
            "conv_block3.4.bias: 256 参数\n",
            "conv_block4.0.weight: 1179648 参数\n",
            "conv_block4.0.bias: 512 参数\n",
            "conv_block4.1.weight: 512 参数\n",
            "conv_block4.1.bias: 512 参数\n",
            "conv_block4.3.weight: 2359296 参数\n",
            "conv_block4.3.bias: 512 参数\n",
            "conv_block4.4.weight: 512 参数\n",
            "conv_block4.4.bias: 512 参数\n",
            "conv_block5.0.weight: 2359296 参数\n",
            "conv_block5.0.bias: 512 参数\n",
            "conv_block5.1.weight: 512 参数\n",
            "conv_block5.1.bias: 512 参数\n",
            "conv_block5.3.weight: 2359296 参数\n",
            "conv_block5.3.bias: 512 参数\n",
            "conv_block5.4.weight: 512 参数\n",
            "conv_block5.4.bias: 512 参数\n",
            "classifier.0.weight: 2097152 参数\n",
            "classifier.0.bias: 4096 参数\n",
            "classifier.3.weight: 4194304 参数\n",
            "classifier.3.bias: 1024 参数\n",
            "classifier.6.weight: 10240 参数\n",
            "classifier.6.bias: 10 参数\n"
          ]
        }
      ],
      "source": [
        "# count the model's parameters\n",
        "# total number of parameters that require gradients\n",
        "total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
        "print(f\"需要求梯度的参数总量: {total_params}\")\n",
        "\n",
        "# total number of parameters overall\n",
        "all_params = sum(p.numel() for p in model.parameters())\n",
        "print(f\"模型总参数量: {all_params}\")\n",
        "\n",
        "# per-parameter breakdown by name\n",
        "print(\"\\n各层参数量明细:\")\n",
        "for name, param in model.named_parameters():\n",
        "    print(f\"{name}: {param.numel()} 参数\")\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 18,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "0XQuUiCe4pNv",
        "outputId": "5a51c636-6387-4d0d-e880-5dd01513c170"
      },
      "outputs": [
        {
          "data": {
            "text/plain": [
              "294912"
            ]
          },
          "execution_count": 18,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "128*3*3*256"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "1B2dFDE14pNv"
      },
      "source": [
        "# 各层参数量明细:\n",
        "conv1.weight: 288 参数 3*3*1*32\n",
        "conv1.bias: 32 参数\n",
        "conv2.weight: 9216 参数 3*3*32*32\n",
        "conv2.bias: 32 参数  \n",
        "conv3.weight: 18432 参数 3*3*32*64\n",
        "conv3.bias: 64 参数\n",
        "conv4.weight: 36864 参数  3*3*64*64\n",
        "conv4.bias: 64 参数\n",
        "conv5.weight: 73728 参数\n",
        "conv5.bias: 128 参数\n",
        "conv6.weight: 147456 参数\n",
        "conv6.bias: 128 参数\n",
        "fc1.weight: 294912 参数 128*3*3*256\n",
        "fc1.bias: 256 参数\n",
        "fc2.weight: 2560 参数\n",
        "fc2.bias: 10 参数"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 19,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.217395Z",
          "start_time": "2025-06-26T01:43:33.203561Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "al9xZTJQ4pNv",
        "outputId": "338b360d-b9e6-4dde-bd0a-6b8cd02d52e1"
      },
      "outputs": [
        {
          "data": {
            "text/plain": [
              "OrderedDict([('conv_block1.0.weight',\n",
              "              tensor([[[[ 0.0028,  0.0078,  0.0442],\n",
              "                        [-0.0986,  0.0879,  0.0122],\n",
              "                        [-0.0368, -0.0590, -0.0346]],\n",
              "              \n",
              "                       [[ 0.0268, -0.0082,  0.0600],\n",
              "                        [ 0.0061, -0.0936, -0.0336],\n",
              "                        [-0.0574,  0.0832, -0.0316]],\n",
              "              \n",
              "                       [[-0.0422,  0.0544, -0.0609],\n",
              "                        [ 0.0104,  0.0675,  0.0935],\n",
              "                        [ 0.0631, -0.0390,  0.0218]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0932,  0.0779, -0.0382],\n",
              "                        [ 0.0173,  0.0740,  0.0403],\n",
              "                        [ 0.0646,  0.0486,  0.0380]],\n",
              "              \n",
              "                       [[-0.0537, -0.0492,  0.0778],\n",
              "                        [-0.0734,  0.0022, -0.0086],\n",
              "                        [ 0.0326, -0.0873, -0.0674]],\n",
              "              \n",
              "                       [[-0.0191, -0.0970,  0.0986],\n",
              "                        [-0.0974, -0.0375,  0.0490],\n",
              "                        [-0.0548,  0.0427, -0.0162]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0409, -0.0326, -0.0596],\n",
              "                        [-0.0136, -0.0307,  0.0948],\n",
              "                        [-0.0638, -0.0528,  0.0977]],\n",
              "              \n",
              "                       [[-0.0490, -0.0253, -0.0329],\n",
              "                        [ 0.0802, -0.0208,  0.0177],\n",
              "                        [ 0.0794, -0.0491, -0.0312]],\n",
              "              \n",
              "                       [[-0.0407, -0.0913,  0.0126],\n",
              "                        [-0.0741, -0.0712, -0.0969],\n",
              "                        [ 0.0790, -0.0463,  0.0290]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0732, -0.0208, -0.0529],\n",
              "                        [ 0.0040,  0.0515, -0.0556],\n",
              "                        [ 0.0595, -0.0653,  0.0939]],\n",
              "              \n",
              "                       [[ 0.0003, -0.0868, -0.0258],\n",
              "                        [-0.0056, -0.0059, -0.0034],\n",
              "                        [ 0.0539, -0.0827, -0.0929]],\n",
              "              \n",
              "                       [[ 0.0943,  0.0635,  0.0168],\n",
              "                        [-0.0334,  0.0529, -0.0629],\n",
              "                        [ 0.0654,  0.0276, -0.0963]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0690, -0.0593,  0.0433],\n",
              "                        [-0.0114, -0.0567,  0.0029],\n",
              "                        [ 0.0983, -0.0582,  0.0872]],\n",
              "              \n",
              "                       [[-0.0701,  0.0776,  0.0276],\n",
              "                        [ 0.0257, -0.0025, -0.0662],\n",
              "                        [ 0.0160,  0.0559,  0.0186]],\n",
              "              \n",
              "                       [[-0.0644,  0.0293,  0.0031],\n",
              "                        [-0.0870, -0.0949, -0.0569],\n",
              "                        [-0.0262, -0.0615,  0.0536]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0048,  0.0531, -0.0354],\n",
              "                        [ 0.0091,  0.0680, -0.0962],\n",
              "                        [ 0.0740, -0.0583,  0.0720]],\n",
              "              \n",
              "                       [[ 0.0161,  0.0837, -0.0616],\n",
              "                        [-0.0305, -0.0003, -0.0200],\n",
              "                        [ 0.0077,  0.0879,  0.0982]],\n",
              "              \n",
              "                       [[ 0.0830, -0.0086, -0.0118],\n",
              "                        [-0.0040, -0.0614,  0.0460],\n",
              "                        [ 0.0545, -0.0298, -0.0619]]]])),\n",
              "             ('conv_block1.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block1.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv_block1.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block1.1.running_mean',\n",
              "              tensor([ 6.2486e-04, -6.9016e-04,  1.3345e-03, -3.1291e-03, -2.5663e-04,\n",
              "                       6.0793e-04, -2.1104e-03,  1.1563e-03, -8.8466e-05, -2.4224e-03,\n",
              "                       5.3618e-04,  5.4382e-03, -1.8275e-03,  7.5523e-04,  3.1025e-04,\n",
              "                       4.1752e-04,  1.4323e-03,  2.6105e-03,  3.7138e-03, -2.5185e-03,\n",
              "                      -1.8035e-03, -3.9838e-03,  3.2250e-04, -3.9314e-04,  1.6225e-03,\n",
              "                      -3.8524e-03,  1.5302e-03,  9.9002e-04,  1.9074e-03, -1.8143e-03,\n",
              "                       8.9210e-04, -2.5363e-03,  3.1141e-03, -2.2944e-04,  3.8449e-03,\n",
              "                      -1.1958e-03, -1.4098e-03, -1.0135e-03, -4.3214e-04, -1.4729e-03,\n",
              "                       1.3516e-03,  1.7865e-03,  2.2213e-03,  3.5702e-04, -1.2829e-03,\n",
              "                      -8.2751e-04, -2.1486e-03, -3.5571e-03,  1.2541e-03, -3.0255e-03,\n",
              "                      -9.7734e-04, -3.1517e-04, -1.0543e-03,  1.7831e-03, -8.0198e-04,\n",
              "                      -1.4087e-03,  6.9951e-04, -4.9231e-03, -1.8032e-03, -1.9736e-03,\n",
              "                       2.1039e-03,  1.3523e-03, -9.9976e-04, -2.2662e-03])),\n",
              "             ('conv_block1.1.running_var',\n",
              "              tensor([0.9025, 0.9077, 0.9252, 0.9162, 0.9036, 0.9079, 0.9045, 0.9016, 0.9076,\n",
              "                      0.9168, 0.9043, 0.9854, 0.9040, 0.9094, 0.9060, 0.9161, 0.9077, 0.9064,\n",
              "                      0.9338, 0.9234, 0.9184, 0.9354, 0.9038, 0.9040, 0.9203, 0.9599, 0.9077,\n",
              "                      0.9271, 0.9059, 0.9113, 0.9077, 0.9113, 0.9297, 0.9015, 0.9378, 0.9044,\n",
              "                      0.9172, 0.9061, 0.9018, 0.9097, 0.9032, 0.9065, 0.9172, 0.9051, 0.9045,\n",
              "                      0.9049, 0.9200, 0.9571, 0.9020, 0.9139, 0.9041, 0.9173, 0.9101, 0.9072,\n",
              "                      0.9023, 0.9016, 0.9054, 0.9905, 0.9238, 0.9038, 0.9042, 0.9043, 0.9068,\n",
              "                      0.9083])),\n",
              "             ('conv_block1.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv_block2.0.weight',\n",
              "              tensor([[[[ 0.0264, -0.0509,  0.0285],\n",
              "                        [-0.0241,  0.0304,  0.0320],\n",
              "                        [ 0.0541, -0.0196,  0.0491]],\n",
              "              \n",
              "                       [[-0.0576, -0.0155,  0.0363],\n",
              "                        [ 0.0119,  0.0316,  0.0207],\n",
              "                        [ 0.0208, -0.0273, -0.0147]],\n",
              "              \n",
              "                       [[ 0.0101,  0.0577,  0.0095],\n",
              "                        [ 0.0043,  0.0488, -0.0518],\n",
              "                        [-0.0057, -0.0388,  0.0160]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0230,  0.0534,  0.0487],\n",
              "                        [ 0.0043, -0.0512, -0.0225],\n",
              "                        [-0.0474, -0.0066, -0.0072]],\n",
              "              \n",
              "                       [[ 0.0212,  0.0522, -0.0556],\n",
              "                        [ 0.0559,  0.0188,  0.0126],\n",
              "                        [ 0.0530,  0.0429, -0.0027]],\n",
              "              \n",
              "                       [[ 0.0516, -0.0523,  0.0087],\n",
              "                        [-0.0293, -0.0471,  0.0372],\n",
              "                        [-0.0138, -0.0384, -0.0579]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0181,  0.0184,  0.0524],\n",
              "                        [-0.0340,  0.0205, -0.0279],\n",
              "                        [-0.0170, -0.0059,  0.0278]],\n",
              "              \n",
              "                       [[ 0.0501, -0.0568,  0.0251],\n",
              "                        [-0.0414,  0.0270, -0.0326],\n",
              "                        [-0.0468,  0.0342, -0.0447]],\n",
              "              \n",
              "                       [[ 0.0444,  0.0334, -0.0417],\n",
              "                        [-0.0150, -0.0020,  0.0245],\n",
              "                        [ 0.0068, -0.0389, -0.0277]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0566,  0.0528, -0.0448],\n",
              "                        [ 0.0375, -0.0176, -0.0020],\n",
              "                        [-0.0544, -0.0460, -0.0582]],\n",
              "              \n",
              "                       [[-0.0192,  0.0112, -0.0367],\n",
              "                        [ 0.0320,  0.0415, -0.0059],\n",
              "                        [ 0.0479, -0.0230,  0.0469]],\n",
              "              \n",
              "                       [[ 0.0091,  0.0549,  0.0140],\n",
              "                        [-0.0475,  0.0241, -0.0479],\n",
              "                        [-0.0285, -0.0530,  0.0350]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0106, -0.0380, -0.0052],\n",
              "                        [ 0.0460, -0.0176, -0.0154],\n",
              "                        [-0.0228,  0.0514, -0.0305]],\n",
              "              \n",
              "                       [[ 0.0531, -0.0194, -0.0140],\n",
              "                        [ 0.0272,  0.0499,  0.0360],\n",
              "                        [ 0.0084, -0.0518, -0.0331]],\n",
              "              \n",
              "                       [[ 0.0417,  0.0398,  0.0346],\n",
              "                        [ 0.0148,  0.0048,  0.0456],\n",
              "                        [ 0.0554, -0.0224,  0.0526]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0223, -0.0425, -0.0141],\n",
              "                        [-0.0527,  0.0507,  0.0318],\n",
              "                        [-0.0285,  0.0255,  0.0112]],\n",
              "              \n",
              "                       [[ 0.0452, -0.0435, -0.0402],\n",
              "                        [-0.0138,  0.0290,  0.0585],\n",
              "                        [-0.0071,  0.0064, -0.0321]],\n",
              "              \n",
              "                       [[ 0.0138,  0.0492, -0.0136],\n",
              "                        [-0.0057,  0.0514, -0.0491],\n",
              "                        [ 0.0064, -0.0211,  0.0015]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0043,  0.0350, -0.0572],\n",
              "                        [-0.0306,  0.0119,  0.0498],\n",
              "                        [-0.0166,  0.0241,  0.0124]],\n",
              "              \n",
              "                       [[-0.0268,  0.0370, -0.0182],\n",
              "                        [-0.0418, -0.0540,  0.0533],\n",
              "                        [ 0.0485,  0.0061, -0.0170]],\n",
              "              \n",
              "                       [[ 0.0037, -0.0036, -0.0155],\n",
              "                        [-0.0481, -0.0472,  0.0022],\n",
              "                        [ 0.0368,  0.0462, -0.0283]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0447, -0.0566,  0.0505],\n",
              "                        [-0.0387, -0.0553,  0.0543],\n",
              "                        [-0.0121,  0.0144,  0.0416]],\n",
              "              \n",
              "                       [[ 0.0491,  0.0517,  0.0239],\n",
              "                        [-0.0507, -0.0245,  0.0567],\n",
              "                        [-0.0302, -0.0346, -0.0369]],\n",
              "              \n",
              "                       [[ 0.0091, -0.0427, -0.0094],\n",
              "                        [ 0.0093,  0.0492, -0.0009],\n",
              "                        [-0.0104, -0.0460, -0.0192]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0478, -0.0010,  0.0428],\n",
              "                        [ 0.0291,  0.0053,  0.0433],\n",
              "                        [-0.0058, -0.0552, -0.0185]],\n",
              "              \n",
              "                       [[-0.0373,  0.0098, -0.0420],\n",
              "                        [ 0.0536,  0.0533,  0.0320],\n",
              "                        [ 0.0533, -0.0470, -0.0469]],\n",
              "              \n",
              "                       [[-0.0349, -0.0562, -0.0560],\n",
              "                        [ 0.0161, -0.0536,  0.0131],\n",
              "                        [-0.0140,  0.0502,  0.0577]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0318,  0.0053,  0.0426],\n",
              "                        [ 0.0038,  0.0461,  0.0392],\n",
              "                        [-0.0476,  0.0262,  0.0168]],\n",
              "              \n",
              "                       [[-0.0150, -0.0027, -0.0021],\n",
              "                        [-0.0282,  0.0528, -0.0093],\n",
              "                        [ 0.0372, -0.0428, -0.0575]],\n",
              "              \n",
              "                       [[-0.0308, -0.0136, -0.0065],\n",
              "                        [-0.0173, -0.0449,  0.0134],\n",
              "                        [ 0.0017, -0.0054,  0.0134]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0282, -0.0159,  0.0555],\n",
              "                        [-0.0382, -0.0111,  0.0349],\n",
              "                        [ 0.0518, -0.0449,  0.0026]],\n",
              "              \n",
              "                       [[ 0.0568, -0.0220, -0.0155],\n",
              "                        [ 0.0019,  0.0378, -0.0358],\n",
              "                        [-0.0392, -0.0342, -0.0534]],\n",
              "              \n",
              "                       [[ 0.0172,  0.0463, -0.0068],\n",
              "                        [ 0.0537,  0.0354, -0.0461],\n",
              "                        [ 0.0378,  0.0568, -0.0585]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0134, -0.0543, -0.0064],\n",
              "                        [-0.0363, -0.0345,  0.0288],\n",
              "                        [-0.0010, -0.0295,  0.0021]],\n",
              "              \n",
              "                       [[-0.0453, -0.0173, -0.0155],\n",
              "                        [-0.0357, -0.0526, -0.0506],\n",
              "                        [-0.0052,  0.0256,  0.0456]],\n",
              "              \n",
              "                       [[-0.0199, -0.0154,  0.0386],\n",
              "                        [-0.0277, -0.0112, -0.0288],\n",
              "                        [-0.0436,  0.0068,  0.0174]]]])),\n",
              "             ('conv_block2.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block2.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1.])),\n",
              "             ('conv_block2.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block2.1.running_mean',\n",
              "              tensor([ 0.0123, -0.0145, -0.0164, -0.0440, -0.0197, -0.0135, -0.0324, -0.0033,\n",
              "                      -0.0542, -0.0373, -0.0450, -0.0280,  0.0359, -0.0052,  0.0025, -0.0014,\n",
              "                      -0.0308,  0.0433,  0.0005,  0.0102,  0.0551, -0.0452,  0.0256,  0.0124,\n",
              "                       0.0136,  0.0902, -0.0553, -0.1117, -0.0249,  0.0415,  0.0351,  0.0171,\n",
              "                       0.0113, -0.0579,  0.0138,  0.0644, -0.0317, -0.0414, -0.0155, -0.0269,\n",
              "                      -0.0643,  0.0721,  0.0204, -0.0392, -0.0314, -0.0224, -0.0037,  0.0056,\n",
              "                       0.0289,  0.0144,  0.0480, -0.0744, -0.0518,  0.0164, -0.0075,  0.0338,\n",
              "                      -0.0056,  0.0154, -0.0348,  0.0212, -0.0020,  0.0220, -0.0039,  0.0166,\n",
              "                      -0.0502, -0.0645, -0.0033,  0.0185, -0.0132, -0.0214,  0.0308, -0.0129,\n",
              "                       0.0615, -0.0468,  0.0169,  0.0149,  0.0427, -0.0418, -0.0132,  0.0205,\n",
              "                      -0.0209, -0.0966, -0.0595,  0.0376,  0.0736,  0.0297,  0.0614,  0.0073,\n",
              "                       0.0846,  0.0966,  0.0415,  0.0307, -0.0970, -0.0156, -0.0567, -0.1286,\n",
              "                      -0.0582, -0.0058,  0.0580, -0.0347,  0.0200, -0.0236, -0.0152,  0.0097,\n",
              "                      -0.0106,  0.0056, -0.0036,  0.0269,  0.0172,  0.0796,  0.0092,  0.0323,\n",
              "                      -0.0463, -0.0406, -0.0247,  0.0331, -0.0556, -0.0244,  0.0503,  0.0299,\n",
              "                      -0.0732, -0.0615,  0.0227,  0.0151, -0.0395,  0.0087,  0.0144, -0.0613])),\n",
              "             ('conv_block2.1.running_var',\n",
              "              tensor([0.9152, 0.9386, 0.9217, 0.9310, 0.9282, 0.9312, 0.9337, 0.9262, 0.9615,\n",
              "                      0.9210, 0.9172, 0.9242, 0.9250, 0.9255, 0.9233, 0.9200, 0.9314, 0.9243,\n",
              "                      0.9303, 0.9211, 0.9157, 0.9464, 0.9350, 0.9438, 0.9260, 0.9374, 0.9214,\n",
              "                      0.9502, 0.9166, 0.9627, 0.9284, 0.9308, 0.9369, 0.9192, 0.9189, 0.9605,\n",
              "                      0.9470, 0.9313, 0.9314, 0.9395, 0.9409, 0.9432, 0.9235, 0.9417, 0.9365,\n",
              "                      0.9241, 0.9359, 0.9202, 0.9268, 0.9228, 0.9287, 0.9927, 0.9242, 0.9261,\n",
              "                      0.9413, 0.9186, 0.9212, 0.9283, 0.9224, 0.9315, 0.9116, 0.9253, 0.9352,\n",
              "                      0.9251, 0.9271, 0.9305, 0.9229, 0.9176, 0.9589, 0.9183, 0.9250, 0.9181,\n",
              "                      0.9352, 0.9183, 0.9264, 0.9235, 0.9356, 0.9248, 0.9246, 0.9139, 0.9430,\n",
              "                      0.9302, 0.9408, 0.9378, 0.9427, 0.9176, 0.9238, 0.9418, 0.9382, 0.9357,\n",
              "                      0.9495, 0.9555, 0.9577, 0.9286, 0.9247, 0.9517, 0.9400, 0.9294, 0.9190,\n",
              "                      0.9184, 0.9250, 0.9169, 0.9290, 0.9324, 0.9192, 0.9335, 0.9376, 0.9289,\n",
              "                      0.9169, 0.9428, 0.9166, 0.9153, 0.9196, 0.9279, 0.9305, 0.9152, 0.9235,\n",
              "                      0.9209, 0.9523, 0.9262, 0.9328, 0.9313, 0.9221, 0.9705, 0.9291, 0.9108,\n",
              "                      0.9148, 0.9882])),\n",
              "             ('conv_block2.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv_block3.0.weight',\n",
              "              tensor([[[[ 3.7521e-02,  3.8778e-02,  2.4111e-02],\n",
              "                        [-4.6382e-03,  3.1842e-02,  3.7176e-02],\n",
              "                        [ 2.5517e-02,  2.0197e-02, -1.4309e-03]],\n",
              "              \n",
              "                       [[-4.7241e-03,  3.4072e-02,  7.1702e-03],\n",
              "                        [-7.3303e-03, -1.1501e-02, -1.1053e-02],\n",
              "                        [-1.6336e-02,  4.0782e-02, -1.2619e-03]],\n",
              "              \n",
              "                       [[ 6.8640e-05, -2.4582e-02,  1.7453e-02],\n",
              "                        [ 6.1747e-03, -3.9035e-02,  3.8293e-02],\n",
              "                        [-4.0512e-02, -1.2041e-02, -9.9447e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-4.0616e-02,  3.9937e-02, -3.5866e-02],\n",
              "                        [ 3.1042e-02, -3.7037e-03, -7.4999e-03],\n",
              "                        [ 2.5858e-02,  6.8664e-03,  2.9328e-02]],\n",
              "              \n",
              "                       [[ 1.7569e-02,  2.5347e-02, -2.3884e-02],\n",
              "                        [-1.6491e-02,  1.3471e-02, -1.9765e-02],\n",
              "                        [-7.5290e-03,  3.0336e-02, -1.4302e-02]],\n",
              "              \n",
              "                       [[ 8.1080e-03,  5.9752e-03, -5.6731e-03],\n",
              "                        [ 2.4153e-02,  3.9052e-02, -2.9394e-02],\n",
              "                        [ 4.0555e-02, -8.9578e-03, -6.6286e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 3.4753e-02, -3.4449e-02,  1.1101e-02],\n",
              "                        [ 1.1504e-02,  3.5948e-02,  2.2166e-02],\n",
              "                        [ 3.6274e-02,  1.0351e-02,  1.1195e-02]],\n",
              "              \n",
              "                       [[-3.3086e-03,  1.3383e-02, -2.7053e-02],\n",
              "                        [-7.1111e-04, -2.8623e-02,  9.8806e-03],\n",
              "                        [-3.7812e-02, -3.1526e-02, -1.2381e-02]],\n",
              "              \n",
              "                       [[-2.9270e-02, -3.0289e-02, -2.8517e-02],\n",
              "                        [-3.8586e-03,  6.1445e-03,  2.1869e-02],\n",
              "                        [-8.2678e-03, -1.5384e-02,  2.7920e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.1840e-02,  1.6277e-02, -3.4678e-02],\n",
              "                        [-3.4971e-02, -2.4717e-02, -2.6524e-02],\n",
              "                        [-4.0512e-02,  1.3905e-02,  4.0864e-02]],\n",
              "              \n",
              "                       [[-3.5668e-03, -1.0993e-03,  2.7711e-02],\n",
              "                        [ 2.2467e-03, -4.8704e-04,  1.5645e-02],\n",
              "                        [-3.8324e-02,  9.4620e-03,  6.8750e-03]],\n",
              "              \n",
              "                       [[-1.5758e-02, -2.1993e-02, -2.5502e-03],\n",
              "                        [-2.7553e-02, -2.5388e-02, -3.3163e-02],\n",
              "                        [-1.5424e-02,  1.5645e-02,  9.4042e-04]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 2.6505e-02,  7.2105e-03, -9.4609e-03],\n",
              "                        [ 8.5075e-03, -3.3068e-02,  1.5585e-02],\n",
              "                        [ 2.0709e-02, -1.7134e-03,  3.4682e-02]],\n",
              "              \n",
              "                       [[-2.2090e-02, -3.0043e-02, -2.8243e-02],\n",
              "                        [-4.5062e-03,  8.3713e-03,  4.0448e-02],\n",
              "                        [ 3.2590e-02, -4.0392e-02, -2.3561e-02]],\n",
              "              \n",
              "                       [[ 2.3389e-02,  2.8993e-02, -3.1643e-02],\n",
              "                        [ 3.7766e-02, -1.8897e-02, -3.7644e-02],\n",
              "                        [-3.6590e-02,  2.2817e-02,  3.4650e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.2270e-02,  1.5832e-02,  1.3989e-03],\n",
              "                        [ 1.6734e-03,  3.8846e-02,  6.1926e-03],\n",
              "                        [-6.1687e-03, -5.7804e-03,  3.5485e-02]],\n",
              "              \n",
              "                       [[-2.7365e-02, -2.8623e-02, -3.6883e-02],\n",
              "                        [-2.4632e-02,  1.8222e-02,  2.3000e-02],\n",
              "                        [-3.4000e-03, -3.3993e-03, -3.7238e-02]],\n",
              "              \n",
              "                       [[ 3.6668e-02,  1.6037e-02,  1.2337e-02],\n",
              "                        [-1.8287e-02,  3.5253e-03, -2.6994e-02],\n",
              "                        [-3.2624e-02,  1.7606e-02, -2.6741e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-1.0146e-02,  9.2752e-03,  3.2958e-02],\n",
              "                        [ 2.9368e-02, -3.3500e-02,  2.5984e-02],\n",
              "                        [ 3.0782e-02,  2.2447e-02,  3.3014e-02]],\n",
              "              \n",
              "                       [[-1.1527e-02, -3.2210e-02, -4.2108e-03],\n",
              "                        [-2.8821e-02, -2.8492e-02, -2.3150e-02],\n",
              "                        [-3.4798e-02,  1.9770e-02, -2.9198e-03]],\n",
              "              \n",
              "                       [[ 1.1023e-02,  7.2559e-04,  1.5290e-02],\n",
              "                        [-2.5007e-02,  6.9797e-05,  1.8414e-02],\n",
              "                        [-3.9152e-02, -1.4341e-02, -3.8939e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 2.1868e-02,  3.8321e-02,  1.8787e-02],\n",
              "                        [ 2.9623e-02,  3.6281e-02,  2.0889e-02],\n",
              "                        [ 1.8532e-02, -2.5772e-02, -3.0378e-02]],\n",
              "              \n",
              "                       [[ 1.7322e-02, -2.2901e-02, -3.3475e-03],\n",
              "                        [-1.4752e-02, -3.8767e-02,  1.0138e-02],\n",
              "                        [ 2.8690e-04, -3.5394e-02,  2.6670e-02]],\n",
              "              \n",
              "                       [[ 2.3221e-04, -2.2330e-03,  6.7825e-04],\n",
              "                        [-3.9010e-02,  4.3557e-03, -1.8360e-02],\n",
              "                        [ 2.5211e-02, -3.0280e-02,  4.1102e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 1.9091e-02, -1.5268e-03,  1.7545e-02],\n",
              "                        [ 2.0178e-02, -1.2759e-02, -1.3130e-02],\n",
              "                        [-1.5300e-02, -1.0470e-02, -1.7203e-02]],\n",
              "              \n",
              "                       [[ 1.4240e-02, -1.1313e-02,  1.1634e-02],\n",
              "                        [-3.9295e-02,  1.6362e-02, -3.8210e-02],\n",
              "                        [ 2.4512e-02, -2.1456e-02,  3.9275e-02]],\n",
              "              \n",
              "                       [[-1.6830e-02,  2.6211e-02,  4.1587e-02],\n",
              "                        [ 3.6127e-02, -8.9651e-03, -3.7198e-02],\n",
              "                        [ 5.8351e-03,  2.9791e-02, -8.6771e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-7.9884e-03, -1.2523e-02,  2.3682e-02],\n",
              "                        [ 3.7075e-02, -1.0741e-02,  2.4812e-02],\n",
              "                        [-2.1142e-02, -2.9118e-02,  3.9777e-03]],\n",
              "              \n",
              "                       [[ 2.8608e-02,  3.3700e-02, -3.6230e-03],\n",
              "                        [-1.3562e-02,  7.6847e-03,  2.8076e-02],\n",
              "                        [-2.6211e-02,  2.8559e-03,  1.3135e-02]],\n",
              "              \n",
              "                       [[-1.1459e-02, -1.5634e-02,  7.6323e-03],\n",
              "                        [ 2.2107e-03,  5.5680e-03, -3.3982e-02],\n",
              "                        [-1.5038e-02, -7.9393e-03, -2.1465e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-1.3003e-02,  1.6857e-02,  1.0289e-03],\n",
              "                        [ 8.0148e-03, -3.9898e-02,  3.2523e-02],\n",
              "                        [-3.6476e-03, -3.5285e-02, -1.0822e-02]],\n",
              "              \n",
              "                       [[-3.4677e-03, -7.0638e-03,  3.0695e-02],\n",
              "                        [-4.9523e-03,  4.1180e-02,  2.0643e-03],\n",
              "                        [ 3.4327e-02, -2.9376e-02, -2.6041e-02]],\n",
              "              \n",
              "                       [[-2.1761e-03,  1.5458e-02, -3.8729e-03],\n",
              "                        [-9.3750e-03,  2.1600e-02, -7.5700e-03],\n",
              "                        [-1.1306e-02,  2.3441e-02, -2.5460e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 2.8326e-02, -6.1121e-03,  9.8928e-03],\n",
              "                        [-3.5540e-02, -3.9002e-02, -1.1853e-03],\n",
              "                        [-3.2734e-02, -6.3075e-03, -1.4394e-03]],\n",
              "              \n",
              "                       [[ 4.8515e-03, -1.8783e-02,  1.7184e-02],\n",
              "                        [-1.1618e-02, -2.6861e-03,  3.7521e-02],\n",
              "                        [ 1.3900e-02,  2.8464e-02,  1.7517e-02]],\n",
              "              \n",
              "                       [[-2.0533e-02, -1.1232e-02, -2.4575e-02],\n",
              "                        [-3.8104e-02,  4.0553e-02,  1.3786e-02],\n",
              "                        [-1.4574e-02, -1.3440e-02,  4.9739e-03]]]])),\n",
              "             ('conv_block3.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block3.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1.])),\n",
              "             ('conv_block3.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block3.1.running_mean',\n",
              "              tensor([-0.0621,  0.0077, -0.0283, -0.0203,  0.0879,  0.0331, -0.0669,  0.1067,\n",
              "                       0.0046, -0.0649, -0.0960, -0.0788,  0.0208,  0.0008, -0.0872,  0.0066,\n",
              "                      -0.0311,  0.0255,  0.0327,  0.0425, -0.0538,  0.0093, -0.0033, -0.0991,\n",
              "                      -0.0212, -0.0209,  0.0346,  0.0708,  0.0607,  0.0885, -0.0529,  0.0185,\n",
              "                       0.0199, -0.1153, -0.0329, -0.0122, -0.0527,  0.0254, -0.1018, -0.0114,\n",
              "                       0.1165,  0.0104, -0.0572, -0.0161, -0.0234,  0.0109,  0.0422,  0.0234,\n",
              "                      -0.0382,  0.0202,  0.0333,  0.0087, -0.0454, -0.0147,  0.0195, -0.0478,\n",
              "                       0.0284, -0.0626,  0.0049, -0.0465, -0.0120,  0.0322, -0.0400,  0.0101,\n",
              "                      -0.0136, -0.0381, -0.0296,  0.0681, -0.0402, -0.0233, -0.0671, -0.0063,\n",
              "                      -0.0360, -0.0032,  0.0065, -0.0516,  0.0433, -0.0255,  0.0329,  0.0043,\n",
              "                      -0.0040, -0.0345, -0.0256,  0.0213, -0.0883,  0.0472,  0.0460,  0.0582,\n",
              "                       0.0143,  0.0164, -0.0747, -0.0999,  0.0512,  0.0089, -0.0012,  0.0095,\n",
              "                       0.0205,  0.0463,  0.0214, -0.0325, -0.0142, -0.1042,  0.0351, -0.0447,\n",
              "                      -0.0648,  0.0095,  0.0098,  0.0018, -0.0487, -0.0016,  0.0193,  0.0529,\n",
              "                      -0.0377,  0.0837,  0.0574, -0.0110, -0.0938, -0.0203, -0.0118, -0.0409,\n",
              "                       0.0135,  0.0533,  0.1117,  0.1023, -0.1375, -0.0231, -0.0962, -0.0146,\n",
              "                      -0.0388,  0.0184,  0.0652,  0.0623,  0.0377, -0.0232,  0.0449,  0.0291,\n",
              "                       0.0469,  0.0518, -0.0139, -0.0203, -0.0796, -0.0222,  0.1088,  0.0060,\n",
              "                       0.0115, -0.0169,  0.0580,  0.0379,  0.0370, -0.0619, -0.0325, -0.1028,\n",
              "                       0.0613, -0.0106, -0.0336,  0.0614,  0.0077,  0.0338, -0.0488, -0.0729,\n",
              "                      -0.0032,  0.0496,  0.0171, -0.0716,  0.0060, -0.0186, -0.0458, -0.0741,\n",
              "                       0.0571,  0.0087,  0.0434,  0.0008, -0.0038,  0.0488, -0.0386,  0.0529,\n",
              "                      -0.0079, -0.0443, -0.0845, -0.0499, -0.0283,  0.0376, -0.0319,  0.0338,\n",
              "                       0.0282, -0.0091, -0.0146,  0.0175, -0.0735,  0.0117, -0.0557, -0.0153,\n",
              "                      -0.0013, -0.0169,  0.0899, -0.0885,  0.0571,  0.0107,  0.0375, -0.0248,\n",
              "                      -0.0453, -0.0063, -0.0491, -0.0254,  0.0273, -0.0090, -0.0098,  0.0201,\n",
              "                      -0.0608, -0.0215, -0.0373, -0.0916, -0.0290, -0.0363, -0.0090, -0.0242,\n",
              "                      -0.0363, -0.0199, -0.0578, -0.0091,  0.0747,  0.0575,  0.0254, -0.0033,\n",
              "                       0.0077,  0.0721,  0.0310,  0.0110,  0.0718,  0.0664,  0.0006,  0.1150,\n",
              "                      -0.0010,  0.0137,  0.0261, -0.0453,  0.0420,  0.0183,  0.0358,  0.0648,\n",
              "                       0.0667, -0.0157, -0.0262,  0.0218, -0.0401,  0.0337, -0.0157,  0.0149,\n",
              "                       0.1045,  0.0494, -0.0717,  0.0123,  0.0216,  0.0027, -0.0406,  0.0429])),\n",
              "             ('conv_block3.1.running_var',\n",
              "              tensor([0.9398, 0.9216, 0.9257, 0.9229, 0.9584, 0.9355, 0.9587, 0.9589, 0.9264,\n",
              "                      0.9276, 0.9491, 0.9445, 0.9231, 0.9317, 0.9330, 0.9276, 0.9288, 0.9292,\n",
              "                      0.9333, 0.9482, 0.9294, 0.9374, 0.9211, 0.9404, 0.9302, 0.9313, 0.9307,\n",
              "                      0.9621, 0.9554, 0.9403, 0.9243, 0.9414, 0.9241, 0.9403, 0.9326, 0.9216,\n",
              "                      0.9628, 0.9309, 0.9357, 0.9283, 0.9444, 0.9289, 0.9538, 0.9280, 0.9397,\n",
              "                      0.9243, 0.9541, 0.9208, 0.9436, 0.9383, 0.9249, 0.9265, 0.9323, 0.9385,\n",
              "                      0.9319, 0.9354, 0.9261, 0.9354, 0.9339, 0.9385, 0.9238, 0.9258, 0.9378,\n",
              "                      0.9487, 0.9253, 0.9307, 0.9402, 0.9313, 0.9262, 0.9285, 0.9278, 0.9451,\n",
              "                      0.9231, 0.9188, 0.9257, 0.9527, 0.9413, 0.9173, 0.9401, 0.9241, 0.9293,\n",
              "                      0.9262, 0.9377, 0.9299, 0.9406, 0.9465, 0.9300, 0.9408, 0.9396, 0.9367,\n",
              "                      0.9614, 0.9381, 0.9451, 0.9225, 0.9382, 0.9274, 0.9337, 0.9371, 0.9624,\n",
              "                      0.9423, 0.9249, 0.9531, 0.9335, 0.9357, 0.9324, 0.9350, 0.9350, 0.9455,\n",
              "                      0.9488, 0.9372, 0.9296, 0.9319, 0.9332, 0.9502, 0.9579, 0.9360, 0.9386,\n",
              "                      0.9309, 0.9476, 0.9243, 0.9530, 0.9324, 0.9464, 0.9367, 0.9441, 0.9228,\n",
              "                      0.9533, 0.9438, 0.9291, 0.9294, 0.9364, 0.9271, 0.9379, 0.9256, 0.9570,\n",
              "                      0.9320, 0.9286, 0.9306, 0.9265, 0.9224, 0.9385, 0.9303, 0.9532, 0.9253,\n",
              "                      0.9291, 0.9339, 0.9376, 0.9230, 0.9265, 0.9433, 0.9535, 0.9350, 0.9304,\n",
              "                      0.9302, 0.9475, 0.9383, 0.9381, 0.9364, 0.9460, 0.9469, 0.9240, 0.9421,\n",
              "                      0.9555, 0.9269, 0.9503, 0.9335, 0.9362, 0.9295, 0.9441, 0.9426, 0.9297,\n",
              "                      0.9324, 0.9294, 0.9306, 0.9415, 0.9285, 0.9305, 0.9310, 0.9501, 0.9511,\n",
              "                      0.9366, 0.9375, 0.9353, 0.9331, 0.9322, 0.9339, 0.9462, 0.9231, 0.9429,\n",
              "                      0.9388, 0.9363, 0.9287, 0.9245, 0.9421, 0.9443, 0.9548, 0.9330, 0.9297,\n",
              "                      0.9359, 0.9495, 0.9255, 0.9443, 0.9314, 0.9359, 0.9352, 0.9363, 0.9221,\n",
              "                      0.9343, 0.9189, 0.9271, 0.9329, 0.9411, 0.9432, 0.9581, 0.9341, 0.9344,\n",
              "                      0.9338, 0.9292, 0.9338, 0.9360, 0.9545, 0.9560, 0.9286, 0.9273, 0.9262,\n",
              "                      0.9267, 0.9423, 0.9353, 0.9405, 0.9521, 0.9290, 0.9559, 0.9565, 0.9372,\n",
              "                      0.9239, 0.9348, 0.9209, 0.9212, 0.9400, 0.9350, 0.9256, 0.9354, 0.9282,\n",
              "                      0.9242, 0.9370, 0.9463, 0.9311, 0.9277, 0.9322, 0.9490, 0.9284, 0.9319,\n",
              "                      0.9290, 0.9284, 0.9249, 0.9496])),\n",
              "             ('conv_block3.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv_block3.3.weight',\n",
              "              tensor([[[[-0.0135,  0.0272, -0.0205],\n",
              "                        [ 0.0261,  0.0331, -0.0353],\n",
              "                        [ 0.0133,  0.0083,  0.0272]],\n",
              "              \n",
              "                       [[ 0.0295, -0.0198, -0.0116],\n",
              "                        [-0.0306,  0.0103,  0.0307],\n",
              "                        [-0.0329, -0.0061,  0.0139]],\n",
              "              \n",
              "                       [[-0.0006, -0.0319,  0.0029],\n",
              "                        [ 0.0019, -0.0106,  0.0136],\n",
              "                        [-0.0275,  0.0251, -0.0338]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0056, -0.0304, -0.0165],\n",
              "                        [-0.0313, -0.0045,  0.0269],\n",
              "                        [-0.0242,  0.0147,  0.0279]],\n",
              "              \n",
              "                       [[-0.0032, -0.0074,  0.0208],\n",
              "                        [ 0.0165, -0.0260,  0.0194],\n",
              "                        [-0.0022, -0.0093,  0.0164]],\n",
              "              \n",
              "                       [[-0.0257,  0.0170,  0.0008],\n",
              "                        [-0.0094,  0.0101,  0.0038],\n",
              "                        [-0.0029,  0.0060, -0.0052]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0256, -0.0228, -0.0339],\n",
              "                        [ 0.0121, -0.0235, -0.0310],\n",
              "                        [-0.0258, -0.0354, -0.0066]],\n",
              "              \n",
              "                       [[ 0.0249, -0.0049,  0.0132],\n",
              "                        [-0.0018, -0.0293, -0.0275],\n",
              "                        [-0.0281,  0.0182, -0.0085]],\n",
              "              \n",
              "                       [[ 0.0161,  0.0210,  0.0154],\n",
              "                        [ 0.0342,  0.0245,  0.0107],\n",
              "                        [ 0.0080, -0.0253,  0.0336]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0094, -0.0034, -0.0182],\n",
              "                        [-0.0302,  0.0142,  0.0250],\n",
              "                        [ 0.0038,  0.0131,  0.0108]],\n",
              "              \n",
              "                       [[ 0.0067,  0.0265,  0.0010],\n",
              "                        [-0.0273,  0.0059, -0.0326],\n",
              "                        [ 0.0343,  0.0100, -0.0320]],\n",
              "              \n",
              "                       [[ 0.0005, -0.0341,  0.0312],\n",
              "                        [-0.0235,  0.0316, -0.0123],\n",
              "                        [-0.0109, -0.0012, -0.0049]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0261, -0.0179,  0.0108],\n",
              "                        [-0.0136,  0.0033, -0.0243],\n",
              "                        [ 0.0330, -0.0035, -0.0335]],\n",
              "              \n",
              "                       [[-0.0080,  0.0095,  0.0141],\n",
              "                        [ 0.0306,  0.0040,  0.0200],\n",
              "                        [ 0.0121,  0.0080, -0.0347]],\n",
              "              \n",
              "                       [[ 0.0091,  0.0096,  0.0274],\n",
              "                        [-0.0138, -0.0237, -0.0213],\n",
              "                        [-0.0199,  0.0047, -0.0084]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0209,  0.0156, -0.0320],\n",
              "                        [-0.0213, -0.0318,  0.0094],\n",
              "                        [-0.0282,  0.0295, -0.0342]],\n",
              "              \n",
              "                       [[-0.0038, -0.0295, -0.0039],\n",
              "                        [ 0.0312,  0.0269,  0.0341],\n",
              "                        [ 0.0222, -0.0217,  0.0011]],\n",
              "              \n",
              "                       [[-0.0165, -0.0090,  0.0333],\n",
              "                        [-0.0272, -0.0073, -0.0239],\n",
              "                        [-0.0242, -0.0305,  0.0144]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0154, -0.0327,  0.0067],\n",
              "                        [-0.0191, -0.0126, -0.0228],\n",
              "                        [-0.0160, -0.0238,  0.0279]],\n",
              "              \n",
              "                       [[-0.0162, -0.0236, -0.0143],\n",
              "                        [-0.0288, -0.0175,  0.0091],\n",
              "                        [-0.0058,  0.0278, -0.0124]],\n",
              "              \n",
              "                       [[-0.0129,  0.0298,  0.0214],\n",
              "                        [ 0.0117,  0.0106,  0.0337],\n",
              "                        [ 0.0043,  0.0094,  0.0292]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0276,  0.0303, -0.0275],\n",
              "                        [ 0.0155, -0.0169,  0.0270],\n",
              "                        [-0.0222,  0.0152, -0.0325]],\n",
              "              \n",
              "                       [[ 0.0328,  0.0189, -0.0116],\n",
              "                        [ 0.0343, -0.0339, -0.0059],\n",
              "                        [-0.0035,  0.0147,  0.0075]],\n",
              "              \n",
              "                       [[-0.0105,  0.0059, -0.0014],\n",
              "                        [ 0.0241, -0.0248,  0.0241],\n",
              "                        [-0.0202, -0.0055, -0.0146]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0095, -0.0358,  0.0079],\n",
              "                        [-0.0272, -0.0213, -0.0117],\n",
              "                        [-0.0157,  0.0157, -0.0148]],\n",
              "              \n",
              "                       [[ 0.0178, -0.0239, -0.0319],\n",
              "                        [-0.0125,  0.0056, -0.0008],\n",
              "                        [ 0.0351, -0.0085,  0.0017]],\n",
              "              \n",
              "                       [[-0.0195,  0.0217,  0.0222],\n",
              "                        [ 0.0230, -0.0318,  0.0158],\n",
              "                        [ 0.0168,  0.0138,  0.0147]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0278,  0.0067,  0.0281],\n",
              "                        [-0.0238,  0.0309,  0.0210],\n",
              "                        [-0.0053, -0.0255,  0.0109]],\n",
              "              \n",
              "                       [[-0.0320, -0.0070,  0.0150],\n",
              "                        [ 0.0082, -0.0272, -0.0356],\n",
              "                        [ 0.0296, -0.0347, -0.0021]],\n",
              "              \n",
              "                       [[ 0.0198,  0.0164, -0.0064],\n",
              "                        [ 0.0086,  0.0321, -0.0270],\n",
              "                        [-0.0111,  0.0219,  0.0161]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0330, -0.0285,  0.0040],\n",
              "                        [ 0.0348,  0.0291, -0.0183],\n",
              "                        [ 0.0153,  0.0211,  0.0224]],\n",
              "              \n",
              "                       [[ 0.0230, -0.0137,  0.0284],\n",
              "                        [-0.0337,  0.0160,  0.0012],\n",
              "                        [-0.0190, -0.0116,  0.0188]],\n",
              "              \n",
              "                       [[ 0.0172, -0.0286, -0.0088],\n",
              "                        [-0.0031,  0.0154, -0.0193],\n",
              "                        [ 0.0282,  0.0314,  0.0018]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0301,  0.0037, -0.0286],\n",
              "                        [ 0.0185, -0.0254, -0.0341],\n",
              "                        [ 0.0351,  0.0014,  0.0180]],\n",
              "              \n",
              "                       [[-0.0211,  0.0317,  0.0135],\n",
              "                        [-0.0330, -0.0269,  0.0274],\n",
              "                        [-0.0268, -0.0270, -0.0179]],\n",
              "              \n",
              "                       [[-0.0163,  0.0310,  0.0084],\n",
              "                        [ 0.0256, -0.0299,  0.0325],\n",
              "                        [-0.0242, -0.0135, -0.0034]]]])),\n",
              "             ('conv_block3.3.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block3.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1.])),\n",
              "             ('conv_block3.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block3.4.running_mean',\n",
              "              tensor([ 1.2228e-02, -3.4775e-02, -1.9238e-02, -2.1514e-02, -4.5623e-02,\n",
              "                       7.3517e-03, -5.9372e-02,  2.5969e-03,  8.3221e-03,  3.9455e-02,\n",
              "                      -2.6189e-02,  2.8536e-02, -1.8999e-02, -8.4939e-03,  4.1925e-04,\n",
              "                      -2.6467e-03,  5.5600e-03,  2.4774e-02,  8.7919e-03,  7.0185e-03,\n",
              "                       3.2294e-02, -7.7155e-02, -3.9861e-02, -7.6898e-02, -1.1312e-02,\n",
              "                      -2.9207e-02, -2.8623e-02,  1.7816e-02, -2.4820e-02, -2.2104e-03,\n",
              "                      -1.7714e-02, -4.2757e-02,  1.7506e-02, -2.3641e-02, -4.1707e-03,\n",
              "                       5.0911e-03,  7.8670e-03, -1.9915e-02,  1.7432e-02, -2.6956e-02,\n",
              "                       2.3317e-02, -3.1263e-02,  8.8292e-03, -1.5031e-02,  2.4609e-03,\n",
              "                      -3.0983e-02,  1.8350e-02, -2.4944e-02,  5.8157e-03,  1.1410e-02,\n",
              "                       1.5538e-02, -1.2536e-02, -6.6778e-03, -2.9725e-03,  5.8172e-02,\n",
              "                       2.1086e-02,  2.4669e-02,  5.9556e-03,  3.9745e-02,  6.3060e-02,\n",
              "                       2.4340e-03,  1.9869e-02, -2.4171e-02, -2.4923e-02,  4.6438e-02,\n",
              "                       5.3484e-03, -9.9035e-03, -3.4808e-02, -4.9741e-03, -5.2490e-02,\n",
              "                      -2.3379e-02, -3.4658e-02,  4.7343e-03, -4.7988e-03, -9.3406e-02,\n",
              "                       1.8990e-02,  5.0614e-02,  1.4034e-03,  1.4712e-03,  3.3038e-02,\n",
              "                       1.2204e-04, -2.8566e-02,  7.3363e-03, -4.7531e-02,  4.7603e-02,\n",
              "                      -2.9628e-02,  3.1917e-02,  2.3007e-02,  3.1615e-03,  3.6457e-03,\n",
              "                       3.2369e-03, -1.0183e-02, -3.9426e-02,  3.4259e-03,  9.3539e-03,\n",
              "                      -3.6415e-02, -1.1074e-02,  1.4235e-02, -5.1532e-02,  4.0612e-02,\n",
              "                      -1.3573e-03, -3.5144e-06, -1.1620e-02, -9.8448e-03,  3.3672e-02,\n",
              "                       2.5113e-02, -2.6105e-03,  4.0778e-02, -1.7824e-02, -1.9432e-03,\n",
              "                      -3.8648e-02, -1.1935e-02,  1.6742e-03,  1.4026e-02, -1.6882e-02,\n",
              "                      -1.2153e-02, -5.4574e-02, -2.3441e-02,  3.4071e-02, -4.4799e-02,\n",
              "                      -5.7053e-02,  3.3790e-02,  1.7894e-02,  2.4853e-02, -6.0909e-02,\n",
              "                       4.4051e-02, -3.0809e-02,  1.9286e-02, -2.6916e-02, -3.0815e-02,\n",
              "                      -1.1708e-03, -1.3859e-02, -5.0569e-02,  2.4619e-02,  2.9465e-02,\n",
              "                      -3.2474e-02,  3.1781e-02, -8.5664e-03,  4.9555e-02, -1.4186e-02,\n",
              "                       6.8016e-03, -3.5963e-02, -3.7294e-02, -1.6275e-02, -1.9103e-02,\n",
              "                      -2.7591e-02, -7.6901e-02,  3.6485e-02,  3.3643e-02,  2.2016e-02,\n",
              "                      -3.1130e-02, -1.6692e-02,  6.6129e-02, -2.0033e-02, -3.1281e-03,\n",
              "                       9.7046e-03, -2.4538e-02,  1.4434e-02, -1.3467e-03, -4.3772e-02,\n",
              "                      -3.2879e-02, -5.9242e-02, -4.9724e-02, -3.6789e-02, -1.1371e-02,\n",
              "                      -3.3564e-02, -2.7474e-03, -5.3231e-02,  5.0645e-02, -1.8828e-02,\n",
              "                       7.5615e-02,  1.6436e-02,  4.1819e-02, -5.7832e-02, -7.1715e-03,\n",
              "                       7.7376e-02, -5.4443e-03, -2.5915e-02,  2.1798e-02,  5.6346e-02,\n",
              "                       5.0304e-02,  3.1747e-02,  1.0575e-01, -3.9518e-02,  2.2007e-02,\n",
              "                      -5.4787e-02, -2.1301e-02,  2.1026e-02, -1.9468e-03, -2.2097e-02,\n",
              "                      -4.0654e-02,  1.9706e-02,  1.9879e-02, -2.3893e-02, -8.4596e-03,\n",
              "                      -1.1681e-02, -8.2232e-03, -1.1276e-02,  4.4575e-02, -3.3943e-02,\n",
              "                      -3.3708e-02, -7.0706e-02,  9.5094e-03, -4.6614e-02, -7.8693e-03,\n",
              "                      -1.8660e-02,  1.5044e-02, -1.7412e-02, -7.3912e-02, -3.4823e-02,\n",
              "                      -8.5937e-03, -1.7854e-02,  3.8842e-02,  1.7584e-02, -1.9548e-02,\n",
              "                       7.9587e-03, -7.2294e-02,  5.9432e-02, -1.9302e-02, -8.5737e-03,\n",
              "                      -2.0363e-02, -7.3409e-02,  4.0556e-03,  5.9927e-02, -3.5679e-02,\n",
              "                      -3.8134e-02, -2.5653e-02,  6.9893e-02, -5.3152e-03, -1.3308e-02,\n",
              "                       7.8766e-03, -2.3792e-02,  2.0328e-02,  4.1453e-02, -4.7735e-02,\n",
              "                      -2.7346e-02, -3.4200e-02, -6.0963e-03, -3.1671e-02, -3.1857e-02,\n",
              "                      -1.5834e-02,  1.0231e-03, -8.6240e-03,  9.5869e-03,  2.6473e-02,\n",
              "                      -6.5673e-03, -1.4137e-02,  1.6174e-02, -3.4101e-02, -2.2807e-02,\n",
              "                      -2.2858e-02,  2.8926e-02, -1.1774e-02,  8.8220e-03, -1.3672e-02,\n",
              "                      -1.5898e-02])),\n",
              "             ('conv_block3.4.running_var',\n",
              "              tensor([0.9313, 0.9307, 0.9344, 0.9290, 0.9290, 0.9303, 0.9330, 0.9328, 0.9344,\n",
              "                      0.9295, 0.9275, 0.9423, 0.9303, 0.9231, 0.9265, 0.9260, 0.9309, 0.9411,\n",
              "                      0.9227, 0.9263, 0.9240, 0.9425, 0.9237, 0.9330, 0.9341, 0.9261, 0.9255,\n",
              "                      0.9418, 0.9382, 0.9300, 0.9277, 0.9339, 0.9324, 0.9332, 0.9278, 0.9239,\n",
              "                      0.9261, 0.9401, 0.9230, 0.9286, 0.9307, 0.9297, 0.9360, 0.9265, 0.9280,\n",
              "                      0.9215, 0.9236, 0.9317, 0.9271, 0.9256, 0.9315, 0.9491, 0.9330, 0.9413,\n",
              "                      0.9301, 0.9335, 0.9283, 0.9283, 0.9317, 0.9330, 0.9330, 0.9318, 0.9288,\n",
              "                      0.9250, 0.9300, 0.9361, 0.9277, 0.9317, 0.9242, 0.9302, 0.9328, 0.9243,\n",
              "                      0.9309, 0.9299, 0.9428, 0.9312, 0.9390, 0.9367, 0.9315, 0.9381, 0.9280,\n",
              "                      0.9305, 0.9310, 0.9355, 0.9235, 0.9291, 0.9244, 0.9353, 0.9440, 0.9311,\n",
              "                      0.9463, 0.9276, 0.9286, 0.9282, 0.9298, 0.9426, 0.9253, 0.9326, 0.9401,\n",
              "                      0.9356, 0.9302, 0.9317, 0.9297, 0.9330, 0.9304, 0.9225, 0.9250, 0.9348,\n",
              "                      0.9303, 0.9264, 0.9403, 0.9277, 0.9308, 0.9263, 0.9281, 0.9244, 0.9312,\n",
              "                      0.9278, 0.9263, 0.9263, 0.9542, 0.9305, 0.9321, 0.9289, 0.9593, 0.9449,\n",
              "                      0.9307, 0.9337, 0.9357, 0.9422, 0.9269, 0.9275, 0.9236, 0.9309, 0.9367,\n",
              "                      0.9338, 0.9425, 0.9252, 0.9438, 0.9375, 0.9320, 0.9310, 0.9293, 0.9299,\n",
              "                      0.9279, 0.9393, 0.9352, 0.9252, 0.9252, 0.9385, 0.9410, 0.9253, 0.9243,\n",
              "                      0.9259, 0.9429, 0.9239, 0.9260, 0.9265, 0.9238, 0.9307, 0.9365, 0.9259,\n",
              "                      0.9280, 0.9335, 0.9323, 0.9304, 0.9309, 0.9282, 0.9244, 0.9251, 0.9313,\n",
              "                      0.9283, 0.9306, 0.9330, 0.9284, 0.9279, 0.9326, 0.9274, 0.9456, 0.9383,\n",
              "                      0.9407, 0.9271, 0.9516, 0.9407, 0.9264, 0.9310, 0.9328, 0.9229, 0.9305,\n",
              "                      0.9279, 0.9398, 0.9311, 0.9323, 0.9210, 0.9292, 0.9509, 0.9316, 0.9291,\n",
              "                      0.9362, 0.9386, 0.9269, 0.9449, 0.9278, 0.9254, 0.9317, 0.9245, 0.9322,\n",
              "                      0.9294, 0.9300, 0.9304, 0.9356, 0.9382, 0.9326, 0.9346, 0.9398, 0.9274,\n",
              "                      0.9501, 0.9271, 0.9295, 0.9369, 0.9441, 0.9282, 0.9310, 0.9424, 0.9299,\n",
              "                      0.9284, 0.9324, 0.9467, 0.9298, 0.9275, 0.9288, 0.9229, 0.9405, 0.9263,\n",
              "                      0.9374, 0.9268, 0.9271, 0.9408, 0.9258, 0.9227, 0.9249, 0.9334, 0.9345,\n",
              "                      0.9292, 0.9275, 0.9268, 0.9373, 0.9266, 0.9357, 0.9293, 0.9397, 0.9345,\n",
              "                      0.9343, 0.9297, 0.9315, 0.9266])),\n",
              "             ('conv_block3.4.num_batches_tracked', tensor(1)),\n",
              "             ('conv_block4.0.weight',\n",
              "              tensor([[[[ 2.0574e-02, -1.3941e-02, -1.7885e-02],\n",
              "                        [ 3.0539e-03,  1.5242e-02,  1.3571e-02],\n",
              "                        [-1.9470e-02,  6.6267e-03, -1.0391e-02]],\n",
              "              \n",
              "                       [[-1.4327e-02,  1.1287e-02,  2.4890e-02],\n",
              "                        [-2.4837e-02,  2.6430e-02, -1.4457e-02],\n",
              "                        [-1.6939e-03,  5.6530e-03, -1.9661e-02]],\n",
              "              \n",
              "                       [[ 9.0593e-03,  1.1659e-02, -3.7496e-03],\n",
              "                        [-5.3575e-03,  1.1870e-02, -6.7788e-03],\n",
              "                        [-2.2894e-03,  2.0266e-03,  2.1738e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 6.4558e-04, -1.1908e-02,  2.5862e-02],\n",
              "                        [-2.0561e-02,  1.4912e-02, -8.4554e-03],\n",
              "                        [-8.8166e-03, -8.4035e-04,  2.1896e-02]],\n",
              "              \n",
              "                       [[ 1.7420e-02, -2.8880e-02, -1.7786e-02],\n",
              "                        [ 7.8311e-04, -2.8491e-02, -2.5579e-02],\n",
              "                        [-1.0408e-02,  1.7904e-02, -1.3999e-02]],\n",
              "              \n",
              "                       [[ 2.8043e-02,  1.6680e-02, -7.6724e-04],\n",
              "                        [-1.9481e-02,  3.9861e-03, -4.9444e-03],\n",
              "                        [-2.5613e-02, -1.8004e-02,  9.5638e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 4.1134e-03,  2.3199e-02,  1.6770e-03],\n",
              "                        [-2.2937e-02, -2.5812e-02, -1.1063e-02],\n",
              "                        [ 1.5211e-04, -9.5026e-03, -2.0177e-02]],\n",
              "              \n",
              "                       [[-7.4818e-03, -9.4794e-03,  1.5654e-02],\n",
              "                        [-1.9669e-02, -1.7248e-02,  2.0235e-02],\n",
              "                        [ 7.7586e-03,  2.9462e-02,  6.0310e-04]],\n",
              "              \n",
              "                       [[-1.5219e-02, -2.6649e-02,  4.6521e-03],\n",
              "                        [ 1.7965e-02, -1.8756e-02,  6.5271e-03],\n",
              "                        [ 9.3016e-03,  2.3973e-02, -8.7634e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 2.6302e-02,  1.0920e-02, -1.7821e-04],\n",
              "                        [-9.2952e-03, -1.0501e-02, -1.1220e-02],\n",
              "                        [ 2.5027e-02,  9.4716e-03,  3.1996e-03]],\n",
              "              \n",
              "                       [[-9.0763e-03, -5.1981e-03, -1.5421e-02],\n",
              "                        [ 2.3965e-02, -3.6367e-03,  8.0849e-03],\n",
              "                        [ 3.5502e-03, -2.6994e-02, -8.2668e-03]],\n",
              "              \n",
              "                       [[ 2.8116e-02, -2.5009e-02, -2.2776e-02],\n",
              "                        [-1.5862e-02, -1.1989e-02, -5.7655e-04],\n",
              "                        [ 1.2229e-02,  2.3847e-02, -2.0612e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 1.6533e-02, -1.5848e-02,  1.8265e-02],\n",
              "                        [-1.9800e-02,  2.6663e-02, -1.9792e-02],\n",
              "                        [ 7.9744e-03,  7.1505e-03, -2.8223e-03]],\n",
              "              \n",
              "                       [[ 1.2576e-02,  2.3424e-02, -2.2029e-02],\n",
              "                        [-7.7895e-03, -1.6283e-02,  2.7959e-02],\n",
              "                        [ 1.6373e-02, -2.4538e-02,  7.3240e-03]],\n",
              "              \n",
              "                       [[-2.2752e-02, -2.0418e-02, -2.9471e-03],\n",
              "                        [-1.8007e-02,  8.0955e-03,  1.4724e-02],\n",
              "                        [ 2.2075e-02,  1.3856e-02,  1.0619e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.3222e-02, -9.6266e-03, -9.9923e-03],\n",
              "                        [ 3.4687e-03,  2.6110e-02,  2.9218e-02],\n",
              "                        [-1.4181e-03, -2.4709e-02, -2.0530e-02]],\n",
              "              \n",
              "                       [[-1.9463e-02,  5.7967e-03, -4.5588e-03],\n",
              "                        [ 8.6963e-05,  1.8195e-02,  2.1346e-02],\n",
              "                        [-2.0885e-02, -2.3151e-02,  1.8145e-03]],\n",
              "              \n",
              "                       [[-2.5370e-02, -2.3315e-02,  2.0244e-02],\n",
              "                        [ 2.5972e-02,  1.1929e-03,  1.4995e-02],\n",
              "                        [-1.9701e-03,  2.9733e-03,  2.9149e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 2.4089e-02,  1.9146e-02,  5.8057e-03],\n",
              "                        [ 4.3034e-03,  1.4160e-02,  4.6836e-03],\n",
              "                        [-2.5239e-02, -1.5638e-02,  1.8547e-02]],\n",
              "              \n",
              "                       [[-2.1445e-02, -1.1779e-02,  2.0601e-02],\n",
              "                        [-2.8971e-02, -2.0644e-02, -8.1618e-03],\n",
              "                        [-8.8794e-03, -4.9877e-03,  2.1495e-03]],\n",
              "              \n",
              "                       [[ 1.6408e-03,  6.3838e-03, -2.0910e-02],\n",
              "                        [-3.8818e-03,  7.1847e-03,  6.9393e-03],\n",
              "                        [ 2.3764e-02, -2.0628e-02,  2.5586e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.9015e-02,  2.4531e-02,  2.0533e-02],\n",
              "                        [ 4.5635e-03, -8.0868e-03,  1.7233e-03],\n",
              "                        [-1.3202e-02, -2.4573e-02,  2.3957e-03]],\n",
              "              \n",
              "                       [[-2.5774e-02,  7.5816e-03, -2.3105e-02],\n",
              "                        [-2.5055e-02, -1.1721e-02, -1.0169e-02],\n",
              "                        [-2.6686e-02,  2.1167e-02,  8.3777e-04]],\n",
              "              \n",
              "                       [[ 2.1161e-02,  1.3558e-02, -2.8955e-02],\n",
              "                        [ 1.1782e-02,  1.0765e-02,  1.3657e-02],\n",
              "                        [ 2.9228e-02,  1.9752e-03, -2.8976e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 5.6982e-03, -1.2835e-02,  2.6313e-02],\n",
              "                        [-9.7051e-03,  2.9376e-02,  6.6540e-03],\n",
              "                        [-1.9400e-02,  6.7790e-03, -2.8119e-02]],\n",
              "              \n",
              "                       [[-1.2205e-02,  1.2359e-02, -1.7402e-02],\n",
              "                        [ 9.0652e-03,  6.8285e-03, -3.3823e-03],\n",
              "                        [ 2.2302e-02, -2.5637e-02,  2.5366e-03]],\n",
              "              \n",
              "                       [[ 2.6744e-02, -1.1905e-02, -8.3875e-03],\n",
              "                        [ 1.9822e-02, -3.5177e-03,  1.4639e-02],\n",
              "                        [-1.5355e-02,  1.4555e-02, -7.5150e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.0324e-02,  2.5583e-02,  1.5122e-03],\n",
              "                        [ 2.0702e-02,  1.6724e-02, -2.8090e-03],\n",
              "                        [ 6.2842e-03,  1.3335e-02,  3.3872e-03]],\n",
              "              \n",
              "                       [[-8.1814e-03,  3.5421e-04, -4.0591e-03],\n",
              "                        [-1.8892e-02,  1.3429e-02,  2.8305e-02],\n",
              "                        [-1.0029e-02, -1.7814e-04, -3.8300e-03]],\n",
              "              \n",
              "                       [[-1.2925e-02, -1.0068e-02, -5.0051e-04],\n",
              "                        [ 2.7449e-02,  6.2806e-03,  2.4429e-02],\n",
              "                        [ 2.6496e-02,  2.0851e-02,  1.1443e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 1.3769e-02,  1.6842e-02, -2.4587e-02],\n",
              "                        [-2.4124e-02, -1.1002e-02,  1.1533e-02],\n",
              "                        [ 1.4372e-02,  2.9698e-03, -2.5451e-02]],\n",
              "              \n",
              "                       [[-1.5927e-04,  5.4102e-04,  2.0925e-02],\n",
              "                        [-8.4486e-03,  9.2800e-03,  8.9990e-03],\n",
              "                        [-2.8989e-02, -2.1156e-03, -1.7337e-02]],\n",
              "              \n",
              "                       [[-1.0193e-02,  1.3442e-02, -1.2022e-02],\n",
              "                        [-1.2962e-02, -7.9148e-03, -2.3212e-02],\n",
              "                        [ 2.2507e-02, -2.1293e-02,  2.0295e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 8.5211e-04, -1.7012e-02,  1.0604e-02],\n",
              "                        [-2.7722e-02,  1.5620e-02, -6.5165e-04],\n",
              "                        [ 7.4239e-03, -2.8007e-02,  2.4684e-02]],\n",
              "              \n",
              "                       [[-9.6118e-03,  2.1972e-02,  1.4887e-02],\n",
              "                        [-5.2268e-03,  2.3810e-02, -1.7587e-02],\n",
              "                        [ 1.1431e-02,  1.1492e-02,  2.4310e-02]],\n",
              "              \n",
              "                       [[-2.9060e-02, -1.5059e-02,  1.4701e-02],\n",
              "                        [-1.9447e-02, -1.7871e-02,  1.1758e-02],\n",
              "                        [-1.9332e-02, -2.6899e-02,  2.6593e-02]]]])),\n",
              "             ('conv_block4.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block4.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv_block4.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block4.1.running_mean',\n",
              "              tensor([ 0.0601,  0.0217,  0.0465, -0.0855, -0.0356, -0.1447,  0.0195,  0.0065,\n",
              "                      -0.0081, -0.0429,  0.0282,  0.0119,  0.1426, -0.0637,  0.0340, -0.0429,\n",
              "                      -0.0387,  0.0286,  0.0197,  0.0893,  0.0088, -0.0088,  0.0191,  0.0461,\n",
              "                      -0.0705, -0.0447,  0.0470, -0.0247, -0.0104, -0.0588, -0.0966, -0.1473,\n",
              "                       0.0792, -0.0391, -0.0145,  0.0404, -0.0322,  0.0035, -0.0390, -0.0513,\n",
              "                       0.1159, -0.0251, -0.0070, -0.1521,  0.0241,  0.0270,  0.0144,  0.0157,\n",
              "                      -0.0815,  0.0420,  0.0709,  0.0351,  0.0254, -0.0656, -0.0209,  0.0167,\n",
              "                       0.0840,  0.0054, -0.0127, -0.0281,  0.0225, -0.0409,  0.0036, -0.0878,\n",
              "                       0.0267,  0.0126,  0.0478,  0.0759, -0.1117,  0.0517,  0.0512,  0.0130,\n",
              "                       0.0447,  0.0133,  0.0280, -0.0536, -0.0326, -0.0247, -0.0549,  0.0992,\n",
              "                       0.0259, -0.0488, -0.0243, -0.0431, -0.0059, -0.0303, -0.0831,  0.0491,\n",
              "                       0.0383,  0.0030, -0.0319,  0.0114, -0.0281,  0.0766,  0.0496,  0.1057,\n",
              "                      -0.0393,  0.0421,  0.0139,  0.0208,  0.0427, -0.0644, -0.0158, -0.0342,\n",
              "                       0.0686,  0.0897, -0.0075,  0.0004, -0.0291, -0.0222,  0.0552, -0.0417,\n",
              "                       0.0627,  0.0224,  0.0822,  0.0282,  0.0331, -0.0904, -0.0439, -0.0163,\n",
              "                       0.0480, -0.0070, -0.0913, -0.0019,  0.0571,  0.0177, -0.0048,  0.0222,\n",
              "                       0.0097, -0.0582, -0.0430,  0.0046, -0.0102, -0.0298,  0.0008, -0.0006,\n",
              "                       0.0243, -0.0636,  0.0359,  0.0350, -0.0046,  0.0238, -0.0100, -0.0254,\n",
              "                       0.0305, -0.0298, -0.0099,  0.0180,  0.0190,  0.0356, -0.0671,  0.0126,\n",
              "                       0.0189, -0.0047, -0.0668, -0.0911, -0.0254, -0.0856,  0.0575, -0.0389,\n",
              "                      -0.0671,  0.0074,  0.0776, -0.0371, -0.0095,  0.0067, -0.0116,  0.0859,\n",
              "                       0.0338, -0.0192,  0.0019,  0.1149,  0.0730,  0.0652, -0.0413,  0.0035,\n",
              "                      -0.0127, -0.0265,  0.0472,  0.0173, -0.0174,  0.0056, -0.0102,  0.0134,\n",
              "                      -0.0175, -0.0101,  0.0619, -0.0460,  0.0169, -0.0229,  0.0994, -0.0515,\n",
              "                       0.0897, -0.0216, -0.0314, -0.0822, -0.0237, -0.0297, -0.0206, -0.0319,\n",
              "                       0.0061,  0.0569, -0.0145, -0.0422, -0.0009,  0.0152,  0.0161, -0.0266,\n",
              "                      -0.0407, -0.0878,  0.0105, -0.0335, -0.0663, -0.0199,  0.0395,  0.0068,\n",
              "                       0.0557,  0.0839, -0.0753,  0.0744, -0.0032, -0.0128,  0.0022,  0.0349,\n",
              "                      -0.0406,  0.0497, -0.0477,  0.0481,  0.1165,  0.0364, -0.0444,  0.0357,\n",
              "                      -0.0803, -0.0057,  0.0044, -0.1127, -0.0332, -0.0668,  0.0278,  0.0322,\n",
              "                      -0.0517,  0.0129,  0.0101, -0.0630,  0.1032, -0.0309,  0.0434,  0.0116,\n",
              "                       0.0187, -0.0584,  0.0384, -0.0229, -0.0199,  0.0463, -0.0230,  0.0549,\n",
              "                       0.0297, -0.0295, -0.0142,  0.0363, -0.0350, -0.0313,  0.0400, -0.0328,\n",
              "                       0.0642,  0.0244,  0.0793, -0.0434,  0.0786, -0.0697, -0.0425, -0.0275,\n",
              "                       0.0208,  0.0038, -0.0866, -0.0326,  0.0742, -0.0880, -0.0131,  0.0378,\n",
              "                      -0.0359,  0.0104,  0.0261,  0.0396, -0.0040,  0.0040, -0.0407,  0.0401,\n",
              "                      -0.0167, -0.0620, -0.0190, -0.0443, -0.0490,  0.0332,  0.0232,  0.0622,\n",
              "                      -0.0033, -0.0212, -0.0500, -0.0454, -0.0250,  0.0454, -0.0245, -0.0321,\n",
              "                      -0.0137, -0.0465,  0.0146, -0.0359,  0.0032, -0.1085, -0.0897,  0.0523,\n",
              "                      -0.0414,  0.0511,  0.0061, -0.1311,  0.0482, -0.0562, -0.0177,  0.0949,\n",
              "                       0.0172, -0.0270, -0.0186,  0.0191,  0.0283,  0.0333, -0.0016, -0.0437,\n",
              "                       0.0494,  0.0357, -0.0017, -0.0315, -0.0408, -0.0089,  0.0328,  0.0328,\n",
              "                      -0.0420,  0.0163, -0.1037, -0.0102,  0.0425,  0.0561,  0.0436,  0.0539,\n",
              "                      -0.0491,  0.0429,  0.0578,  0.0580, -0.1543,  0.0206, -0.0754, -0.0175,\n",
              "                       0.0328, -0.0651,  0.0013, -0.0426, -0.0170,  0.0739, -0.0067,  0.0189,\n",
              "                      -0.0242,  0.1203, -0.0002, -0.0748,  0.0037, -0.0127, -0.0165, -0.0213,\n",
              "                       0.0437,  0.0765, -0.0462,  0.0375,  0.0347,  0.0017, -0.0161, -0.0115,\n",
              "                      -0.0649, -0.0019, -0.0148, -0.0104, -0.0770, -0.0438,  0.0233,  0.0653,\n",
              "                       0.0286, -0.0359,  0.0985, -0.0065,  0.0366, -0.0155,  0.0613,  0.0853,\n",
              "                       0.0297, -0.0020,  0.0821, -0.0829,  0.0137, -0.0194, -0.0557,  0.0431,\n",
              "                       0.0633,  0.0487, -0.1001,  0.0549,  0.0135,  0.0294,  0.0841, -0.0399,\n",
              "                       0.0545,  0.0457,  0.0376, -0.0396, -0.0170,  0.0249,  0.0051,  0.0564,\n",
              "                       0.0385, -0.0156, -0.0042, -0.0578, -0.0130,  0.0685, -0.0107,  0.0313,\n",
              "                       0.0003, -0.0712, -0.0137,  0.0514, -0.0299, -0.0126,  0.0691, -0.1018,\n",
              "                       0.0284,  0.0640, -0.0093,  0.0196, -0.0614,  0.0271,  0.0328,  0.0024,\n",
              "                      -0.0535,  0.0546,  0.0110,  0.0094, -0.0016,  0.0861, -0.0128,  0.0096,\n",
              "                       0.0123, -0.0038, -0.0947, -0.0846,  0.0051,  0.0921, -0.0526, -0.0334,\n",
              "                       0.0661,  0.0901,  0.0350,  0.0124, -0.0492, -0.0187, -0.0051, -0.0160,\n",
              "                       0.0468, -0.0157,  0.0216,  0.0171,  0.0430, -0.0112, -0.0304,  0.0410,\n",
              "                      -0.0846, -0.0491,  0.0181,  0.0502, -0.0109, -0.0375,  0.0681,  0.0518,\n",
              "                      -0.1048,  0.0223,  0.0558, -0.0035, -0.0034, -0.0097, -0.0755, -0.0503,\n",
              "                      -0.0286, -0.0541,  0.0022, -0.0520,  0.1184, -0.0456, -0.0224, -0.0097,\n",
              "                      -0.0299,  0.0166, -0.0311,  0.1128, -0.0226,  0.0079,  0.0104,  0.0437,\n",
              "                      -0.0565, -0.0170, -0.0513, -0.0406,  0.0283, -0.0848, -0.0558, -0.0865])),\n",
              "             ('conv_block4.1.running_var',\n",
              "              tensor([0.9389, 0.9255, 0.9376, 0.9278, 0.9303, 0.9534, 0.9312, 0.9248, 0.9298,\n",
              "                      0.9259, 0.9351, 0.9450, 0.9397, 0.9422, 0.9264, 0.9261, 0.9393, 0.9316,\n",
              "                      0.9359, 0.9391, 0.9374, 0.9313, 0.9352, 0.9298, 0.9366, 0.9346, 0.9362,\n",
              "                      0.9389, 0.9343, 0.9263, 0.9592, 0.9606, 0.9414, 0.9298, 0.9360, 0.9239,\n",
              "                      0.9456, 0.9263, 0.9443, 0.9423, 0.9521, 0.9269, 0.9245, 0.9566, 0.9325,\n",
              "                      0.9460, 0.9360, 0.9325, 0.9458, 0.9285, 0.9373, 0.9463, 0.9315, 0.9313,\n",
              "                      0.9276, 0.9446, 0.9347, 0.9340, 0.9219, 0.9455, 0.9373, 0.9305, 0.9245,\n",
              "                      0.9372, 0.9239, 0.9410, 0.9273, 0.9405, 0.9543, 0.9248, 0.9489, 0.9255,\n",
              "                      0.9291, 0.9442, 0.9488, 0.9292, 0.9425, 0.9419, 0.9439, 0.9529, 0.9308,\n",
              "                      0.9270, 0.9357, 0.9266, 0.9351, 0.9355, 0.9370, 0.9299, 0.9291, 0.9340,\n",
              "                      0.9310, 0.9380, 0.9648, 0.9476, 0.9289, 0.9507, 0.9328, 0.9262, 0.9294,\n",
              "                      0.9557, 0.9349, 0.9901, 0.9244, 0.9302, 0.9425, 0.9460, 0.9245, 0.9299,\n",
              "                      0.9249, 0.9345, 0.9291, 0.9472, 0.9211, 0.9358, 0.9376, 0.9313, 0.9363,\n",
              "                      0.9321, 0.9281, 0.9505, 0.9446, 0.9323, 0.9624, 0.9402, 0.9242, 0.9267,\n",
              "                      0.9421, 0.9549, 0.9496, 0.9329, 0.9403, 0.9324, 0.9236, 0.9390, 0.9246,\n",
              "                      0.9263, 0.9239, 0.9426, 0.9705, 0.9297, 0.9365, 0.9329, 0.9375, 0.9301,\n",
              "                      0.9249, 0.9426, 0.9273, 0.9294, 0.9293, 0.9253, 0.9303, 0.9313, 0.9410,\n",
              "                      0.9367, 0.9326, 0.9431, 0.9403, 0.9249, 0.9393, 0.9426, 0.9384, 0.9255,\n",
              "                      0.9405, 0.9253, 0.9334, 0.9242, 0.9398, 0.9383, 0.9247, 0.9257, 0.9276,\n",
              "                      0.9389, 0.9260, 0.9308, 0.9346, 0.9323, 0.9232, 0.9321, 0.9384, 0.9429,\n",
              "                      0.9379, 0.9322, 0.9351, 0.9259, 0.9255, 0.9328, 0.9291, 0.9375, 0.9394,\n",
              "                      0.9430, 0.9480, 0.9408, 0.9320, 0.9282, 0.9241, 0.9523, 0.9230, 0.9287,\n",
              "                      0.9412, 0.9373, 0.9369, 0.9319, 0.9292, 0.9289, 0.9448, 0.9228, 0.9504,\n",
              "                      0.9205, 0.9559, 0.9470, 0.9258, 0.9325, 0.9396, 0.9460, 0.9307, 0.9256,\n",
              "                      0.9433, 0.9533, 0.9395, 0.9391, 0.9306, 0.9665, 0.9297, 0.9313, 0.9297,\n",
              "                      0.9339, 0.9331, 0.9245, 0.9436, 0.9347, 0.9364, 0.9291, 0.9456, 0.9339,\n",
              "                      0.9414, 0.9678, 0.9365, 0.9374, 0.9283, 0.9316, 0.9298, 0.9229, 0.9218,\n",
              "                      0.9421, 0.9560, 0.9327, 0.9266, 0.9566, 0.9333, 0.9460, 0.9310, 0.9306,\n",
              "                      0.9333, 0.9409, 0.9330, 0.9376, 0.9427, 0.9289, 0.9439, 0.9364, 0.9374,\n",
              "                      0.9509, 0.9283, 0.9242, 0.9583, 0.9309, 0.9364, 0.9353, 0.9409, 0.9357,\n",
              "                      0.9433, 0.9466, 0.9434, 0.9266, 0.9225, 0.9224, 0.9498, 0.9305, 0.9259,\n",
              "                      0.9473, 0.9420, 0.9281, 0.9300, 0.9225, 0.9266, 0.9266, 0.9241, 0.9378,\n",
              "                      0.9284, 0.9493, 0.9331, 0.9416, 0.9477, 0.9346, 0.9366, 0.9498, 0.9286,\n",
              "                      0.9245, 0.9442, 0.9278, 0.9311, 0.9358, 0.9312, 0.9322, 0.9257, 0.9270,\n",
              "                      0.9480, 0.9439, 0.9259, 0.9396, 0.9693, 0.9388, 0.9260, 0.9300, 0.9313,\n",
              "                      0.9600, 0.9591, 0.9289, 0.9268, 0.9329, 0.9250, 0.9300, 0.9406, 0.9310,\n",
              "                      0.9291, 0.9344, 0.9308, 0.9264, 0.9329, 0.9373, 0.9297, 0.9562, 0.9336,\n",
              "                      0.9437, 0.9361, 0.9436, 0.9445, 0.9256, 0.9345, 0.9266, 0.9340, 0.9236,\n",
              "                      0.9253, 0.9370, 0.9326, 0.9344, 0.9329, 0.9386, 0.9480, 0.9222, 0.9364,\n",
              "                      0.9232, 0.9343, 0.9353, 0.9302, 0.9401, 0.9311, 0.9299, 0.9343, 0.9239,\n",
              "                      0.9243, 0.9390, 0.9434, 0.9329, 0.9326, 0.9373, 0.9291, 0.9475, 0.9233,\n",
              "                      0.9383, 0.9230, 0.9271, 0.9216, 0.9327, 0.9419, 0.9370, 0.9305, 0.9494,\n",
              "                      0.9327, 0.9306, 0.9368, 0.9294, 0.9350, 0.9763, 0.9547, 0.9409, 0.9514,\n",
              "                      0.9471, 0.9370, 0.9433, 0.9353, 0.9313, 0.9338, 0.9428, 0.9476, 0.9385,\n",
              "                      0.9431, 0.9269, 0.9306, 0.9299, 0.9407, 0.9422, 0.9507, 0.9275, 0.9281,\n",
              "                      0.9223, 0.9405, 0.9259, 0.9300, 0.9256, 0.9356, 0.9282, 0.9355, 0.9276,\n",
              "                      0.9234, 0.9277, 0.9351, 0.9285, 0.9245, 0.9370, 0.9385, 0.9530, 0.9697,\n",
              "                      0.9458, 0.9292, 0.9417, 0.9277, 0.9325, 0.9410, 0.9300, 0.9434, 0.9380,\n",
              "                      0.9471, 0.9398, 0.9262, 0.9210, 0.9365, 0.9384, 0.9269, 0.9319, 0.9236,\n",
              "                      0.9307, 0.9402, 0.9611, 0.9407, 0.9293, 0.9244, 0.9317, 0.9303, 0.9284,\n",
              "                      0.9447, 0.9378, 0.9351, 0.9339, 0.9354, 0.9304, 0.9305, 0.9424, 0.9349,\n",
              "                      0.9193, 0.9312, 0.9283, 0.9222, 0.9309, 0.9521, 0.9213, 0.9282, 0.9602,\n",
              "                      0.9228, 0.9259, 0.9285, 0.9368, 0.9318, 0.9464, 0.9324, 0.9548, 0.9473,\n",
              "                      0.9315, 0.9315, 0.9355, 0.9407, 0.9371, 0.9408, 0.9288, 0.9297, 0.9292,\n",
              "                      0.9318, 0.9348, 0.9322, 0.9301, 0.9322, 0.9323, 0.9345, 0.9288, 0.9316,\n",
              "                      0.9273, 0.9309, 0.9230, 0.9446, 0.9671, 0.9437, 0.9288, 0.9343, 0.9302,\n",
              "                      0.9358, 0.9375, 0.9319, 0.9336, 0.9296, 0.9398, 0.9247, 0.9272])),\n",
              "             ('conv_block4.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv_block4.3.weight',\n",
              "              tensor([[[[ 0.0243, -0.0184,  0.0214],\n",
              "                        [ 0.0168, -0.0100, -0.0038],\n",
              "                        [ 0.0088, -0.0092, -0.0162]],\n",
              "              \n",
              "                       [[-0.0211,  0.0004, -0.0089],\n",
              "                        [-0.0132,  0.0230, -0.0064],\n",
              "                        [ 0.0137,  0.0226,  0.0008]],\n",
              "              \n",
              "                       [[ 0.0068,  0.0227,  0.0199],\n",
              "                        [-0.0117,  0.0236, -0.0166],\n",
              "                        [ 0.0103, -0.0139, -0.0153]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0038,  0.0079,  0.0048],\n",
              "                        [-0.0008,  0.0120,  0.0235],\n",
              "                        [ 0.0016,  0.0084, -0.0237]],\n",
              "              \n",
              "                       [[-0.0094, -0.0196, -0.0027],\n",
              "                        [ 0.0225,  0.0097, -0.0070],\n",
              "                        [ 0.0009, -0.0210, -0.0004]],\n",
              "              \n",
              "                       [[ 0.0190,  0.0239,  0.0192],\n",
              "                        [-0.0099,  0.0046,  0.0189],\n",
              "                        [ 0.0249,  0.0005,  0.0108]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0042,  0.0063, -0.0217],\n",
              "                        [-0.0239,  0.0068,  0.0153],\n",
              "                        [-0.0216,  0.0067, -0.0087]],\n",
              "              \n",
              "                       [[ 0.0001, -0.0114,  0.0195],\n",
              "                        [-0.0126,  0.0246,  0.0173],\n",
              "                        [-0.0088, -0.0197, -0.0252]],\n",
              "              \n",
              "                       [[ 0.0172,  0.0083, -0.0186],\n",
              "                        [ 0.0056,  0.0150, -0.0039],\n",
              "                        [ 0.0172, -0.0106, -0.0216]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0085,  0.0221,  0.0175],\n",
              "                        [-0.0220, -0.0036,  0.0227],\n",
              "                        [ 0.0171,  0.0043, -0.0101]],\n",
              "              \n",
              "                       [[-0.0072,  0.0014,  0.0113],\n",
              "                        [ 0.0049, -0.0019,  0.0138],\n",
              "                        [ 0.0227, -0.0250,  0.0085]],\n",
              "              \n",
              "                       [[-0.0031, -0.0148, -0.0010],\n",
              "                        [ 0.0005, -0.0143, -0.0026],\n",
              "                        [-0.0006,  0.0028, -0.0149]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0128,  0.0109,  0.0232],\n",
              "                        [ 0.0076, -0.0241,  0.0082],\n",
              "                        [ 0.0190, -0.0146,  0.0188]],\n",
              "              \n",
              "                       [[-0.0079,  0.0230,  0.0195],\n",
              "                        [ 0.0101,  0.0088,  0.0217],\n",
              "                        [-0.0144, -0.0162, -0.0241]],\n",
              "              \n",
              "                       [[-0.0205, -0.0002, -0.0085],\n",
              "                        [ 0.0177,  0.0197,  0.0141],\n",
              "                        [ 0.0088,  0.0229, -0.0196]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0128,  0.0206, -0.0072],\n",
              "                        [-0.0072, -0.0155,  0.0135],\n",
              "                        [-0.0230, -0.0040,  0.0102]],\n",
              "              \n",
              "                       [[ 0.0171,  0.0165, -0.0237],\n",
              "                        [ 0.0038, -0.0227, -0.0051],\n",
              "                        [ 0.0188, -0.0238, -0.0219]],\n",
              "              \n",
              "                       [[ 0.0088,  0.0159,  0.0175],\n",
              "                        [ 0.0063,  0.0202, -0.0017],\n",
              "                        [ 0.0105,  0.0179,  0.0048]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0044, -0.0192, -0.0162],\n",
              "                        [-0.0189,  0.0051, -0.0151],\n",
              "                        [-0.0110,  0.0227, -0.0137]],\n",
              "              \n",
              "                       [[ 0.0205,  0.0011,  0.0020],\n",
              "                        [-0.0098, -0.0215, -0.0198],\n",
              "                        [ 0.0007, -0.0181, -0.0083]],\n",
              "              \n",
              "                       [[ 0.0231,  0.0181, -0.0101],\n",
              "                        [-0.0163, -0.0015,  0.0138],\n",
              "                        [ 0.0033,  0.0058,  0.0129]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0124, -0.0163, -0.0119],\n",
              "                        [-0.0250,  0.0127, -0.0003],\n",
              "                        [ 0.0064,  0.0100,  0.0253]],\n",
              "              \n",
              "                       [[ 0.0231,  0.0033,  0.0164],\n",
              "                        [ 0.0219, -0.0038,  0.0224],\n",
              "                        [-0.0068, -0.0041, -0.0020]],\n",
              "              \n",
              "                       [[-0.0140, -0.0095, -0.0038],\n",
              "                        [ 0.0047, -0.0186,  0.0183],\n",
              "                        [-0.0145,  0.0032,  0.0018]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0136,  0.0179, -0.0083],\n",
              "                        [ 0.0152,  0.0114, -0.0004],\n",
              "                        [-0.0031, -0.0005, -0.0202]],\n",
              "              \n",
              "                       [[-0.0117, -0.0089,  0.0206],\n",
              "                        [ 0.0076,  0.0184,  0.0073],\n",
              "                        [-0.0129,  0.0210, -0.0106]],\n",
              "              \n",
              "                       [[-0.0034, -0.0224, -0.0143],\n",
              "                        [ 0.0170, -0.0202,  0.0003],\n",
              "                        [-0.0009,  0.0058,  0.0094]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0017, -0.0160, -0.0243],\n",
              "                        [ 0.0023, -0.0234,  0.0057],\n",
              "                        [ 0.0105, -0.0220, -0.0209]],\n",
              "              \n",
              "                       [[-0.0023, -0.0115, -0.0196],\n",
              "                        [ 0.0080,  0.0013, -0.0255],\n",
              "                        [ 0.0052, -0.0168, -0.0227]],\n",
              "              \n",
              "                       [[-0.0255,  0.0033,  0.0225],\n",
              "                        [-0.0003,  0.0090, -0.0182],\n",
              "                        [ 0.0216, -0.0066,  0.0038]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0015,  0.0090,  0.0108],\n",
              "                        [ 0.0038,  0.0183,  0.0185],\n",
              "                        [ 0.0047, -0.0024, -0.0018]],\n",
              "              \n",
              "                       [[-0.0187, -0.0244,  0.0056],\n",
              "                        [-0.0170, -0.0164,  0.0156],\n",
              "                        [-0.0223, -0.0113, -0.0011]],\n",
              "              \n",
              "                       [[ 0.0241,  0.0178,  0.0032],\n",
              "                        [ 0.0171, -0.0078,  0.0090],\n",
              "                        [-0.0032,  0.0216, -0.0148]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0246,  0.0064,  0.0008],\n",
              "                        [ 0.0091, -0.0202,  0.0121],\n",
              "                        [ 0.0241, -0.0122, -0.0247]],\n",
              "              \n",
              "                       [[-0.0104, -0.0153, -0.0092],\n",
              "                        [-0.0223,  0.0200,  0.0143],\n",
              "                        [ 0.0121, -0.0247,  0.0070]],\n",
              "              \n",
              "                       [[ 0.0046,  0.0157, -0.0083],\n",
              "                        [ 0.0237,  0.0189,  0.0032],\n",
              "                        [ 0.0128, -0.0241,  0.0153]]]])),\n",
              "             ('conv_block4.3.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block4.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv_block4.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block4.4.running_mean',\n",
              "              tensor([-3.5661e-02, -6.5263e-03,  6.7747e-03, -1.0710e-02,  2.2006e-02,\n",
              "                       1.5796e-02, -1.7120e-02, -3.5933e-02, -4.8789e-03,  5.5362e-02,\n",
              "                       2.1384e-02, -4.6571e-02, -2.4149e-03, -8.3137e-04,  5.0660e-02,\n",
              "                       1.4551e-02,  1.5055e-02,  1.1941e-02,  6.3980e-03, -1.7291e-02,\n",
              "                      -4.8991e-02, -3.0291e-02, -2.6389e-02, -3.3294e-02, -1.0039e-02,\n",
              "                      -1.8717e-02, -1.9873e-02,  1.0707e-02, -1.6905e-02,  5.4159e-02,\n",
              "                       2.8137e-02, -1.4005e-02, -3.2021e-02,  1.9220e-02, -3.6796e-02,\n",
              "                       6.4860e-03,  4.4237e-02,  7.4961e-03, -1.2547e-04, -1.4008e-02,\n",
              "                      -6.1426e-03,  2.5864e-02,  9.4220e-03, -1.0565e-02, -1.4422e-02,\n",
              "                       3.7364e-02,  1.8138e-02,  2.0562e-02, -4.5735e-03,  2.5452e-02,\n",
              "                      -1.3819e-03, -1.6568e-02,  3.8497e-02, -6.0139e-03, -6.3744e-02,\n",
              "                      -6.3438e-03,  7.7707e-03,  3.6379e-02,  1.4298e-03, -1.8872e-02,\n",
              "                      -2.1868e-02, -4.7768e-04,  4.3747e-03,  1.1405e-02, -2.6889e-03,\n",
              "                       2.6047e-02, -5.9328e-03,  1.7400e-02, -2.4636e-03,  5.2378e-02,\n",
              "                       1.1688e-02,  3.6471e-02,  4.8167e-02,  1.7402e-02, -2.2508e-02,\n",
              "                       1.5651e-02,  1.2722e-02, -2.1891e-02,  1.2981e-02,  2.6931e-02,\n",
              "                      -5.3243e-03, -1.5598e-02,  7.1720e-03,  2.4892e-02,  9.0392e-03,\n",
              "                       2.0132e-02, -2.9491e-02, -2.2493e-02, -1.9099e-02, -4.4074e-02,\n",
              "                       2.6764e-02,  2.2564e-02, -6.7278e-03,  2.9254e-02, -1.5354e-02,\n",
              "                       2.8763e-02,  2.4561e-02, -5.6480e-03,  1.1349e-02,  5.7226e-02,\n",
              "                      -2.0385e-02,  1.1229e-02,  4.9436e-03, -1.3976e-02, -3.5729e-02,\n",
              "                       1.8305e-02, -3.9493e-02,  2.0761e-02, -9.3874e-03,  2.5298e-02,\n",
              "                      -4.8235e-02, -1.8687e-02, -1.6209e-03, -2.7752e-03, -2.4922e-02,\n",
              "                      -3.4302e-02,  6.1811e-04, -4.6724e-03, -5.4841e-02,  1.6451e-02,\n",
              "                      -4.9910e-04, -1.2030e-02,  3.4428e-02,  2.0130e-03,  2.0794e-02,\n",
              "                       6.1485e-02, -8.3343e-03, -2.4055e-02,  4.6891e-02, -3.3846e-02,\n",
              "                       4.9750e-02,  3.7209e-03, -9.6383e-03, -2.7963e-02, -3.4666e-02,\n",
              "                      -7.9502e-03,  1.4465e-02,  4.5903e-02,  4.9698e-02, -3.7674e-02,\n",
              "                       4.1855e-02, -2.2036e-02, -1.8799e-02,  1.2390e-02, -6.8640e-02,\n",
              "                      -1.0267e-03,  2.3956e-02, -2.3324e-03,  2.4971e-02, -1.5746e-02,\n",
              "                       1.6664e-02, -8.5663e-03, -2.8854e-02,  9.6135e-03,  1.5073e-02,\n",
              "                      -5.9716e-03, -5.5391e-03, -2.7606e-02, -3.0552e-03,  1.1399e-02,\n",
              "                      -1.0337e-02,  2.5286e-02, -1.3090e-02,  2.8908e-02, -2.8656e-02,\n",
              "                       1.4949e-02,  2.2740e-02,  6.5893e-03, -3.3520e-02, -3.9863e-02,\n",
              "                       2.5269e-02,  1.0114e-01,  3.2658e-03,  4.7110e-02,  9.8730e-03,\n",
              "                      -2.2342e-02,  1.2351e-02, -1.2201e-02,  5.2629e-03, -2.4319e-02,\n",
              "                      -4.2783e-02,  3.7001e-02, -7.9947e-03, -3.1030e-02,  2.1809e-02,\n",
              "                       2.9274e-02, -2.4642e-03,  1.6277e-02,  4.0089e-03,  2.3465e-02,\n",
              "                      -6.8033e-03,  4.4387e-02, -2.8181e-03, -4.3074e-02, -1.9271e-02,\n",
              "                      -1.6686e-02,  7.4869e-03, -2.5980e-02, -2.5382e-02,  1.8145e-02,\n",
              "                       1.1823e-02,  3.5195e-02,  2.3049e-02, -2.5564e-02, -2.8556e-02,\n",
              "                      -3.4537e-02,  1.7783e-02, -2.5978e-02,  2.1012e-02,  2.0074e-02,\n",
              "                       3.7744e-02, -4.3839e-02,  3.0065e-02, -8.0932e-03,  1.6737e-02,\n",
              "                      -2.0460e-02, -5.2897e-02, -4.6217e-02, -1.0400e-02,  1.6739e-02,\n",
              "                      -1.3330e-02, -1.5756e-02, -6.2419e-03,  2.6177e-02, -3.1400e-02,\n",
              "                       9.9346e-03, -1.6204e-02,  2.0416e-02, -5.0579e-02,  2.2940e-02,\n",
              "                      -4.3449e-02,  3.4433e-02, -1.0848e-02,  2.3858e-03,  3.7343e-02,\n",
              "                      -5.4160e-02, -1.5223e-02,  8.9724e-03,  3.8873e-02,  4.9787e-03,\n",
              "                      -5.8194e-03, -5.2634e-02,  4.6965e-02, -2.5207e-02,  1.1214e-02,\n",
              "                      -4.7135e-03, -3.5364e-02, -2.5282e-03, -3.4121e-02, -1.8745e-03,\n",
              "                      -2.2756e-02, -1.4460e-02,  4.5202e-02, -2.8423e-03, -2.7437e-02,\n",
              "                      -2.4366e-02,  8.0409e-02,  5.7243e-03, -7.3142e-03,  1.4477e-02,\n",
              "                      -2.3277e-02,  4.0712e-02,  4.3122e-03, -8.2745e-03, -1.0726e-02,\n",
              "                       2.0428e-03, -3.4756e-02, -2.7389e-02, -3.7962e-04, -1.4316e-02,\n",
              "                      -3.2980e-02, -5.5824e-02,  1.5102e-02, -4.2495e-03, -4.6377e-02,\n",
              "                       2.6052e-02, -1.4685e-03, -6.6311e-03,  1.9289e-02, -6.1012e-03,\n",
              "                       1.5564e-02, -1.9354e-02,  1.6333e-02,  2.0166e-02, -2.8053e-03,\n",
              "                       3.2413e-02, -2.0503e-03,  1.5065e-02,  2.5884e-02, -9.1258e-03,\n",
              "                       1.6382e-02, -2.5025e-02, -8.9722e-03,  8.0810e-05, -1.9073e-02,\n",
              "                       8.7692e-03,  2.3864e-02, -1.3454e-02, -9.2812e-03,  4.9246e-02,\n",
              "                      -2.2377e-02, -3.1031e-02, -6.3677e-02,  5.9781e-02, -4.7737e-03,\n",
              "                       1.0034e-02,  6.4820e-02,  1.9233e-02, -1.0381e-02, -4.9595e-02,\n",
              "                      -1.9107e-02,  3.3801e-02, -1.2771e-02, -2.8610e-02, -6.4335e-02,\n",
              "                      -3.2996e-02,  1.8644e-02, -2.3620e-03,  7.0620e-02, -8.0913e-03,\n",
              "                       5.9652e-02, -4.9243e-02, -1.3970e-02, -2.2746e-02, -7.6058e-03,\n",
              "                       5.3650e-02,  2.6271e-02, -3.8072e-02, -1.7907e-02, -3.3172e-02,\n",
              "                       3.6751e-02,  4.7637e-02,  6.1183e-02, -2.2888e-02,  1.5735e-02,\n",
              "                       7.4835e-02,  4.2152e-02,  7.2828e-03,  1.8150e-02,  1.6259e-03,\n",
              "                      -3.8882e-02, -3.1264e-02, -2.2099e-03, -6.1851e-02, -3.8781e-02,\n",
              "                       4.3944e-03,  7.2970e-03, -3.0570e-02, -1.7882e-02,  7.8860e-03,\n",
              "                      -4.3349e-04,  1.4760e-02, -4.2640e-04, -3.4985e-03,  1.6959e-03,\n",
              "                       1.7974e-03, -2.1419e-02, -8.3359e-03, -5.2189e-03, -1.2963e-02,\n",
              "                       1.9751e-02, -1.1668e-02, -3.9192e-02,  3.4637e-02,  6.6703e-03,\n",
              "                       3.1409e-02, -3.6872e-02, -4.5462e-03, -2.0312e-02, -5.3547e-03,\n",
              "                       7.4261e-03,  2.0516e-02,  1.4290e-02,  1.1767e-02,  4.7495e-02,\n",
              "                       2.0451e-02,  5.2391e-03,  3.0344e-02, -2.2519e-02,  4.2201e-04,\n",
              "                       3.1565e-02,  4.2229e-02,  1.2302e-02, -9.5121e-03,  4.4211e-03,\n",
              "                       1.4435e-02,  8.3568e-03, -1.2771e-02,  9.5045e-03, -2.9680e-02,\n",
              "                      -1.6349e-02, -1.7946e-02, -4.2833e-02, -5.2255e-03, -5.5679e-02,\n",
              "                      -9.3396e-03,  1.3885e-02, -3.9624e-03,  6.5182e-02, -1.9269e-04,\n",
              "                      -8.5120e-03,  3.2499e-02, -2.4917e-02, -1.3732e-02,  1.3014e-02,\n",
              "                      -1.6952e-03, -4.5478e-02, -1.0602e-02,  3.0807e-03,  1.5270e-02,\n",
              "                       1.9699e-02, -4.6903e-02,  8.5603e-03, -4.3346e-02,  3.7640e-02,\n",
              "                       3.2783e-04, -3.7665e-03, -5.7034e-02,  2.4302e-02, -1.9270e-02,\n",
              "                      -6.7717e-03, -4.7194e-03,  1.8543e-02,  3.1780e-02, -3.4212e-02,\n",
              "                       3.0484e-02, -1.5308e-02,  2.3158e-02, -7.0261e-02, -7.8817e-03,\n",
              "                      -8.2989e-03, -6.5670e-03,  1.7188e-02, -6.6963e-02, -1.8143e-02,\n",
              "                       1.2593e-02,  1.3946e-02,  3.5526e-02,  2.4416e-02,  4.2486e-02,\n",
              "                       2.5672e-02,  2.7923e-02, -5.5718e-03, -6.2982e-02, -1.3898e-02,\n",
              "                      -7.8850e-03,  2.3894e-02,  3.0025e-02,  2.0200e-02,  1.8218e-02,\n",
              "                       5.3529e-02, -8.4531e-03, -8.7024e-03, -5.7130e-02, -6.9242e-02,\n",
              "                      -2.8738e-02,  4.9162e-02,  4.7054e-02, -4.7833e-02, -1.8144e-02,\n",
              "                      -1.0089e-02,  1.6664e-02, -9.8389e-03,  1.7094e-02,  5.3881e-02,\n",
              "                       1.0608e-02, -5.5499e-03, -2.3962e-02,  7.5363e-02,  1.1101e-02,\n",
              "                      -1.2210e-02,  1.2227e-02,  3.3045e-02, -2.1983e-02, -1.7583e-02,\n",
              "                       9.7119e-03,  2.3225e-02, -3.1925e-02,  9.3821e-03, -4.4864e-02,\n",
              "                      -4.9630e-02, -1.4436e-02,  1.7193e-02,  2.8786e-03, -2.3318e-02,\n",
              "                       2.4891e-02, -2.9364e-02, -3.5542e-02, -2.6077e-02,  2.9240e-02,\n",
              "                       2.9451e-03, -1.7199e-02,  2.5091e-02, -3.1210e-02, -7.8530e-03,\n",
              "                      -2.0513e-02,  8.0256e-03,  2.4772e-02, -1.2253e-02,  1.4821e-02,\n",
              "                      -1.2933e-05,  4.4101e-02,  2.9071e-02,  1.4184e-02,  8.1557e-03,\n",
              "                      -2.7552e-02,  3.5032e-02,  3.9932e-03,  4.0334e-02,  3.2104e-02,\n",
              "                       6.6364e-04,  2.7950e-02])),\n",
              "             ('conv_block4.4.running_var',\n",
              "              tensor([0.9237, 0.9196, 0.9249, 0.9299, 0.9205, 0.9286, 0.9399, 0.9312, 0.9264,\n",
              "                      0.9286, 0.9250, 0.9250, 0.9278, 0.9279, 0.9368, 0.9287, 0.9378, 0.9249,\n",
              "                      0.9251, 0.9242, 0.9311, 0.9222, 0.9215, 0.9253, 0.9238, 0.9203, 0.9207,\n",
              "                      0.9258, 0.9269, 0.9303, 0.9250, 0.9367, 0.9326, 0.9379, 0.9316, 0.9219,\n",
              "                      0.9269, 0.9245, 0.9202, 0.9266, 0.9292, 0.9226, 0.9263, 0.9255, 0.9322,\n",
              "                      0.9386, 0.9238, 0.9265, 0.9279, 0.9346, 0.9232, 0.9314, 0.9270, 0.9301,\n",
              "                      0.9222, 0.9219, 0.9264, 0.9252, 0.9212, 0.9520, 0.9227, 0.9348, 0.9300,\n",
              "                      0.9244, 0.9269, 0.9279, 0.9266, 0.9348, 0.9217, 0.9185, 0.9235, 0.9269,\n",
              "                      0.9417, 0.9212, 0.9253, 0.9339, 0.9247, 0.9302, 0.9256, 0.9227, 0.9316,\n",
              "                      0.9245, 0.9257, 0.9200, 0.9244, 0.9241, 0.9230, 0.9400, 0.9233, 0.9234,\n",
              "                      0.9252, 0.9286, 0.9302, 0.9214, 0.9203, 0.9254, 0.9269, 0.9326, 0.9275,\n",
              "                      0.9286, 0.9287, 0.9325, 0.9275, 0.9262, 0.9310, 0.9347, 0.9231, 0.9252,\n",
              "                      0.9353, 0.9300, 0.9281, 0.9203, 0.9347, 0.9284, 0.9244, 0.9354, 0.9213,\n",
              "                      0.9275, 0.9450, 0.9304, 0.9227, 0.9271, 0.9240, 0.9205, 0.9322, 0.9495,\n",
              "                      0.9212, 0.9256, 0.9216, 0.9253, 0.9209, 0.9236, 0.9229, 0.9235, 0.9261,\n",
              "                      0.9225, 0.9266, 0.9280, 0.9329, 0.9264, 0.9244, 0.9369, 0.9306, 0.9255,\n",
              "                      0.9284, 0.9299, 0.9386, 0.9210, 0.9246, 0.9385, 0.9239, 0.9223, 0.9312,\n",
              "                      0.9257, 0.9247, 0.9209, 0.9225, 0.9214, 0.9200, 0.9275, 0.9275, 0.9273,\n",
              "                      0.9256, 0.9273, 0.9446, 0.9225, 0.9224, 0.9266, 0.9333, 0.9267, 0.9310,\n",
              "                      0.9481, 0.9274, 0.9358, 0.9223, 0.9310, 0.9300, 0.9219, 0.9212, 0.9251,\n",
              "                      0.9300, 0.9289, 0.9305, 0.9235, 0.9239, 0.9340, 0.9315, 0.9237, 0.9261,\n",
              "                      0.9262, 0.9258, 0.9262, 0.9242, 0.9255, 0.9262, 0.9284, 0.9278, 0.9534,\n",
              "                      0.9224, 0.9366, 0.9219, 0.9253, 0.9251, 0.9227, 0.9296, 0.9267, 0.9233,\n",
              "                      0.9344, 0.9274, 0.9256, 0.9240, 0.9262, 0.9262, 0.9232, 0.9244, 0.9196,\n",
              "                      0.9299, 0.9269, 0.9296, 0.9309, 0.9332, 0.9217, 0.9220, 0.9252, 0.9215,\n",
              "                      0.9282, 0.9303, 0.9191, 0.9257, 0.9200, 0.9276, 0.9252, 0.9248, 0.9325,\n",
              "                      0.9337, 0.9334, 0.9226, 0.9215, 0.9241, 0.9243, 0.9335, 0.9341, 0.9257,\n",
              "                      0.9277, 0.9341, 0.9242, 0.9249, 0.9261, 0.9296, 0.9328, 0.9234, 0.9275,\n",
              "                      0.9263, 0.9285, 0.9227, 0.9241, 0.9307, 0.9250, 0.9320, 0.9232, 0.9272,\n",
              "                      0.9306, 0.9210, 0.9275, 0.9271, 0.9227, 0.9245, 0.9285, 0.9236, 0.9245,\n",
              "                      0.9398, 0.9345, 0.9237, 0.9207, 0.9331, 0.9261, 0.9385, 0.9290, 0.9259,\n",
              "                      0.9255, 0.9254, 0.9268, 0.9279, 0.9287, 0.9294, 0.9233, 0.9238, 0.9357,\n",
              "                      0.9284, 0.9275, 0.9328, 0.9237, 0.9325, 0.9277, 0.9329, 0.9203, 0.9271,\n",
              "                      0.9319, 0.9327, 0.9345, 0.9365, 0.9293, 0.9380, 0.9327, 0.9282, 0.9226,\n",
              "                      0.9260, 0.9224, 0.9333, 0.9222, 0.9283, 0.9257, 0.9236, 0.9305, 0.9327,\n",
              "                      0.9265, 0.9303, 0.9369, 0.9337, 0.9330, 0.9227, 0.9261, 0.9297, 0.9245,\n",
              "                      0.9225, 0.9437, 0.9287, 0.9235, 0.9219, 0.9274, 0.9256, 0.9279, 0.9357,\n",
              "                      0.9361, 0.9253, 0.9247, 0.9363, 0.9198, 0.9311, 0.9291, 0.9351, 0.9249,\n",
              "                      0.9240, 0.9296, 0.9285, 0.9299, 0.9267, 0.9250, 0.9214, 0.9346, 0.9246,\n",
              "                      0.9260, 0.9262, 0.9201, 0.9284, 0.9225, 0.9322, 0.9259, 0.9217, 0.9450,\n",
              "                      0.9316, 0.9226, 0.9405, 0.9209, 0.9330, 0.9241, 0.9269, 0.9422, 0.9282,\n",
              "                      0.9209, 0.9238, 0.9224, 0.9301, 0.9211, 0.9287, 0.9218, 0.9185, 0.9367,\n",
              "                      0.9442, 0.9202, 0.9346, 0.9275, 0.9299, 0.9238, 0.9309, 0.9282, 0.9260,\n",
              "                      0.9207, 0.9296, 0.9231, 0.9253, 0.9208, 0.9306, 0.9233, 0.9201, 0.9233,\n",
              "                      0.9262, 0.9250, 0.9335, 0.9272, 0.9291, 0.9214, 0.9261, 0.9306, 0.9292,\n",
              "                      0.9275, 0.9297, 0.9223, 0.9191, 0.9292, 0.9310, 0.9310, 0.9232, 0.9461,\n",
              "                      0.9263, 0.9224, 0.9266, 0.9284, 0.9262, 0.9215, 0.9277, 0.9296, 0.9252,\n",
              "                      0.9243, 0.9250, 0.9248, 0.9263, 0.9231, 0.9240, 0.9238, 0.9227, 0.9264,\n",
              "                      0.9249, 0.9350, 0.9280, 0.9279, 0.9243, 0.9353, 0.9220, 0.9256, 0.9230,\n",
              "                      0.9323, 0.9255, 0.9352, 0.9232, 0.9211, 0.9314, 0.9245, 0.9268, 0.9238,\n",
              "                      0.9337, 0.9307, 0.9255, 0.9243, 0.9325, 0.9324, 0.9339, 0.9254, 0.9247,\n",
              "                      0.9309, 0.9272, 0.9305, 0.9300, 0.9227, 0.9235, 0.9253, 0.9243, 0.9273,\n",
              "                      0.9304, 0.9252, 0.9244, 0.9279, 0.9269, 0.9237, 0.9212, 0.9282, 0.9234,\n",
              "                      0.9303, 0.9305, 0.9568, 0.9340, 0.9187, 0.9282, 0.9319, 0.9315, 0.9264,\n",
              "                      0.9221, 0.9315, 0.9258, 0.9410, 0.9226, 0.9248, 0.9277, 0.9287, 0.9296,\n",
              "                      0.9316, 0.9351, 0.9320, 0.9244, 0.9302, 0.9281, 0.9310, 0.9277, 0.9310,\n",
              "                      0.9332, 0.9205, 0.9216, 0.9273, 0.9285, 0.9285, 0.9203, 0.9221])),\n",
              "             ('conv_block4.4.num_batches_tracked', tensor(1)),\n",
              "             ('conv_block5.0.weight',\n",
              "              tensor([[[[-0.0106, -0.0226,  0.0096],\n",
              "                        [-0.0182,  0.0022, -0.0167],\n",
              "                        [ 0.0019, -0.0117, -0.0009]],\n",
              "              \n",
              "                       [[ 0.0105,  0.0095,  0.0217],\n",
              "                        [ 0.0137,  0.0106, -0.0223],\n",
              "                        [-0.0248, -0.0177, -0.0132]],\n",
              "              \n",
              "                       [[-0.0156,  0.0103, -0.0189],\n",
              "                        [ 0.0045,  0.0191, -0.0019],\n",
              "                        [ 0.0096,  0.0078, -0.0250]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0031, -0.0105, -0.0171],\n",
              "                        [ 0.0056,  0.0174, -0.0209],\n",
              "                        [-0.0179,  0.0173, -0.0081]],\n",
              "              \n",
              "                       [[-0.0085, -0.0134,  0.0161],\n",
              "                        [ 0.0214,  0.0039, -0.0136],\n",
              "                        [-0.0129,  0.0148, -0.0082]],\n",
              "              \n",
              "                       [[ 0.0143,  0.0198,  0.0237],\n",
              "                        [ 0.0101, -0.0240, -0.0063],\n",
              "                        [ 0.0020, -0.0083,  0.0063]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0162, -0.0169,  0.0144],\n",
              "                        [-0.0077,  0.0215, -0.0032],\n",
              "                        [ 0.0155, -0.0215,  0.0062]],\n",
              "              \n",
              "                       [[-0.0249,  0.0126,  0.0065],\n",
              "                        [-0.0236, -0.0204,  0.0228],\n",
              "                        [ 0.0080,  0.0244,  0.0243]],\n",
              "              \n",
              "                       [[ 0.0081,  0.0093,  0.0104],\n",
              "                        [ 0.0033, -0.0203, -0.0157],\n",
              "                        [-0.0156, -0.0213, -0.0229]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0136,  0.0238, -0.0122],\n",
              "                        [ 0.0183,  0.0251, -0.0127],\n",
              "                        [ 0.0165, -0.0063,  0.0076]],\n",
              "              \n",
              "                       [[-0.0193,  0.0064,  0.0230],\n",
              "                        [ 0.0195,  0.0224,  0.0129],\n",
              "                        [-0.0015,  0.0035, -0.0053]],\n",
              "              \n",
              "                       [[ 0.0167,  0.0093, -0.0088],\n",
              "                        [ 0.0112, -0.0058, -0.0016],\n",
              "                        [ 0.0163, -0.0127, -0.0211]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0091, -0.0028,  0.0201],\n",
              "                        [ 0.0245,  0.0234,  0.0018],\n",
              "                        [-0.0216,  0.0169, -0.0077]],\n",
              "              \n",
              "                       [[-0.0244,  0.0040, -0.0001],\n",
              "                        [ 0.0137, -0.0047,  0.0016],\n",
              "                        [ 0.0146,  0.0029,  0.0076]],\n",
              "              \n",
              "                       [[ 0.0105,  0.0120,  0.0028],\n",
              "                        [-0.0243,  0.0220, -0.0118],\n",
              "                        [-0.0180, -0.0166, -0.0012]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0166, -0.0106, -0.0139],\n",
              "                        [-0.0154, -0.0111, -0.0245],\n",
              "                        [ 0.0008, -0.0085,  0.0138]],\n",
              "              \n",
              "                       [[ 0.0028,  0.0224,  0.0243],\n",
              "                        [ 0.0236,  0.0129,  0.0245],\n",
              "                        [-0.0062, -0.0199,  0.0245]],\n",
              "              \n",
              "                       [[ 0.0103,  0.0035,  0.0178],\n",
              "                        [ 0.0019,  0.0068,  0.0022],\n",
              "                        [ 0.0152,  0.0116, -0.0134]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0099, -0.0049, -0.0227],\n",
              "                        [-0.0221,  0.0124,  0.0039],\n",
              "                        [ 0.0012, -0.0131,  0.0023]],\n",
              "              \n",
              "                       [[-0.0086, -0.0103, -0.0177],\n",
              "                        [-0.0013,  0.0117,  0.0191],\n",
              "                        [-0.0168, -0.0165,  0.0062]],\n",
              "              \n",
              "                       [[ 0.0170, -0.0175,  0.0245],\n",
              "                        [ 0.0222,  0.0131, -0.0115],\n",
              "                        [ 0.0109, -0.0112, -0.0158]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0037, -0.0183,  0.0176],\n",
              "                        [ 0.0207, -0.0201,  0.0069],\n",
              "                        [ 0.0184,  0.0111, -0.0105]],\n",
              "              \n",
              "                       [[-0.0144, -0.0197,  0.0075],\n",
              "                        [-0.0059,  0.0154,  0.0035],\n",
              "                        [-0.0005, -0.0099, -0.0251]],\n",
              "              \n",
              "                       [[ 0.0159, -0.0196, -0.0029],\n",
              "                        [-0.0193, -0.0178,  0.0249],\n",
              "                        [ 0.0205,  0.0033,  0.0173]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0142, -0.0077, -0.0176],\n",
              "                        [-0.0150,  0.0168,  0.0197],\n",
              "                        [-0.0153,  0.0077, -0.0034]],\n",
              "              \n",
              "                       [[ 0.0025, -0.0235, -0.0214],\n",
              "                        [-0.0134, -0.0060,  0.0131],\n",
              "                        [-0.0033,  0.0029,  0.0135]],\n",
              "              \n",
              "                       [[ 0.0199,  0.0004,  0.0201],\n",
              "                        [-0.0077,  0.0064, -0.0099],\n",
              "                        [-0.0199, -0.0089,  0.0175]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0142,  0.0053, -0.0093],\n",
              "                        [ 0.0014,  0.0146,  0.0114],\n",
              "                        [-0.0002,  0.0158, -0.0250]],\n",
              "              \n",
              "                       [[ 0.0014, -0.0073,  0.0044],\n",
              "                        [-0.0004,  0.0159,  0.0099],\n",
              "                        [-0.0225,  0.0223, -0.0251]],\n",
              "              \n",
              "                       [[-0.0054, -0.0126,  0.0223],\n",
              "                        [-0.0207, -0.0235, -0.0205],\n",
              "                        [-0.0122, -0.0242, -0.0039]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0233,  0.0085, -0.0188],\n",
              "                        [-0.0180,  0.0034,  0.0117],\n",
              "                        [-0.0030, -0.0002, -0.0057]],\n",
              "              \n",
              "                       [[-0.0190,  0.0017, -0.0195],\n",
              "                        [ 0.0007, -0.0020,  0.0018],\n",
              "                        [ 0.0046,  0.0033, -0.0083]],\n",
              "              \n",
              "                       [[ 0.0076,  0.0240, -0.0088],\n",
              "                        [-0.0204,  0.0202,  0.0019],\n",
              "                        [-0.0049, -0.0081,  0.0020]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0142,  0.0161,  0.0073],\n",
              "                        [-0.0077, -0.0193,  0.0025],\n",
              "                        [-0.0205, -0.0224,  0.0124]],\n",
              "              \n",
              "                       [[-0.0081,  0.0079,  0.0091],\n",
              "                        [ 0.0140, -0.0012,  0.0200],\n",
              "                        [ 0.0249, -0.0220,  0.0126]],\n",
              "              \n",
              "                       [[ 0.0123, -0.0220, -0.0253],\n",
              "                        [ 0.0244,  0.0124, -0.0016],\n",
              "                        [-0.0232,  0.0096, -0.0080]]]])),\n",
              "             ('conv_block5.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block5.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv_block5.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block5.1.running_mean',\n",
              "              tensor([-0.0342,  0.0062, -0.0171,  0.0327,  0.0373, -0.1065,  0.0061,  0.0143,\n",
              "                       0.0464,  0.0300, -0.0389,  0.0403,  0.0314,  0.0561, -0.0863,  0.0772,\n",
              "                      -0.0147, -0.0068,  0.0459,  0.1355,  0.0651,  0.0299, -0.0207, -0.0241,\n",
              "                      -0.0649, -0.0325, -0.0965,  0.0328, -0.0077, -0.0117,  0.0044, -0.0135,\n",
              "                      -0.0529,  0.0603,  0.0668, -0.0030, -0.0096, -0.0124, -0.0465, -0.0643,\n",
              "                      -0.0198, -0.0300, -0.0248, -0.0088,  0.0656, -0.0086,  0.0302,  0.0666,\n",
              "                      -0.0986, -0.1016, -0.0268, -0.0435, -0.0328,  0.0212,  0.0686, -0.0097,\n",
              "                      -0.0053, -0.0014, -0.0017,  0.0485,  0.1051,  0.0010,  0.0047,  0.0598,\n",
              "                      -0.0035,  0.0326,  0.0387,  0.0319, -0.0498,  0.1210, -0.0022, -0.0488,\n",
              "                      -0.0033,  0.0515, -0.0036,  0.0734, -0.0490,  0.0084, -0.0412, -0.0446,\n",
              "                      -0.0346, -0.0105,  0.0303, -0.0149, -0.0329, -0.0117, -0.0548, -0.0179,\n",
              "                       0.0275, -0.0704, -0.0358, -0.0492, -0.0707, -0.0554, -0.0384, -0.1412,\n",
              "                       0.0317,  0.0025,  0.0230,  0.0522,  0.0347, -0.0611,  0.0404,  0.0645,\n",
              "                       0.0805,  0.0084, -0.0200,  0.0242,  0.0435,  0.0359,  0.1095,  0.0003,\n",
              "                      -0.0182, -0.0483, -0.0181,  0.0343,  0.0240, -0.0229, -0.0612, -0.0202,\n",
              "                       0.0105, -0.0035, -0.0539,  0.0452,  0.0217, -0.0250, -0.0818,  0.0715,\n",
              "                       0.1014,  0.0356,  0.0552, -0.0530, -0.1106, -0.0328, -0.0499, -0.0120,\n",
              "                      -0.0212,  0.0337, -0.0073,  0.0925,  0.0173,  0.0847,  0.0237, -0.0950,\n",
              "                      -0.1251,  0.0886, -0.0129, -0.0007,  0.0603, -0.0187,  0.0026, -0.0573,\n",
              "                       0.0255,  0.0041, -0.0281,  0.0117,  0.0390,  0.0068, -0.0407,  0.0052,\n",
              "                      -0.0108, -0.0244,  0.0482,  0.0403, -0.0870, -0.0223,  0.0244, -0.0010,\n",
              "                       0.0487, -0.0178,  0.0288, -0.1106, -0.0169,  0.0405, -0.0503,  0.0082,\n",
              "                       0.0396,  0.0318, -0.0181,  0.0988,  0.0231,  0.0263,  0.0118, -0.0184,\n",
              "                       0.0202, -0.0703,  0.0036,  0.0528, -0.1038, -0.0237, -0.0283,  0.0360,\n",
              "                      -0.1047, -0.0730,  0.0288,  0.0358,  0.0386, -0.1027, -0.1101,  0.0211,\n",
              "                      -0.0015, -0.0975,  0.0024, -0.0003, -0.1406, -0.0038,  0.1036,  0.0516,\n",
              "                       0.0271,  0.0612,  0.0123,  0.0412, -0.0067, -0.0334,  0.0654,  0.0522,\n",
              "                      -0.0013,  0.0202, -0.0383, -0.0508, -0.0357, -0.0162, -0.0256,  0.0701,\n",
              "                      -0.0790, -0.0710, -0.0093, -0.0387,  0.0239, -0.0187, -0.0045,  0.0022,\n",
              "                      -0.0203, -0.0125,  0.0361,  0.0492,  0.0049, -0.0589,  0.0184, -0.0553,\n",
              "                      -0.0092,  0.0203,  0.0131,  0.0249,  0.0690, -0.0016,  0.1052,  0.0431,\n",
              "                      -0.0528, -0.0373, -0.0250, -0.0257,  0.0899, -0.0003, -0.0315,  0.0185,\n",
              "                       0.0124,  0.0584,  0.0338, -0.0348, -0.0377,  0.0328,  0.0164,  0.0742,\n",
              "                      -0.0037, -0.0069, -0.0355, -0.0944,  0.0084,  0.0475, -0.0356,  0.0013,\n",
              "                      -0.0224, -0.0065, -0.0322,  0.0156, -0.0157, -0.0420, -0.0471, -0.0200,\n",
              "                       0.0570,  0.0065,  0.0068,  0.0639,  0.0179,  0.0218, -0.0382, -0.0252,\n",
              "                       0.0997, -0.0182, -0.0519,  0.0729,  0.0550,  0.0183,  0.0104,  0.0306,\n",
              "                      -0.0338, -0.0268, -0.0624, -0.0344,  0.0086,  0.0108,  0.0978,  0.0134,\n",
              "                      -0.0176,  0.0372, -0.0111, -0.1041, -0.0314, -0.0407, -0.0588,  0.0480,\n",
              "                      -0.0018,  0.1059, -0.0563, -0.0840, -0.0079, -0.0210,  0.0128, -0.0764,\n",
              "                       0.0546, -0.0299, -0.0633, -0.0306, -0.0876,  0.0522,  0.0012,  0.0223,\n",
              "                      -0.0510,  0.0021, -0.0171,  0.0228, -0.0806, -0.0340,  0.0075, -0.0899,\n",
              "                       0.0311,  0.0544, -0.0565,  0.0185,  0.0572, -0.0239,  0.0362,  0.0204,\n",
              "                       0.0073,  0.0560, -0.0139, -0.0200, -0.0188, -0.0329, -0.0032,  0.0575,\n",
              "                      -0.0186, -0.1314,  0.0333,  0.0766,  0.0272,  0.0338, -0.0577,  0.0094,\n",
              "                      -0.0226,  0.0302, -0.0102,  0.0120, -0.0051,  0.0588,  0.0917,  0.0215,\n",
              "                       0.0804,  0.0312,  0.0600,  0.0014,  0.0590,  0.0303, -0.0324, -0.0236,\n",
              "                       0.0293,  0.0341,  0.0329, -0.0192, -0.0036, -0.0744, -0.0743, -0.0081,\n",
              "                      -0.0948,  0.0557, -0.0083, -0.0829, -0.1101,  0.0087, -0.0518, -0.0054,\n",
              "                       0.0532, -0.0296, -0.0208, -0.0684, -0.0723,  0.0662, -0.0451,  0.1611,\n",
              "                      -0.0437,  0.0051,  0.0152, -0.0158, -0.0070,  0.0452, -0.0232, -0.0638,\n",
              "                       0.0154, -0.0426, -0.0037,  0.0304,  0.0017, -0.0301,  0.0217,  0.0269,\n",
              "                       0.0435,  0.0145, -0.0401,  0.0356, -0.0039,  0.0607, -0.0952,  0.0346,\n",
              "                       0.0177, -0.0189,  0.0198,  0.0314,  0.0161, -0.0077, -0.0250, -0.0102,\n",
              "                      -0.0678,  0.0111, -0.0770,  0.0305,  0.0559, -0.0066,  0.0479,  0.0347,\n",
              "                      -0.0596, -0.0406, -0.0238,  0.0137,  0.0246, -0.0054, -0.0518,  0.0149,\n",
              "                       0.0495, -0.0063, -0.0378, -0.0160, -0.0771, -0.0379, -0.1007,  0.2019,\n",
              "                      -0.0671,  0.0045, -0.0449, -0.0077,  0.0138, -0.0004, -0.0495, -0.0195,\n",
              "                      -0.0515, -0.0880,  0.0003, -0.0838,  0.0225,  0.0052, -0.0449,  0.0042,\n",
              "                       0.0342, -0.0473, -0.0171,  0.0527,  0.0437,  0.0029, -0.0583, -0.0189,\n",
              "                      -0.0333, -0.0698, -0.0300,  0.0280,  0.0174, -0.0457, -0.0241, -0.0395,\n",
              "                       0.1195,  0.0080,  0.0050, -0.0201,  0.0221,  0.0717,  0.0020,  0.0359,\n",
              "                       0.0484,  0.0140,  0.0403,  0.0598, -0.0282, -0.0129,  0.0140,  0.0412,\n",
              "                       0.0176, -0.0357, -0.0185,  0.0537,  0.0125,  0.0638,  0.0316,  0.0008])),\n",
              "             ('conv_block5.1.running_var',\n",
              "              tensor([0.9523, 0.9342, 0.9279, 0.9199, 0.9207, 0.9214, 0.9371, 0.9591, 0.9386,\n",
              "                      0.9366, 0.9259, 0.9866, 0.9657, 0.9467, 0.9409, 0.9292, 0.9381, 0.9399,\n",
              "                      0.9332, 0.9268, 0.9436, 0.9246, 0.9531, 1.0442, 0.9659, 0.9344, 0.9656,\n",
              "                      0.9215, 0.9237, 0.9369, 0.9359, 0.9523, 0.9397, 0.9171, 0.9228, 0.9920,\n",
              "                      0.9546, 0.9257, 0.9335, 0.9225, 0.9844, 0.9496, 0.9465, 1.0410, 0.9304,\n",
              "                      0.9280, 0.9442, 0.9418, 0.9248, 0.9744, 0.9185, 0.9362, 0.9305, 0.9350,\n",
              "                      0.9730, 0.9320, 0.9303, 0.9238, 0.9248, 0.9321, 0.9484, 0.9295, 0.9525,\n",
              "                      0.9342, 0.9276, 0.9422, 0.9405, 0.9209, 0.9343, 0.9591, 0.9334, 0.9334,\n",
              "                      0.9395, 0.9346, 0.9330, 0.9273, 0.9328, 0.9268, 0.9478, 0.9349, 0.9624,\n",
              "                      0.9303, 0.9402, 0.9399, 0.9325, 0.9371, 0.9496, 0.9268, 0.9377, 0.9240,\n",
              "                      0.9704, 0.9303, 0.9310, 0.9277, 0.9274, 0.9805, 0.9343, 0.9273, 1.0059,\n",
              "                      0.9406, 0.9228, 0.9980, 0.9317, 1.0083, 0.9724, 0.9328, 0.9241, 0.9333,\n",
              "                      0.9531, 0.9247, 0.9966, 0.9320, 0.9286, 0.9158, 0.9541, 0.9428, 0.9366,\n",
              "                      0.9206, 0.9363, 0.9600, 0.9449, 0.9393, 0.9435, 0.9745, 0.9440, 0.9789,\n",
              "                      0.9622, 0.9294, 1.0077, 0.9309, 0.9223, 0.9311, 0.9203, 0.9302, 0.9342,\n",
              "                      0.9343, 0.9194, 0.9319, 0.9304, 0.9216, 0.9399, 0.9352, 0.9401, 0.9472,\n",
              "                      0.9485, 0.9284, 0.9528, 0.9547, 0.9312, 0.9288, 0.9622, 0.9371, 0.9901,\n",
              "                      0.9741, 0.9302, 0.9236, 0.9281, 0.9417, 0.9473, 0.9292, 0.9223, 0.9665,\n",
              "                      0.9435, 0.9256, 0.9893, 0.9335, 0.9331, 0.9386, 0.9236, 0.9260, 0.9278,\n",
              "                      0.9336, 0.9686, 0.9378, 0.9332, 0.9344, 0.9440, 0.9329, 0.9337, 0.9522,\n",
              "                      0.9258, 0.9397, 0.9276, 0.9200, 0.9546, 0.9348, 0.9401, 0.9583, 0.9596,\n",
              "                      0.9268, 0.9286, 0.9357, 0.9489, 0.9490, 0.9189, 0.9538, 0.9387, 0.9253,\n",
              "                      0.9252, 0.9459, 0.9289, 0.9234, 0.9481, 0.9307, 0.9627, 0.9328, 0.9761,\n",
              "                      0.9291, 0.9291, 0.9244, 0.9211, 0.9827, 0.9315, 0.9182, 0.9279, 0.9222,\n",
              "                      0.9238, 0.9318, 0.9309, 0.9483, 0.9435, 0.9455, 0.9331, 0.9335, 0.9362,\n",
              "                      0.9231, 0.9364, 0.9458, 0.9395, 0.9282, 0.9348, 0.9309, 0.9222, 0.9323,\n",
              "                      0.9478, 0.9391, 0.9504, 0.9314, 0.9551, 0.9310, 0.9259, 0.9231, 0.9181,\n",
              "                      0.9575, 0.9629, 0.9521, 0.9700, 0.9248, 0.9286, 0.9442, 0.9223, 0.9615,\n",
              "                      0.9209, 0.9289, 0.9763, 0.9792, 0.9887, 0.9228, 0.9290, 0.9251, 0.9521,\n",
              "                      0.9273, 0.9794, 0.9188, 0.9509, 0.9557, 0.9398, 0.9461, 0.9354, 0.9327,\n",
              "                      0.9279, 0.9180, 0.9374, 0.9373, 0.9355, 0.9272, 0.9413, 0.9564, 0.9414,\n",
              "                      0.9192, 0.9936, 0.9435, 0.9358, 0.9601, 0.9650, 0.9366, 0.9760, 0.9259,\n",
              "                      0.9304, 0.9295, 0.9500, 0.9464, 0.9416, 0.9418, 0.9431, 0.9220, 0.9263,\n",
              "                      0.9423, 0.9738, 0.9493, 0.9452, 0.9366, 0.9434, 0.9184, 0.9237, 0.9201,\n",
              "                      0.9377, 0.9314, 0.9198, 0.9288, 0.9293, 0.9546, 0.9306, 0.9326, 0.9344,\n",
              "                      0.9254, 0.9556, 0.9343, 0.9317, 0.9386, 0.9217, 0.9336, 0.9404, 0.9183,\n",
              "                      0.9339, 0.9365, 0.9798, 0.9289, 0.9195, 0.9860, 0.9434, 0.9414, 0.9469,\n",
              "                      0.9198, 0.9335, 0.9450, 0.9173, 0.9225, 0.9785, 0.9249, 0.9457, 0.9256,\n",
              "                      0.9340, 0.9296, 0.9821, 0.9342, 0.9182, 0.9296, 0.9310, 0.9361, 0.9284,\n",
              "                      0.9440, 0.9371, 0.9391, 0.9391, 0.9513, 0.9268, 0.9401, 0.9279, 0.9249,\n",
              "                      0.9365, 0.9192, 0.9349, 0.9591, 0.9258, 0.9585, 0.9361, 0.9352, 0.9353,\n",
              "                      0.9637, 0.9590, 0.9412, 0.9234, 0.9696, 0.9538, 0.9486, 0.9282, 0.9634,\n",
              "                      0.9209, 0.9216, 0.9539, 0.9885, 0.9243, 0.9261, 0.9227, 0.9477, 0.9309,\n",
              "                      0.9766, 0.9298, 0.9288, 0.9232, 0.9365, 0.9345, 0.9810, 0.9281, 0.9286,\n",
              "                      0.9488, 0.9211, 0.9301, 0.9273, 0.9453, 0.9326, 0.9406, 0.9248, 0.9340,\n",
              "                      0.9290, 0.9299, 0.9281, 0.9568, 0.9251, 0.9381, 0.9391, 0.9210, 0.9224,\n",
              "                      0.9154, 0.9254, 0.9277, 0.9306, 0.9232, 1.0202, 0.9386, 0.9588, 0.9245,\n",
              "                      0.9501, 0.9392, 0.9256, 0.9431, 0.9245, 0.9217, 0.9448, 0.9392, 0.9406,\n",
              "                      0.9801, 0.9701, 0.9244, 0.9530, 0.9306, 0.9867, 0.9231, 0.9251, 0.9330,\n",
              "                      0.9326, 0.9370, 0.9424, 0.9288, 0.9506, 0.9542, 0.9527, 0.9426, 0.9643,\n",
              "                      0.9260, 0.9210, 0.9758, 0.9579, 0.9347, 0.9436, 0.9354, 0.9286, 0.9532,\n",
              "                      0.9315, 0.9294, 0.9277, 0.9598, 0.9529, 0.9427, 0.9633, 0.9239, 0.9226,\n",
              "                      0.9452, 0.9318, 0.9543, 0.9586, 0.9374, 0.9197, 0.9372, 0.9882, 0.9283,\n",
              "                      0.9397, 0.9581, 0.9212, 0.9215, 0.9383, 0.9280, 0.9293, 0.9749, 0.9221,\n",
              "                      0.9274, 0.9536, 0.9460, 0.9607, 0.9179, 0.9186, 0.9935, 0.9644, 0.9313,\n",
              "                      0.9296, 0.9318, 0.9186, 0.9231, 0.9489, 0.9417, 0.9201, 1.0119, 0.9352,\n",
              "                      0.9790, 0.9180, 0.9410, 0.9935, 0.9337, 0.9296, 0.9234, 0.9541])),\n",
              "             ('conv_block5.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv_block5.3.weight',\n",
              "              tensor([[[[-0.0251, -0.0055,  0.0040],\n",
              "                        [ 0.0002, -0.0129,  0.0047],\n",
              "                        [-0.0101,  0.0193,  0.0229]],\n",
              "              \n",
              "                       [[ 0.0193, -0.0249, -0.0125],\n",
              "                        [ 0.0127,  0.0004,  0.0139],\n",
              "                        [-0.0148, -0.0101,  0.0116]],\n",
              "              \n",
              "                       [[-0.0199,  0.0112,  0.0225],\n",
              "                        [-0.0208,  0.0141, -0.0211],\n",
              "                        [ 0.0113,  0.0229,  0.0085]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0015,  0.0233,  0.0045],\n",
              "                        [-0.0226, -0.0165,  0.0050],\n",
              "                        [-0.0234,  0.0199, -0.0047]],\n",
              "              \n",
              "                       [[-0.0194,  0.0198,  0.0185],\n",
              "                        [ 0.0208,  0.0048, -0.0201],\n",
              "                        [ 0.0134, -0.0210, -0.0183]],\n",
              "              \n",
              "                       [[-0.0127,  0.0203,  0.0180],\n",
              "                        [ 0.0067, -0.0190,  0.0136],\n",
              "                        [ 0.0041, -0.0128, -0.0055]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0131, -0.0015,  0.0142],\n",
              "                        [-0.0164, -0.0039,  0.0135],\n",
              "                        [ 0.0038,  0.0191, -0.0038]],\n",
              "              \n",
              "                       [[ 0.0065,  0.0126,  0.0029],\n",
              "                        [ 0.0169, -0.0110,  0.0045],\n",
              "                        [-0.0091, -0.0234,  0.0193]],\n",
              "              \n",
              "                       [[-0.0055,  0.0067, -0.0176],\n",
              "                        [-0.0219,  0.0213,  0.0212],\n",
              "                        [ 0.0227, -0.0073, -0.0178]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0146, -0.0165, -0.0111],\n",
              "                        [-0.0198,  0.0097,  0.0144],\n",
              "                        [ 0.0199,  0.0196,  0.0241]],\n",
              "              \n",
              "                       [[ 0.0015,  0.0017, -0.0064],\n",
              "                        [-0.0110, -0.0054, -0.0171],\n",
              "                        [-0.0173, -0.0121, -0.0161]],\n",
              "              \n",
              "                       [[ 0.0212, -0.0152,  0.0182],\n",
              "                        [ 0.0044,  0.0238,  0.0176],\n",
              "                        [-0.0236, -0.0217, -0.0245]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0232,  0.0066, -0.0012],\n",
              "                        [-0.0114, -0.0151, -0.0200],\n",
              "                        [-0.0197,  0.0228,  0.0034]],\n",
              "              \n",
              "                       [[ 0.0136,  0.0202, -0.0228],\n",
              "                        [ 0.0056, -0.0020,  0.0060],\n",
              "                        [-0.0183, -0.0166, -0.0171]],\n",
              "              \n",
              "                       [[ 0.0205,  0.0075, -0.0139],\n",
              "                        [-0.0236, -0.0221,  0.0210],\n",
              "                        [-0.0153,  0.0105, -0.0003]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0092, -0.0156, -0.0246],\n",
              "                        [ 0.0136, -0.0158,  0.0227],\n",
              "                        [-0.0045, -0.0235,  0.0150]],\n",
              "              \n",
              "                       [[-0.0156,  0.0201, -0.0236],\n",
              "                        [-0.0077,  0.0208,  0.0034],\n",
              "                        [-0.0078,  0.0137, -0.0115]],\n",
              "              \n",
              "                       [[-0.0226, -0.0252, -0.0114],\n",
              "                        [ 0.0018, -0.0163,  0.0056],\n",
              "                        [ 0.0011,  0.0139, -0.0007]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0011, -0.0113,  0.0235],\n",
              "                        [-0.0130,  0.0060, -0.0077],\n",
              "                        [ 0.0227,  0.0242, -0.0088]],\n",
              "              \n",
              "                       [[-0.0106, -0.0238,  0.0073],\n",
              "                        [ 0.0135,  0.0070, -0.0151],\n",
              "                        [-0.0020, -0.0063, -0.0167]],\n",
              "              \n",
              "                       [[ 0.0158, -0.0153,  0.0165],\n",
              "                        [ 0.0076, -0.0045, -0.0236],\n",
              "                        [ 0.0193, -0.0184, -0.0056]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0063,  0.0205, -0.0041],\n",
              "                        [ 0.0252, -0.0224,  0.0036],\n",
              "                        [ 0.0093,  0.0052, -0.0118]],\n",
              "              \n",
              "                       [[-0.0117, -0.0178,  0.0098],\n",
              "                        [-0.0050,  0.0090, -0.0105],\n",
              "                        [ 0.0022,  0.0083, -0.0079]],\n",
              "              \n",
              "                       [[ 0.0162,  0.0097, -0.0177],\n",
              "                        [-0.0001,  0.0057,  0.0172],\n",
              "                        [ 0.0199,  0.0096, -0.0083]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0245,  0.0116,  0.0024],\n",
              "                        [ 0.0126, -0.0190,  0.0182],\n",
              "                        [ 0.0117, -0.0235,  0.0192]],\n",
              "              \n",
              "                       [[ 0.0116,  0.0108, -0.0016],\n",
              "                        [ 0.0225, -0.0042, -0.0017],\n",
              "                        [-0.0254,  0.0010, -0.0050]],\n",
              "              \n",
              "                       [[ 0.0011,  0.0255,  0.0089],\n",
              "                        [-0.0111,  0.0186, -0.0104],\n",
              "                        [-0.0084, -0.0050,  0.0145]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0251, -0.0192,  0.0108],\n",
              "                        [ 0.0103, -0.0219, -0.0233],\n",
              "                        [ 0.0240, -0.0008, -0.0075]],\n",
              "              \n",
              "                       [[-0.0154,  0.0059,  0.0068],\n",
              "                        [-0.0031,  0.0245,  0.0110],\n",
              "                        [-0.0131, -0.0197,  0.0209]],\n",
              "              \n",
              "                       [[ 0.0107,  0.0171,  0.0078],\n",
              "                        [-0.0158,  0.0163,  0.0234],\n",
              "                        [ 0.0211,  0.0233, -0.0221]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0211, -0.0098, -0.0135],\n",
              "                        [ 0.0219,  0.0154, -0.0014],\n",
              "                        [ 0.0092, -0.0192, -0.0020]],\n",
              "              \n",
              "                       [[-0.0039, -0.0184, -0.0183],\n",
              "                        [ 0.0069, -0.0202,  0.0135],\n",
              "                        [-0.0182, -0.0118, -0.0193]],\n",
              "              \n",
              "                       [[ 0.0113, -0.0094, -0.0244],\n",
              "                        [ 0.0125,  0.0089, -0.0127],\n",
              "                        [-0.0145, -0.0089, -0.0063]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0065,  0.0045,  0.0211],\n",
              "                        [ 0.0244,  0.0062,  0.0134],\n",
              "                        [-0.0121, -0.0003,  0.0101]],\n",
              "              \n",
              "                       [[-0.0202, -0.0182, -0.0118],\n",
              "                        [-0.0241,  0.0170, -0.0105],\n",
              "                        [-0.0155, -0.0124,  0.0028]],\n",
              "              \n",
              "                       [[-0.0086, -0.0043,  0.0109],\n",
              "                        [ 0.0141, -0.0009,  0.0162],\n",
              "                        [ 0.0058, -0.0188, -0.0014]]]])),\n",
              "             ('conv_block5.3.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block5.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv_block5.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv_block5.4.running_mean',\n",
              "              tensor([-1.8894e-02,  8.6377e-04, -4.9964e-03, -1.2873e-02,  2.5212e-02,\n",
              "                       1.3813e-02,  9.3731e-03, -4.1825e-02,  1.2297e-02,  1.2078e-02,\n",
              "                      -3.2948e-04,  6.7474e-03,  2.1451e-02,  1.5914e-02, -6.1766e-03,\n",
              "                      -1.4069e-02,  1.7654e-02,  9.6168e-03,  3.8867e-03, -1.1577e-02,\n",
              "                      -2.8662e-02,  2.5118e-02,  1.6927e-02, -3.2478e-02, -4.7406e-02,\n",
              "                      -1.2990e-02, -3.0797e-02,  2.7096e-03,  2.5882e-02,  4.8889e-03,\n",
              "                      -6.7951e-03,  5.1498e-03,  2.2590e-03,  3.0136e-02,  1.9928e-02,\n",
              "                      -3.8829e-02, -2.3066e-02,  1.2914e-02, -1.3213e-03,  7.2013e-04,\n",
              "                      -2.0784e-02, -2.7354e-02, -3.8374e-02,  2.3651e-02,  1.3101e-02,\n",
              "                      -3.0524e-02,  1.4562e-04, -1.6649e-02, -1.1701e-02, -3.1504e-02,\n",
              "                      -4.8761e-03, -2.1704e-02,  3.0050e-02,  3.3791e-02,  1.7955e-02,\n",
              "                      -1.6130e-02, -1.5715e-02,  2.7782e-02, -2.3330e-03, -2.1229e-02,\n",
              "                      -1.6135e-03, -1.9602e-02, -3.8378e-03, -9.6395e-03,  2.3381e-02,\n",
              "                      -1.2301e-03, -1.5959e-03,  9.4722e-04, -2.7888e-03,  1.6923e-02,\n",
              "                       6.7632e-03, -1.5579e-02,  2.1268e-02, -1.4587e-03, -1.3778e-03,\n",
              "                       3.7006e-02,  6.1985e-04,  1.5660e-02,  5.2131e-03, -2.5636e-02,\n",
              "                       4.2995e-03, -3.7344e-02,  2.8395e-02, -2.4721e-02,  9.6778e-03,\n",
              "                       3.5380e-02, -1.6952e-03,  4.5046e-03, -6.6027e-04,  3.2794e-02,\n",
              "                      -1.6875e-02, -4.3595e-03,  3.4685e-04, -1.7099e-02,  8.9950e-03,\n",
              "                      -1.6691e-02, -2.0313e-02, -1.5449e-02,  3.0216e-02,  2.3335e-02,\n",
              "                       7.7870e-03, -5.1263e-02, -4.1987e-02, -1.5897e-02,  4.6486e-02,\n",
              "                      -4.3919e-03,  1.4318e-02,  1.8122e-02, -5.3939e-03, -8.3550e-03,\n",
              "                       3.3341e-02, -2.7195e-02,  1.5391e-02,  2.9346e-02, -4.4751e-02,\n",
              "                      -3.5989e-02,  8.2020e-04,  1.0387e-02,  2.0666e-02, -3.4978e-02,\n",
              "                      -5.9819e-02, -4.8634e-02, -4.3409e-02, -2.7735e-02,  2.0484e-02,\n",
              "                       2.0165e-02, -1.4063e-02,  9.7212e-03,  2.4641e-02,  2.4152e-02,\n",
              "                       7.0259e-03,  3.4028e-03,  1.9972e-03, -3.1776e-02, -1.3284e-02,\n",
              "                       1.9087e-02, -2.5087e-02,  3.8715e-03,  1.5409e-02, -3.6915e-02,\n",
              "                      -3.4393e-03,  8.9060e-03,  1.7625e-02,  1.0253e-02,  1.7534e-02,\n",
              "                       7.4781e-03, -3.1323e-02, -3.3260e-02, -4.8190e-03, -1.2088e-03,\n",
              "                       1.9733e-02, -5.3445e-03,  1.3665e-02,  1.0606e-02,  2.1677e-02,\n",
              "                       2.9811e-02,  1.6487e-02,  3.2741e-02,  5.0626e-02,  1.5020e-02,\n",
              "                      -1.6314e-02,  3.4072e-02, -3.2096e-02,  3.7658e-02, -2.5882e-02,\n",
              "                      -4.0285e-02,  4.5624e-02,  8.9574e-03, -1.2870e-02, -3.8356e-02,\n",
              "                       6.9149e-03, -1.5240e-02,  2.4971e-02,  3.6592e-02, -3.9825e-03,\n",
              "                       1.7847e-02,  1.4022e-02, -1.9528e-03,  1.0497e-03, -1.7179e-02,\n",
              "                      -5.4090e-03, -1.4562e-02,  1.4433e-02, -9.5877e-03, -6.0966e-02,\n",
              "                       8.3879e-03, -4.7794e-03, -1.1818e-02,  5.6441e-03,  4.8059e-02,\n",
              "                       5.4034e-03, -2.7524e-02,  1.9144e-02, -2.1385e-02, -1.3935e-02,\n",
              "                       2.5124e-02, -1.0069e-02, -3.1638e-02,  2.5825e-03,  1.0961e-02,\n",
              "                      -3.3200e-02,  8.5002e-03, -1.9781e-02, -4.2946e-02, -1.5671e-03,\n",
              "                      -1.6037e-02, -4.6593e-02,  4.1281e-02, -3.9201e-02, -1.4879e-02,\n",
              "                      -3.1630e-02,  2.0341e-02,  7.5594e-03,  1.6679e-02,  4.4161e-03,\n",
              "                       1.0041e-02, -6.7684e-03, -4.8362e-02,  4.8900e-02,  2.2457e-02,\n",
              "                      -1.4557e-02,  2.7522e-02,  1.5190e-02, -6.0877e-04,  5.6920e-03,\n",
              "                      -7.5301e-03, -2.1147e-03,  1.5498e-02, -5.2023e-02, -1.6355e-02,\n",
              "                      -7.4839e-03, -3.1864e-02,  2.8161e-02, -2.4365e-02, -1.7505e-02,\n",
              "                      -1.6299e-02,  2.6781e-02,  8.9610e-04, -8.3645e-03,  1.3960e-02,\n",
              "                      -3.6780e-02,  1.2727e-02,  6.3466e-02, -3.2620e-02,  6.0727e-03,\n",
              "                      -1.0398e-02, -1.9891e-02, -1.8251e-03,  9.3628e-03, -4.4291e-03,\n",
              "                      -3.6516e-02,  2.4676e-02,  1.9781e-02,  2.4212e-02,  4.4589e-02,\n",
              "                       2.9759e-02, -2.9158e-02,  5.5904e-03, -1.3388e-02,  1.7165e-02,\n",
              "                       4.9480e-02, -2.4053e-03, -4.1138e-02, -6.1952e-03,  2.6436e-02,\n",
              "                       1.9938e-02,  1.7578e-03,  2.5539e-02, -1.9417e-02,  1.4326e-02,\n",
              "                       2.0631e-02, -4.8511e-02,  3.2665e-02,  4.2039e-02,  2.2250e-02,\n",
              "                      -2.7940e-02,  6.4380e-02,  1.3755e-03,  6.7608e-02, -1.8283e-03,\n",
              "                      -9.9526e-03,  2.8339e-02, -7.4063e-03, -3.7955e-02, -7.5605e-03,\n",
              "                      -3.9588e-03,  3.1102e-02,  3.3639e-02,  3.4680e-02,  3.3449e-03,\n",
              "                      -2.8911e-02, -4.7293e-03,  2.2029e-02, -8.2021e-03, -5.7143e-03,\n",
              "                       5.9651e-03, -3.5050e-02, -4.7254e-03, -1.2674e-02,  1.7098e-02,\n",
              "                      -7.9017e-03,  1.9321e-02,  1.0454e-02,  4.6659e-03, -1.7979e-02,\n",
              "                       4.7407e-03,  9.5078e-04, -3.3600e-02,  6.6987e-05,  1.7847e-02,\n",
              "                      -4.8154e-03, -1.0222e-02,  9.3040e-03,  2.2812e-02, -1.2974e-03,\n",
              "                      -5.1307e-03,  1.7778e-03,  1.1326e-02,  1.6049e-02,  5.0770e-03,\n",
              "                      -2.4634e-02,  1.3243e-02, -1.8161e-02,  2.0455e-03, -2.5656e-02,\n",
              "                      -6.4979e-03, -2.6932e-02, -4.0065e-02,  7.8478e-03, -1.7846e-02,\n",
              "                      -2.7083e-02,  1.8789e-02,  1.6451e-02, -3.0215e-02, -2.6410e-03,\n",
              "                      -2.0564e-02,  6.9488e-03, -4.3064e-02, -2.6728e-03, -2.6217e-02,\n",
              "                      -1.8710e-02,  2.3894e-02,  4.0092e-02,  1.2808e-02, -1.2702e-02,\n",
              "                      -1.8463e-02,  3.4087e-02, -3.9852e-03, -3.3932e-02,  3.8921e-03,\n",
              "                      -3.0223e-02,  1.0176e-02, -7.9782e-03, -2.9607e-02,  3.1746e-02,\n",
              "                       3.8560e-03, -3.2529e-03, -1.9652e-03, -5.0062e-03,  3.6468e-03,\n",
              "                       3.2952e-03, -8.5552e-03, -2.6112e-02, -2.8402e-03,  1.0007e-02,\n",
              "                      -2.1519e-02, -4.5340e-02,  1.1038e-02,  1.4920e-02,  4.5355e-02,\n",
              "                       1.3464e-02,  1.3373e-02,  3.6094e-03,  2.1954e-03,  2.6911e-02,\n",
              "                      -6.8388e-03, -1.4357e-02, -4.5771e-03, -1.5615e-02,  7.2409e-03,\n",
              "                       2.3605e-02, -3.7189e-03, -9.7118e-03, -1.6462e-02, -1.8803e-02,\n",
              "                      -3.2088e-02,  8.1841e-04, -8.4033e-03, -2.0778e-02, -6.7267e-03,\n",
              "                      -1.2973e-03,  1.2676e-02,  4.5818e-03, -4.6705e-02, -1.0431e-02,\n",
              "                      -1.9296e-02,  9.2351e-03, -2.2712e-02, -3.8868e-03,  3.6506e-02,\n",
              "                      -1.4860e-02, -1.6456e-02,  2.6409e-02, -7.0753e-03, -5.8590e-03,\n",
              "                       7.5886e-03,  2.7275e-02,  1.2665e-02, -3.3094e-02,  7.8695e-03,\n",
              "                      -1.3238e-02,  8.0997e-03,  3.2377e-02,  1.5165e-02,  6.0899e-03,\n",
              "                       2.9863e-02, -7.8136e-03,  4.9904e-02, -5.8021e-03,  8.0534e-03,\n",
              "                       2.6962e-03,  1.8918e-02,  2.7823e-03, -2.1119e-03,  7.4468e-03,\n",
              "                       2.0704e-02, -2.5208e-02,  2.3950e-03, -1.9953e-02,  3.0557e-03,\n",
              "                      -2.5347e-02,  7.1423e-03,  4.5253e-02, -8.5313e-03,  1.3532e-02,\n",
              "                      -3.5467e-02,  6.3473e-03, -9.8297e-03,  4.5232e-02, -1.1181e-02,\n",
              "                       1.9463e-02,  5.3567e-03,  3.6134e-02, -3.1268e-02,  5.8620e-03,\n",
              "                       1.4618e-02, -2.3817e-02,  4.3041e-02,  4.3524e-02,  1.7924e-02,\n",
              "                      -3.4672e-03,  4.2598e-02, -6.3301e-03, -5.2033e-02,  3.2883e-03,\n",
              "                      -1.2123e-02, -1.6827e-02, -1.4555e-02,  3.9437e-02, -1.1940e-02,\n",
              "                       1.3899e-02, -2.7719e-02,  2.6317e-02,  2.4035e-02,  4.9079e-03,\n",
              "                      -9.9968e-03,  3.2631e-02,  3.0037e-02,  7.1507e-03,  7.8830e-03,\n",
              "                      -2.0158e-02, -5.3587e-03,  1.9571e-02, -2.2971e-03,  4.4332e-02,\n",
              "                       2.4969e-02, -6.0074e-04,  1.3038e-02,  1.2018e-02,  1.0767e-02,\n",
              "                       3.9113e-02,  8.6296e-03, -2.9911e-02, -1.1165e-02, -1.3999e-03,\n",
              "                      -1.3724e-02,  9.3532e-03, -1.8942e-03,  2.3560e-02,  1.2978e-02,\n",
              "                       2.4814e-02,  4.1942e-02, -2.3989e-02, -6.7744e-03,  1.1126e-02,\n",
              "                       1.0536e-03,  1.8032e-02,  2.2875e-02, -2.4535e-02,  2.1435e-02,\n",
              "                       2.1512e-02, -2.7186e-02, -4.1634e-02,  3.0473e-02, -3.4052e-02,\n",
              "                      -2.1385e-02,  2.5817e-02, -3.2548e-02,  1.3828e-02, -2.7556e-02,\n",
              "                       1.1950e-02,  2.9068e-02])),\n",
              "             ('conv_block5.4.running_var',\n",
              "              tensor([0.9133, 0.9167, 0.9192, 0.9297, 0.9145, 0.9189, 0.9102, 0.9121, 0.9139,\n",
              "                      0.9146, 0.9159, 0.9209, 0.9378, 0.9262, 0.9251, 0.9114, 0.9100, 0.9196,\n",
              "                      0.9292, 0.9081, 0.9139, 0.9132, 0.9232, 0.9132, 0.9188, 0.9093, 0.9213,\n",
              "                      0.9152, 0.9165, 0.9211, 0.9157, 0.9157, 0.9149, 0.9140, 0.9185, 0.9144,\n",
              "                      0.9201, 0.9159, 0.9151, 0.9131, 0.9242, 0.9128, 0.9263, 0.9219, 0.9257,\n",
              "                      0.9285, 0.9291, 0.9145, 0.9259, 0.9159, 0.9160, 0.9155, 0.9156, 0.9174,\n",
              "                      0.9122, 0.9138, 0.9149, 0.9135, 0.9152, 0.9258, 0.9420, 0.9221, 0.9142,\n",
              "                      0.9095, 0.9108, 0.9230, 0.9142, 0.9146, 0.9093, 0.9423, 0.9120, 0.9129,\n",
              "                      0.9217, 0.9127, 0.9190, 0.9091, 0.9187, 0.9199, 0.9099, 0.9235, 0.9088,\n",
              "                      0.9118, 0.9215, 0.9126, 0.9109, 0.9132, 0.9121, 0.9133, 0.9105, 0.9104,\n",
              "                      0.9193, 0.9206, 0.9137, 0.9170, 0.9131, 0.9078, 0.9118, 0.9139, 0.9194,\n",
              "                      0.9166, 0.9154, 0.9118, 0.9174, 0.9364, 0.9195, 0.9105, 0.9116, 0.9136,\n",
              "                      0.9142, 0.9092, 0.9102, 0.9305, 0.9112, 0.9129, 0.9168, 0.9255, 0.9206,\n",
              "                      0.9192, 0.9134, 0.9375, 0.9168, 0.9092, 0.9120, 0.9179, 0.9187, 0.9216,\n",
              "                      0.9166, 0.9098, 0.9123, 0.9110, 0.9199, 0.9166, 0.9137, 0.9172, 0.9097,\n",
              "                      0.9179, 0.9141, 0.9213, 0.9094, 0.9159, 0.9151, 0.9179, 0.9103, 0.9143,\n",
              "                      0.9123, 0.9106, 0.9111, 0.9151, 0.9225, 0.9114, 0.9203, 0.9143, 0.9136,\n",
              "                      0.9289, 0.9256, 0.9194, 0.9221, 0.9143, 0.9163, 0.9108, 0.9095, 0.9186,\n",
              "                      0.9125, 0.9101, 0.9146, 0.9173, 0.9165, 0.9251, 0.9205, 0.9113, 0.9228,\n",
              "                      0.9231, 0.9377, 0.9179, 0.9147, 0.9126, 0.9123, 0.9200, 0.9120, 0.9330,\n",
              "                      0.9118, 0.9260, 0.9157, 0.9129, 0.9180, 0.9205, 0.9114, 0.9117, 0.9130,\n",
              "                      0.9156, 0.9083, 0.9134, 0.9253, 0.9105, 0.9284, 0.9109, 0.9153, 0.9164,\n",
              "                      0.9230, 0.9220, 0.9096, 0.9128, 0.9201, 0.9253, 0.9157, 0.9124, 0.9094,\n",
              "                      0.9199, 0.9101, 0.9392, 0.9158, 0.9151, 0.9118, 0.9106, 0.9216, 0.9186,\n",
              "                      0.9143, 0.9121, 0.9236, 0.9144, 0.9102, 0.9186, 0.9224, 0.9120, 0.9180,\n",
              "                      0.9106, 0.9114, 0.9218, 0.9124, 0.9142, 0.9112, 0.9115, 0.9157, 0.9120,\n",
              "                      0.9127, 0.9142, 0.9436, 0.9153, 0.9151, 0.9099, 0.9244, 0.9128, 0.9149,\n",
              "                      0.9084, 0.9143, 0.9321, 0.9174, 0.9250, 0.9171, 0.9205, 0.9317, 0.9117,\n",
              "                      0.9138, 0.9208, 0.9140, 0.9194, 0.9398, 0.9230, 0.9210, 0.9112, 0.9166,\n",
              "                      0.9126, 0.9257, 0.9114, 0.9148, 0.9165, 0.9104, 0.9167, 0.9289, 0.9127,\n",
              "                      0.9249, 0.9221, 0.9132, 0.9141, 0.9196, 0.9105, 0.9129, 0.9165, 0.9111,\n",
              "                      0.9129, 0.9212, 0.9252, 0.9180, 0.9123, 0.9347, 0.9264, 0.9236, 0.9182,\n",
              "                      0.9151, 0.9151, 0.9161, 0.9129, 0.9396, 0.9131, 0.9173, 0.9112, 0.9356,\n",
              "                      0.9103, 0.9211, 0.9227, 0.9118, 0.9147, 0.9100, 0.9116, 0.9148, 0.9102,\n",
              "                      0.9188, 0.9199, 0.9197, 0.9121, 0.9239, 0.9211, 0.9192, 0.9229, 0.9210,\n",
              "                      0.9248, 0.9116, 0.9160, 0.9094, 0.9230, 0.9100, 0.9336, 0.9100, 0.9109,\n",
              "                      0.9127, 0.9219, 0.9146, 0.9300, 0.9379, 0.9187, 0.9212, 0.9153, 0.9199,\n",
              "                      0.9230, 0.9184, 0.9208, 0.9163, 0.9159, 0.9176, 0.9144, 0.9193, 0.9279,\n",
              "                      0.9150, 0.9100, 0.9338, 0.9088, 0.9168, 0.9222, 0.9204, 0.9253, 0.9344,\n",
              "                      0.9145, 0.9130, 0.9092, 0.9217, 0.9207, 0.9155, 0.9140, 0.9094, 0.9166,\n",
              "                      0.9130, 0.9289, 0.9119, 0.9196, 0.9167, 0.9408, 0.9296, 0.9116, 0.9129,\n",
              "                      0.9178, 0.9126, 0.9149, 0.9208, 0.9122, 0.9290, 0.9155, 0.9125, 0.9085,\n",
              "                      0.9157, 0.9231, 0.9265, 0.9173, 0.9101, 0.9096, 0.9250, 0.9193, 0.9132,\n",
              "                      0.9177, 0.9193, 0.9250, 0.9094, 0.9169, 0.9144, 0.9167, 0.9146, 0.9303,\n",
              "                      0.9161, 0.9113, 0.9164, 0.9140, 0.9147, 0.9132, 0.9310, 0.9100, 0.9127,\n",
              "                      0.9151, 0.9166, 0.9149, 0.9107, 0.9109, 0.9132, 0.9172, 0.9119, 0.9329,\n",
              "                      0.9331, 0.9410, 0.9153, 0.9290, 0.9138, 0.9175, 0.9129, 0.9331, 0.9153,\n",
              "                      0.9215, 0.9097, 0.9327, 0.9121, 0.9139, 0.9118, 0.9229, 0.9220, 0.9162,\n",
              "                      0.9145, 0.9162, 0.9105, 0.9163, 0.9138, 0.9200, 0.9104, 0.9228, 0.9135,\n",
              "                      0.9252, 0.9242, 0.9206, 0.9337, 0.9144, 0.9112, 0.9128, 0.9090, 0.9104,\n",
              "                      0.9226, 0.9145, 0.9234, 0.9374, 0.9121, 0.9229, 0.9154, 0.9278, 0.9137,\n",
              "                      0.9155, 0.9214, 0.9534, 0.9319, 0.9195, 0.9126, 0.9247, 0.9109, 0.9162,\n",
              "                      0.9213, 0.9092, 0.9231, 0.9156, 0.9122, 0.9123, 0.9157, 0.9159, 0.9100,\n",
              "                      0.9100, 0.9113, 0.9247, 0.9170, 0.9171, 0.9106, 0.9116, 0.9142, 0.9106,\n",
              "                      0.9271, 0.9385, 0.9161, 0.9216, 0.9288, 0.9128, 0.9230, 0.9127, 0.9283,\n",
              "                      0.9113, 0.9334, 0.9181, 0.9226, 0.9202, 0.9137, 0.9098, 0.9127, 0.9354,\n",
              "                      0.9176, 0.9115, 0.9108, 0.9318, 0.9136, 0.9249, 0.9147, 0.9172])),\n",
              "             ('conv_block5.4.num_batches_tracked', tensor(1)),\n",
              "             ('classifier.0.weight',\n",
              "              tensor([[-0.0070, -0.0054, -0.0245,  ...,  0.0245,  0.0345, -0.0160],\n",
              "                      [ 0.0113,  0.0045, -0.0079,  ..., -0.0339, -0.0232,  0.0342],\n",
              "                      [-0.0244, -0.0276,  0.0265,  ..., -0.0127, -0.0223, -0.0074],\n",
              "                      ...,\n",
              "                      [-0.0273,  0.0074,  0.0268,  ..., -0.0330,  0.0253,  0.0323],\n",
              "                      [-0.0189, -0.0322,  0.0163,  ..., -0.0197,  0.0191, -0.0017],\n",
              "                      [-0.0067,  0.0017, -0.0041,  ..., -0.0315, -0.0272,  0.0275]])),\n",
              "             ('classifier.0.bias', tensor([0., 0., 0.,  ..., 0., 0., 0.])),\n",
              "             ('classifier.3.weight',\n",
              "              tensor([[-0.0336, -0.0251,  0.0029,  ..., -0.0248, -0.0006, -0.0014],\n",
              "                      [-0.0160,  0.0008,  0.0330,  ...,  0.0015,  0.0069, -0.0183],\n",
              "                      [-0.0208,  0.0325,  0.0020,  ..., -0.0277, -0.0309, -0.0271],\n",
              "                      ...,\n",
              "                      [-0.0189, -0.0331,  0.0282,  ..., -0.0078, -0.0135, -0.0127],\n",
              "                      [-0.0295, -0.0043,  0.0106,  ..., -0.0176, -0.0013,  0.0250],\n",
              "                      [ 0.0120, -0.0271, -0.0006,  ..., -0.0145, -0.0082, -0.0177]])),\n",
              "             ('classifier.3.bias', tensor([0., 0., 0.,  ..., 0., 0., 0.])),\n",
              "             ('classifier.6.weight',\n",
              "              tensor([[-0.0487,  0.0138, -0.0570,  ..., -0.0002, -0.0320,  0.0142],\n",
              "                      [ 0.0563, -0.0081, -0.0201,  ...,  0.0177, -0.0203, -0.0639],\n",
              "                      [ 0.0497,  0.0477,  0.0257,  ...,  0.0482, -0.0669,  0.0238],\n",
              "                      ...,\n",
              "                      [-0.0278,  0.0760,  0.0355,  ..., -0.0154, -0.0426, -0.0171],\n",
              "                      [-0.0541, -0.0478,  0.0630,  ..., -0.0098, -0.0594,  0.0701],\n",
              "                      [ 0.0407,  0.0008,  0.0082,  ...,  0.0617, -0.0370,  0.0459]])),\n",
              "             ('classifier.6.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]))])"
            ]
          },
          "execution_count": 19,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "model.state_dict()"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "wHD02aNt4pNv"
      },
      "source": [
        "# 设置交叉熵损失函数，SGD优化器"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 20,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:40.023837Z",
          "start_time": "2025-06-26T01:43:40.019952Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "J1dvP3ES4pNv",
        "outputId": "9aeac7f8-783f-4543-e184-8c03019d660b"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "损失函数: CrossEntropyLoss()\n"
          ]
        }
      ],
      "source": [
        "model = VGG11()\n",
        "# 定义损失函数（优化器在下一个单元定义）\n",
        "loss_fn = nn.CrossEntropyLoss()  # 交叉熵损失函数，适用于多分类问题，里边会做softmax，还有会把0-9标签转换成one-hot编码\n",
        "\n",
        "print(\"损失函数:\", loss_fn)\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 21,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:40.035848Z",
          "start_time": "2025-06-26T01:43:40.032419Z"
        },
        "id": "qUeLZMIE4pNv"
      },
      "outputs": [],
      "source": [
        "model = VGG11()\n",
        "\n",
        "optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9)  # SGD优化器，学习率为0.001，动量为0.9"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 22,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.732814Z",
          "start_time": "2025-06-26T01:43:40.035848Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 123,
          "referenced_widgets": [
            "6ffee922c3534303b77040167f5ea168",
            "49492015d05746d6b5509b547a59ef9f",
            "257f1a5b41cb452594f3fdea255cc3e7",
            "7b327c3be2714bfbbdfd40c6de04e7af",
            "64664de1cfd242fbae10a4e355e1a71d",
            "312aa4960d4c46679e8772ebcab23a48",
            "ac414069f3b04cf59591cf4569060054",
            "8d259ce1b86544a7976ea92681bea157",
            "26edd4d49d42446f9b4fd1467258e87d",
            "a5bbd3a5d0594820bdbbf5b02927d73c",
            "3be244a7a21b4e9b8b453cd02d60f009"
          ]
        },
        "id": "qI1L-GG94pNv",
        "outputId": "43df9645-861e-4f9f-e4a6-330179de7662"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "使用设备: cuda:0\n",
            "训练开始，共35200步\n"
          ]
        },
        {
          "data": {
            "application/vnd.jupyter.widget-view+json": {
              "model_id": "6ffee922c3534303b77040167f5ea168",
              "version_major": 2,
              "version_minor": 0
            },
            "text/plain": [
              "  0%|          | 0/35200 [00:00<?, ?it/s]"
            ]
          },
          "metadata": {},
          "output_type": "display_data"
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "早停触发! 最佳验证准确率(如果是回归，这里是损失): 78.1400\n",
            "早停: 在18500 步\n"
          ]
        }
      ],
      "source": [
        "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
        "print(f\"使用设备: {device}\")\n",
        "model = model.to(device) #将模型移动到GPU\n",
        "early_stopping=EarlyStopping(patience=5, delta=0.001)\n",
        "model_saver=ModelSaver(save_dir='model_weights', save_best_only=True)\n",
        "\n",
        "\n",
        "model, history = train_classification_model(model, train_loader, val_loader, loss_fn, optimizer, device, num_epochs=50, early_stopping=early_stopping, model_saver=model_saver, tensorboard_logger=None)\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 23,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.737721Z",
          "start_time": "2025-06-26T01:45:37.732814Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "pJWn5FRH4pNv",
        "outputId": "6752af12-65bf-42d0-a84a-09bb69ac1839"
      },
      "outputs": [
        {
          "data": {
            "text/plain": [
              "[{'loss': 0.050599511712789536, 'acc': 98.4375, 'step': 18401},\n",
              " {'loss': 0.031586505472660065, 'acc': 98.4375, 'step': 18402},\n",
              " {'loss': 0.04562567174434662, 'acc': 98.4375, 'step': 18403},\n",
              " {'loss': 0.018415573984384537, 'acc': 98.4375, 'step': 18404},\n",
              " {'loss': 0.004098102450370789, 'acc': 100.0, 'step': 18405},\n",
              " {'loss': 0.014227667823433876, 'acc': 100.0, 'step': 18406},\n",
              " {'loss': 0.0013326209736987948, 'acc': 100.0, 'step': 18407},\n",
              " {'loss': 0.07926805317401886, 'acc': 98.4375, 'step': 18408},\n",
              " {'loss': 0.005051522981375456, 'acc': 100.0, 'step': 18409},\n",
              " {'loss': 0.000851876160595566, 'acc': 100.0, 'step': 18410},\n",
              " {'loss': 0.006027668248862028, 'acc': 100.0, 'step': 18411},\n",
              " {'loss': 0.008286467753350735, 'acc': 100.0, 'step': 18412},\n",
              " {'loss': 0.005296706687659025, 'acc': 100.0, 'step': 18413},\n",
              " {'loss': 0.017675155773758888, 'acc': 98.4375, 'step': 18414},\n",
              " {'loss': 0.0122974943369627, 'acc': 100.0, 'step': 18415},\n",
              " {'loss': 0.01907619833946228, 'acc': 98.4375, 'step': 18416},\n",
              " {'loss': 0.0016834294656291604, 'acc': 100.0, 'step': 18417},\n",
              " {'loss': 0.010099830105900764, 'acc': 100.0, 'step': 18418},\n",
              " {'loss': 0.0081954225897789, 'acc': 100.0, 'step': 18419},\n",
              " {'loss': 0.08778432011604309, 'acc': 95.3125, 'step': 18420},\n",
              " {'loss': 0.008762146346271038, 'acc': 100.0, 'step': 18421},\n",
              " {'loss': 0.04591457545757294, 'acc': 96.875, 'step': 18422},\n",
              " {'loss': 0.009161267429590225, 'acc': 100.0, 'step': 18423},\n",
              " {'loss': 0.00046077350270934403, 'acc': 100.0, 'step': 18424},\n",
              " {'loss': 0.008485933765769005, 'acc': 100.0, 'step': 18425},\n",
              " {'loss': 0.03843671455979347, 'acc': 98.4375, 'step': 18426},\n",
              " {'loss': 0.004387142602354288, 'acc': 100.0, 'step': 18427},\n",
              " {'loss': 0.00410472322255373, 'acc': 100.0, 'step': 18428},\n",
              " {'loss': 0.007090692408382893, 'acc': 100.0, 'step': 18429},\n",
              " {'loss': 0.0009846019092947245, 'acc': 100.0, 'step': 18430},\n",
              " {'loss': 0.030052514746785164, 'acc': 98.4375, 'step': 18431},\n",
              " {'loss': 0.022075394168496132, 'acc': 100.0, 'step': 18432},\n",
              " {'loss': 0.0006923830951564014, 'acc': 100.0, 'step': 18433},\n",
              " {'loss': 0.06520601361989975, 'acc': 98.4375, 'step': 18434},\n",
              " {'loss': 0.004204081371426582, 'acc': 100.0, 'step': 18435},\n",
              " {'loss': 0.03394153341650963, 'acc': 98.4375, 'step': 18436},\n",
              " {'loss': 0.028509024530649185, 'acc': 98.4375, 'step': 18437},\n",
              " {'loss': 0.004907775670289993, 'acc': 100.0, 'step': 18438},\n",
              " {'loss': 0.002516702748835087, 'acc': 100.0, 'step': 18439},\n",
              " {'loss': 0.04672648012638092, 'acc': 98.4375, 'step': 18440},\n",
              " {'loss': 0.002682328224182129, 'acc': 100.0, 'step': 18441},\n",
              " {'loss': 0.009593350812792778, 'acc': 100.0, 'step': 18442},\n",
              " {'loss': 0.0021312686149030924, 'acc': 100.0, 'step': 18443},\n",
              " {'loss': 0.001943283248692751, 'acc': 100.0, 'step': 18444},\n",
              " {'loss': 0.015406881459057331, 'acc': 100.0, 'step': 18445},\n",
              " {'loss': 0.005508589092642069, 'acc': 100.0, 'step': 18446},\n",
              " {'loss': 0.008091014809906483, 'acc': 100.0, 'step': 18447},\n",
              " {'loss': 0.007744696922600269, 'acc': 100.0, 'step': 18448},\n",
              " {'loss': 0.07224255800247192, 'acc': 98.4375, 'step': 18449},\n",
              " {'loss': 0.027997758239507675, 'acc': 98.4375, 'step': 18450},\n",
              " {'loss': 0.0016873879358172417, 'acc': 100.0, 'step': 18451},\n",
              " {'loss': 0.0060630254447460175, 'acc': 100.0, 'step': 18452},\n",
              " {'loss': 0.00043788139009848237, 'acc': 100.0, 'step': 18453},\n",
              " {'loss': 0.0007570580346509814, 'acc': 100.0, 'step': 18454},\n",
              " {'loss': 0.03204948082566261, 'acc': 98.4375, 'step': 18455},\n",
              " {'loss': 0.005051369778811932, 'acc': 100.0, 'step': 18456},\n",
              " {'loss': 0.09118255972862244, 'acc': 96.875, 'step': 18457},\n",
              " {'loss': 0.012942755594849586, 'acc': 100.0, 'step': 18458},\n",
              " {'loss': 0.0023528486490249634, 'acc': 100.0, 'step': 18459},\n",
              " {'loss': 0.015447930432856083, 'acc': 100.0, 'step': 18460},\n",
              " {'loss': 0.050802070647478104, 'acc': 96.875, 'step': 18461},\n",
              " {'loss': 0.01958540640771389, 'acc': 98.4375, 'step': 18462},\n",
              " {'loss': 0.02613057568669319, 'acc': 98.4375, 'step': 18463},\n",
              " {'loss': 0.003426611190661788, 'acc': 100.0, 'step': 18464},\n",
              " {'loss': 0.01220821961760521, 'acc': 100.0, 'step': 18465},\n",
              " {'loss': 0.006213122513145208, 'acc': 100.0, 'step': 18466},\n",
              " {'loss': 0.0002550019999034703, 'acc': 100.0, 'step': 18467},\n",
              " {'loss': 0.0021320346277207136, 'acc': 100.0, 'step': 18468},\n",
              " {'loss': 0.0012046537594869733, 'acc': 100.0, 'step': 18469},\n",
              " {'loss': 0.020659882575273514, 'acc': 98.4375, 'step': 18470},\n",
              " {'loss': 0.029387472197413445, 'acc': 100.0, 'step': 18471},\n",
              " {'loss': 0.003713669953867793, 'acc': 100.0, 'step': 18472},\n",
              " {'loss': 0.0021769183222204447, 'acc': 100.0, 'step': 18473},\n",
              " {'loss': 0.01620328798890114, 'acc': 100.0, 'step': 18474},\n",
              " {'loss': 0.000997700379230082, 'acc': 100.0, 'step': 18475},\n",
              " {'loss': 0.010679603554308414, 'acc': 100.0, 'step': 18476},\n",
              " {'loss': 0.034296780824661255, 'acc': 98.4375, 'step': 18477},\n",
              " {'loss': 0.002872479846701026, 'acc': 100.0, 'step': 18478},\n",
              " {'loss': 0.04545372352004051, 'acc': 98.4375, 'step': 18479},\n",
              " {'loss': 0.0027251518331468105, 'acc': 100.0, 'step': 18480},\n",
              " {'loss': 0.053410936146974564, 'acc': 98.4375, 'step': 18481},\n",
              " {'loss': 0.11141286045312881, 'acc': 95.3125, 'step': 18482},\n",
              " {'loss': 0.01636582985520363, 'acc': 98.4375, 'step': 18483},\n",
              " {'loss': 0.027017461135983467, 'acc': 98.4375, 'step': 18484},\n",
              " {'loss': 0.0017179715214297175, 'acc': 100.0, 'step': 18485},\n",
              " {'loss': 0.006858266424387693, 'acc': 100.0, 'step': 18486},\n",
              " {'loss': 0.030940772965550423, 'acc': 98.4375, 'step': 18487},\n",
              " {'loss': 0.0008208189974538982, 'acc': 100.0, 'step': 18488},\n",
              " {'loss': 0.0018773316405713558, 'acc': 100.0, 'step': 18489},\n",
              " {'loss': 0.002927182475104928, 'acc': 100.0, 'step': 18490},\n",
              " {'loss': 0.002819039626047015, 'acc': 100.0, 'step': 18491},\n",
              " {'loss': 0.004885886795818806, 'acc': 100.0, 'step': 18492},\n",
              " {'loss': 0.024063149467110634, 'acc': 98.4375, 'step': 18493},\n",
              " {'loss': 0.004361123777925968, 'acc': 100.0, 'step': 18494},\n",
              " {'loss': 0.003530301619321108, 'acc': 100.0, 'step': 18495},\n",
              " {'loss': 0.008875619620084763, 'acc': 100.0, 'step': 18496},\n",
              " {'loss': 0.023679403588175774, 'acc': 98.4375, 'step': 18497},\n",
              " {'loss': 0.01611325889825821, 'acc': 98.4375, 'step': 18498},\n",
              " {'loss': 0.0013549050781875849, 'acc': 100.0, 'step': 18499}]"
            ]
          },
          "execution_count": 23,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "history['train'][-100:-1]"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 24,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.741226Z",
          "start_time": "2025-06-26T01:45:37.737721Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "pMjJdQ2l4pNw",
        "outputId": "11571efd-32da-4e79-c411-19127ce49f68"
      },
      "outputs": [
        {
          "data": {
            "text/plain": [
              "[{'loss': 2.3024572368621827, 'acc': 10.72, 'step': 0},\n",
              " {'loss': 1.2724554433822632, 'acc': 53.08, 'step': 500},\n",
              " {'loss': 1.0528720901489257, 'acc': 61.48, 'step': 1000},\n",
              " {'loss': 0.9429292194366455, 'acc': 67.12, 'step': 1500},\n",
              " {'loss': 0.8738024176597595, 'acc': 69.1, 'step': 2000},\n",
              " {'loss': 0.8594518756866455, 'acc': 69.78, 'step': 2500},\n",
              " {'loss': 0.7938815521240234, 'acc': 73.64, 'step': 3000},\n",
              " {'loss': 0.7996832118034363, 'acc': 73.16, 'step': 3500},\n",
              " {'loss': 0.8305575358390808, 'acc': 72.52, 'step': 4000},\n",
              " {'loss': 0.8041817101716995, 'acc': 75.18, 'step': 4500},\n",
              " {'loss': 0.8287751563072204, 'acc': 74.66, 'step': 5000},\n",
              " {'loss': 0.8406979232788085, 'acc': 75.58, 'step': 5500},\n",
              " {'loss': 0.9993303775787353, 'acc': 73.08, 'step': 6000},\n",
              " {'loss': 0.9485766896247864, 'acc': 75.64, 'step': 6500},\n",
              " {'loss': 0.9304439280509949, 'acc': 75.14, 'step': 7000},\n",
              " {'loss': 1.2782565168380737, 'acc': 71.0, 'step': 7500},\n",
              " {'loss': 1.1031286642313003, 'acc': 74.88, 'step': 8000},\n",
              " {'loss': 1.026307428741455, 'acc': 75.74, 'step': 8500},\n",
              " {'loss': 1.020821224975586, 'acc': 75.96, 'step': 9000},\n",
              " {'loss': 1.0229072147309781, 'acc': 76.42, 'step': 9500},\n",
              " {'loss': 1.0008010182380676, 'acc': 77.22, 'step': 10000},\n",
              " {'loss': 1.129536530971527, 'acc': 75.22, 'step': 10500},\n",
              " {'loss': 1.1221372065067292, 'acc': 77.12, 'step': 11000},\n",
              " {'loss': 1.0168406055927277, 'acc': 77.48, 'step': 11500},\n",
              " {'loss': 1.1099179193019868, 'acc': 76.4, 'step': 12000},\n",
              " {'loss': 1.0585193243980409, 'acc': 77.62, 'step': 12500},\n",
              " {'loss': 1.084789436149597, 'acc': 77.76, 'step': 13000},\n",
              " {'loss': 1.113554142475128, 'acc': 77.94, 'step': 13500},\n",
              " {'loss': 1.167115535055101, 'acc': 76.86, 'step': 14000},\n",
              " {'loss': 1.2743188953876496, 'acc': 76.48, 'step': 14500},\n",
              " {'loss': 1.0851726240158082, 'acc': 77.02, 'step': 15000},\n",
              " {'loss': 1.1766109198093415, 'acc': 76.66, 'step': 15500},\n",
              " {'loss': 1.140500514984131, 'acc': 78.14, 'step': 16000},\n",
              " {'loss': 1.1618044478178025, 'acc': 77.92, 'step': 16500},\n",
              " {'loss': 1.12559452791214, 'acc': 78.0, 'step': 17000},\n",
              " {'loss': 1.1808834407806397, 'acc': 77.46, 'step': 17500},\n",
              " {'loss': 1.1756121884822845, 'acc': 77.88, 'step': 18000}]"
            ]
          },
          "execution_count": 24,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "history['val'][-1000:-1]"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "NcujMCRC4pNw"
      },
      "source": [
        "# 绘制损失曲线和准确率曲线"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 25,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.816716Z",
          "start_time": "2025-06-26T01:45:37.744941Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 465
        },
        "id": "3xZ57j-C4pNw",
        "outputId": "bc670f93-9497-43e7-8d11-f266b353aa88"
      },
      "outputs": [
        {
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzoAAAHACAYAAABqJx3iAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAuCdJREFUeJzs3Xd4lGXWx/HvzKR3AqTQe+9FpYiINDuKdXUta1l3xRVxdWVfK7qyurZ17brK6toLrhWJKB3pIL0GQklCCeltkpn3j3tmSCAhM8mEtN/nuubKlOd55p5HnJkz59zntjidTiciIiIiIiKNiLWuByAiIiIiIuJvCnRERERERKTRUaAjIiIiIiKNjgIdERERERFpdBToiIiIiIhIo6NAR0REREREGh0FOiIiIiIi0ugo0BERERERkUYnoK4H4A2Hw8HBgweJjIzEYrHU9XBERJoMp9NJTk4OrVq1wmrVb2Nu+lwSEak73n42NYhA5+DBg7Rt27auhyEi0mTt27ePNm3a1PUw6g19LomI1L2qPpsaRKATGRkJmBcTFRXl8/52u525c+cyfvx4AgMD/T28Jkvntfbo3NYOnVffZWdn07ZtW8/7sBj6XKq/dG5rj85t7dB59Z23n00NItBxlwVERUVV+wMlLCyMqKgo/QPyI53X2qNzWzt0XqtP5Vnl6XOp/tK5rT06t7VD57X6qvpsUsG1iIiIiIg0Ogp0RERERESk0VGgIyIiIiIijU6DmKMjIvWT0+mkpKSE0tLSuh6KV+x2OwEBARQWFjaYMdc2m81GQECA5uCIiEijo0BHRKqluLiY1NRU8vPz63ooXnM6nSQkJLBv3z59sS8jLCyMxMREgoKC6nooIiIifqNAR0R85nA4SE5Oxmaz0apVK4KCghpE4OBwOMjNzSUiIkKLX2ICv+LiYg4fPkxycjJdu3bVeRERkUZDgY6I+Ky4uBiHw0Hbtm0JCwur6+F4zeFwUFxcTEhIiL7Qu4SGhhIYGMjevXs950ZERKQx0Ce9iFSbgoXGQf8dRUSkMdKnm4iIiIiINDoKdEREREREpNFRoCMiUk0dOnTghRde8Mux5s+fj8ViITMz0y/Ha8wWLlzIxRdfTKtWrbBYLHz55ZflHnc6nTz88MMkJiYSGhrK2LFj2bFjR7ltMjIyuO6664iKiiImJoZbbrmF3Nzc0/gqRESktinQEZEm5aKLLuKee+7xy7FWrlzJ7bff7pdjiffy8vLo378/L7/8coWPP/3007z44ou89tprLF++nPDwcCZMmEBhYaFnm+uuu45NmzaRlJTEN998w8KFC/XfUkSkkVHXNRGRMpxOJ6WlpQQEVP322LJly9MwIjnR+eefz/nnn1/hY06nkxdeeIEHH3yQSy+9FIB3332X+Ph4vvzyS6655hq2bNnCnDlzWLlyJUOGDAHgX//6FxdccAHPPPMMrVq1Om2vRUREak+jD3QW7zjCE99uJqLEygV1PRiRRsrpdFJgL62T5w4NtHm9hs/NN9/MkiVLWLJkCS+++CIA77zzDjfffDPfffcdDz74IBs2bGDu3Lm0bduWadOm8csvv5CXl0fPnj2ZOXMmY8eO9RyvQ4cOTJ06lalTpwJgsVh48803+fbbb/nhhx9o3bo1zz77LJdcckm1Xtvnn3/Oww8/zM6dO0lMTOSuu+7i3nvv9Tz+yiuv8Pzzz7Nv3z6io6M5++yz+eyzzwD47LPPeOyxx9i5cydhYWEMHDiQ//3vf4SHh1drLA1FcnIyaWlp5f47RUdHc+aZZ7Js2TKuueYali1bRkxMjCfIARg7dixWq5Xly5dz2WWXnXTcoqIiioqKPLezs7MBsNvt2O12n8fp3qc6+8qp6dye7MOV+/h522H+flkfYsOrvzDw6Ti36/dn8fQP25lybieGdWpeK8/hdDr5+tc03v0lhcIafnZZgJFdW3D3mM6EBNp83j+nsIRn5m7j50
02Xt61pNLPMwswuntLpozuRHA1nscb6dmF3P/5Ro7mFdfK8U/Up3UUf7+sT7X29fbfYKMPdIpKStmalkPb8Pq/mKFIQ1VgL6XXwz/UyXNvnjGBsCDv3speeOEFtmzZQv/+/Xn88ccB2LRpEwAPPPAAzzzzDJ06daJZs2bs27ePCy64gL/97W8EBwfz7rvvcvHFF7Nt2zbatWtX6XM89thjPP300/zjH//gX//6F9dddx179+4lNjbWp9e1evVqrrrqKh599FGuvvpqli5dyh//+EeaN2/OTTfdxKpVq/jTn/7Ee++9x/Dhw8nIyGDRokUApKamcu211/L0009z2WWXkZOTw6JFi3A6nT6NoSFKS0sDID4+vtz98fHxnsfS0tKIi4sr93hAQACxsbGebU40c+ZMHnvssZPunzt3bo3WkkpKSqr2vnJqOrfGzmx4aZMNJxYe+M9PTOrgqPExa+vc5pfAU+ttZBZb+MN7q/hL/1Kiqx+XVSjPDp8mW1l71H+zN7am5/L16mR+26WUthHe77crG/6700ZGkQWwkJqfV+Xz/G/Vbn7bpZTWfv7NyuGEV7dY2Z51+ma1lBbk8N13KdXaNz8/36vtGn2gEx5sXmJR3fzYLCL1SHR0NEFBQYSFhZGQkADA1q1bAZgxYwbjxo3zbBsbG0v//v09tx9//HFmz57NV199xZQpUyp9jptuuolrr70WgCeffJIXX3yRFStWMHHiRJ/G+txzz3Heeefx0EMPAdCtWzc2b97MP/7xD2666SZSUlIIDw/noosuIjIykvbt2zNw4EDABDolJSVcfvnltG/fHoC+ffv69PxS3vTp05k2bZrndnZ2Nm3btmX8+PFERUX5fDy73U5SUhLjxo0jMDDQn0Nt8nRuj8sqsPP3l5fhxMxPW340kKduOptmYdWLHmrz3DqdTv708a9kFqcDkFdiYU5mHO/cMBir1T8/Vi/ZdZQnv9hIenYRNquFP4zqyJAOzWp0zCM5RTz1w3bSc4t5YVMgfxrTmdvP7ojtFGMuLnHw4k+7eGNzMk4ntIoOYXxcHiPPHFRp2XR6diFP/bCd1Dw7z28K5J6xXfjd8A6nfB5fvLk4me1ZOwgNtPKPyX2JCKn9ECE6JJA+rX1//4TjWfWqNPpAJ0KBjkitCw20sXnGhDp7bn8oW8YEkJuby6OPPsq3337rCRwKCgpISTn1r0/9+vXzXA8PDycqKopDhw75PJ4tW7Z45pi4jRgxghdeeIHS0lLGjRtH+/bt6dSpExMnTmTixIlcdtllhIWF0b9/f8477zz69u3LhAkTGD9+PFdccQXNmtXsA70hcAew6enpJCYmeu5PT09nwIABnm1O/G9SUlJCRkaGZ/8TBQcHExwcfNL9gYGBNfrCV9P9pXJN/dw6nU4e+XoDqVmFtG8eRmigja1pOby/4gD3jOtWo2PXxrn9eGUKczalE2C18I8r+zH9iw0s3ZXBf5bv4/ZRnWt07EJ7KU/N2co7S/YA0LFFOM9fPYABbWNqPnBgTK9E/m/2Br7fmMZzP+5kwY6jPH/VANo1Pznbuz09h6kfrWNzqvmifsXgNvx1YjcW/TSXc7rHn/K8jumZyPQvfuXHLYd4+ocdLNh+lGev6k+bZtXPKgNs2J/Fc0k7AXjk4t5cNKBNjY53Onj776/Rd11zZ3QKFeiI1BqLxUJYUECdXLydn1OVE+eu/PnPf2b27Nk8+eSTLFq0iHXr1tG3b1+Ki09du3zim6/FYsHhqHmpyIkiIyNZs2YNH374IYmJiTz88MP079+fzMxMbDYbSUlJfP/99/Tq1Yt//etfdO/eneTkZL+Po77p2LEjCQkJzJs3z3NfdnY2y5cvZ9iwYQAMGzaMzMxMVq9e7dnmp59+wuFwcOaZZ572MYvUhk9X7+fbDakEWC3885qB3DWmKwCzlu4ht6ikjkdX3q7DuTz61WYA7h3fncsGtuHhi3oD8I8ftrHxQFa1j73xQBYX/2uxJ8
j57Vnt+fZPI/0W5ADEhgfxynWDePbK/kQEB7B67zHO/+dCPl6Z4ikZdjicvL04mYv+tZjNqdk0CwvktesH8cyV/Yn0MnvSMjKYN28Ywt8v70tYkI3lyRmc/8Iivlizv9qlyXlFJfzpo7WUOJyc3yeBq4e2rdZx6qtGH+h4MjoOCw5H469PF5FTCwoKorS06l8+lixZwk033cRll11G3759SUhIYM+ePbU/QJeePXuyZMmSk8bUrVs3bDaTxQoICGDs2LE8/fTT/Prrr+zZs4effvoJMAHWiBEjeOyxx1i7di1BQUHMnj37tI2/NuXm5rJu3TrWrVsHmAYE69atIyUlBYvFwtSpU3niiSf46quv2LBhAzfccAOtWrVi0qRJgDm3EydO5LbbbmPFihUsWbKEKVOmcM0116jjmjQKyUfyePQrM//wnnHdGNA2hol9EujUMpysAjvv/7K3jkd4XHGJg6kfraPAXsrwzs35/ahOAFx7Rlsm9I7HXurkTx+uJb/Yt+Cs1OHk5Z93ctkrS9hxKJeWkcG8c/NQHp/Ux+t5nb6wWCxMHtyG7+8+mzM6xpJXXMpfPt/A7e+tZuOBLG54ewUzvtlMcYmD0d1b8sPUUUzsk1j1gSt4nmvOaMd3fzqbQe1iyCkqYdon65nywVqOVaOJwIyvN5N8JI/E6BBmXt7Xbz8e1hdNpnQNIN9eSgWVByLShLRr144VK1awZ88eIiIiKs22dO3alS+++IKLL74Yi8XCQw89VCuZmcrce++9DB06lMcff5yrr76aZcuW8dJLL/HKK68A8M0337B7925GjRpFs2bN+O6773A4HHTv3p3ly5czb948xo8fT1xcHMuXL+fw4cP07NnztI2/Nq1atYpzzz3Xc9s9d+bGG29k1qxZ3H///eTl5XH77beTmZnJyJEjmTNnDiEhIZ593n//faZMmcJ5552H1Wpl8uTJnk58IlVxOp3M/H4rIQFWpo3vXmvP43A4eeSrTdisFqaN70ZUSNXlOsUlDu7+aC35xaWc1SmWO84xZV82q4U/nNOZ+z77lTcXJXPj8A4+dQnblpbDk99upm+gf78IP5u0jQ0HsogJC+S5qwZ45uNYLBb+fnk/1u9bxO4jeTz+zWZmXt6viqMZ+zLymfbJOlbuOQbAxN4JPHl53xp1nPNW29gwPrztLN5atJtn5m4jaXM6SZvNvKOQQCsPXtiL685sV+OAokOLcD75/TBeW7CLF37cwbcbUlm5J4NnruzPqG7eLX3w3YZUPl61D4sFnrtqADHVnLtVnzX6jE5IoBX3PK28epaqFZHTb8qUKdhsNnr16kXLli0rnXPz3HPP0axZM4YPH87FF1/MhAkTGDRo0Gkb56BBg/jkk0/46KOP6NOnDw8//DAzZszgpptuAiAmJoYvvviCMWPG0LNnT1577TU+/PBDevfuTVRUFAsXLuSCCy6gW7duPPjggzz77LOVrj3T0IwePRqn03nSZdasWYD5gjRjxgzS0tIoLCzkxx9/pFu38nMSYmNj+eCDD8jJySErK4u3336biAgf2iVJkzZ/22HeWLibF3/aydY07yZFV8evB7J475e9zFq6h/NfWMTy3Uer3Oe5pO38uj+L6NBAnr96QLnJ6pMGtqZ1TChHcov4dNU+r8eRV1TCHf9dzYIdR3h7u5W07MKqd/LCkp1HeH3BbgD+fnk/EqJDyj3eLDyI567qj8UCH67Yx/cbUk95PKfTySer9jHxhYWs3HOMiOAAnrmyP69eP+i0BDluNquF35/Tmf/dOZLu8ZEA9G8TzXd/Opvrz2rvt6xJgM3KlDFd+eKPw+nUMpxDOUXc8PYKHvnfRgqKT125cDCzgAc+/xWAP5zTmWGda6eVd11r9Bkdi8VCeHAAOYUl5KkjgUiT16VLF5YsWYLVevx3HnfwUFaHDh08ZWBud955Z7nbJ5ayVVQjnZmZ6dW43F/ey5o8eTKTJ0+ucPuRI0cyf/78Ch/r2bMnc+
bM8ep5RcQ3TqeTl37e6bn95dqDPHB+9TpHVWX+tuNNMw5kFnDNm79w+9mdmDa+G8EBJ2djlu48wusLdwHw98v7khgdWu7xQJuV20d14pGvNvHagt1cc0Y7Am1V/+btLm8CyC+x8JfPN/LfW8+qUTe0jLxipn2yDoBrz2jHxD4VNwIZ3qUFvx/VmdcW7OKBLzYwoF3MSa8L4GhuEdO/2MBcV/bkjA6xPHtVf9rG1myifk30ahXFV3eNYNPBbPq2jvbqXFdHvzYxfHvX2Tw1Zyuzlu7hP8v2snjnEV64eiB920SftH2pw8nUj9eRXVhC/zbRNW5OUZ81+owOQHiQeTPI87G+U0RERKSs5ckZrN57zHP7q3UHam0O8M/bDgPw8EW9uHpIW5xOeH3hbi59aclJmaRjecXc88k6nE4zv+X8vhXP/7h6aFtaRARxILOA/607WOUYypY3PX5JL4KsTpbuzuDNRbur/bqcTid/+fxX0rOL6NwynIcuOnVZ7bRx3ejXJpqsAjv3fLyO0hPO909b05nwwiLmbk4n0GbhgfN78OHtZ9VpkOMWHGBjULtmtRbkuIUG2Xj0kt7853dnEBcZzK7DeVz2yhL+NW8HJaXly65fnb+TFckZhAfZ+Oc1A2t9bHWp8b6yMtyd1+pblxERaTruuOMOIiIiKrzccccddT08EfHSy65szlVD2hAZEsDBrEJW7Mnw+/MczS3i1/2ZAFzQN5GnrujH678dTGx4EFvTcrjkX0t4a9FuHA5TuvnAFyZw6NQynIcu6lXpcUMCbdwy0kz4f2X+zpOChrJOLG+6ZmgbLnctOPqPH7Z5xuerD1akkOQKSv55zcAqmwMEBVhd29n4ZXeGJ2uVX1zCX2dv4HezVnEkt4hu8RF8eecI7jins9/Wl2lozulmGh1c2DeREoeTZ5O2c9Xry9h71GTk1qYc4/kfdwDw2KV96NDCzyuP1jONvnQNjjckUOmaiNSVGTNm8Oc//7nCx6qz4KSInH7r92WyaMcRbFYLd43pitVi4aOV+/hy7QHO6uTfOQ4LdxzG6YSeiVGeuSsTeicwqF0zHvj8V+ZtPcQT325h3pZDDOvcnB82mcDhRS8Ch+vPaser83ey+3AeczelVZj9KXU4uefE8iZHKWfFOTkWEscPmw9x90fr+OaukZ4flL2xIz2Hx78xraT/MrEHfVqfXFpVkY4twnn0kt7c/9mvPDd3OzGhQbyxcBd7juYDcOvIjvx5QnefGiw0Vs3Cg3jpNwM5b20cj/xvE2tSMjn/n4t44PwevLUomVKHk4v7t2LyoNZ1PdRa10QyOq7SNWV0RKSOxMXF0aVLlwovcXFxdT08EfGCO5tz6YBWtI0NY9JA80Xx2w2pFNr9+2PqfFfZ2uju5TtotYwM5q0bh/DkZX0JDbSxbPdRnkvaDsD9E7wLHCJDArlpeAcAXvp5Z4XzC19bsIvlyRmEnVDeZLHAE5f2JjE6hOQjecz4erPXr+lQdiF3fbiWQruDs7u24HcjOnq9L8CVg9twYT+Tqfjr7A3sOZpPYnQIH9x6Jg9e1EtBThkWi4XLB7Xh+6lnc2bHWPKLS3n4f5tIycindUwoT0zq0+haSVfEp0Bn5syZDB06lMjISOLi4pg0aRLbtm075T6zZs3CYrGUu5Rt8Xk6hLt+2citogOFiIiISEW2p+cwd3M6Fgv8cbRp2XxGh1haRYeQU1jCz1sPVXEE75U6nCzcbgKdc7uf/EOIxWLhN2e24/u7z/YsfHl21xbcMtL7wOGmER0JDbSx6WA2C1zP5bY25ZgneHrskt4nlTfFhJmObhYLfLxqH99V0Q0N4PsNqYx/YSFb03JoHh7Es1f297mZgcVi4clJfWkdY5oRXDqgFXPuHsXwLi18Ok5T0qZZGB/cdhZ/vaAHQTYrNquFF64ZQHRo1a3KGwOfStcWLFjAnXfeydChQykpKeGvf/0r48ePZ/PmzSetKl5WVFRUuYDodEeQEcroiI
iISA28Ot/MC5nYO4EucaZlsNVq4ZIBrXltwS5mrz1QaQMAX63fn8mxfDuRIQEMahdT6XYdWoTz2R3DWLsvk35ton0KHGLDg7juzHa8tTiZV37exWhXQJVbVMLdH5kJ/xf1S+SKwW0q3P+sTs354+jOvPzzLh74/FcGtI2hVczJ3dCyC+08+tUmvlhzAIDeraL45zUDiIuq3o/e0WGBfH3XSA4cK6iwo5iczGa1cPuozlzYrxUFxaV0iWs6rfR9CnRObFc6a9Ys4uLiWL16NaNGjap0P4vFQkJCxW0DTwc1IxAREZHqSjmaz1frTYeyP47uUu6xywaaQGf+tsNk5hf7ZdFFd9naqK4tCaiiI1aAzcrQDrHVep7bRnXi3WV7WbEngxXJGZzRMZZHypQ3/e2yvqf8cXrq2G4s3nmU9fsymfrxOj687axyTQCW7z7KtE/WcyCzAKsF/jC6M3ef142ggJrNnIgNDzqt6+I0Fq0rCEQbuxo1I8jKygLMwmunkpubS/v27XE4HAwaNIgnn3yS3r17V7p9UVERRUVFntvZ2aaFot1ux263+zzO0ADzP11OQfX2l4q5z6XOqf/V93Nrt9txOp04HA4cDkfVO9QT7jp099jFcDgcOJ1O7HY7Nlv5Gvf6+m9Q5HR6beEuSh1OzunW8qQsQveESHomRrElNZvvNqTxmzPb1fj53OvnnNPduxXuqys+KoQrhrThg+UpvPzzTtKy2/D5mv1YLfD81VWXNwXarPzz6gFc+OIiViRn8Or8nUwZ05WiklKeS9rOGwt343RC29hQnr9qAEOqGZCJVFe1Ax2Hw8HUqVMZMWIEffr0qXS77t278/bbb9OvXz+ysrJ45plnGD58OJs2baJNm4rToTNnzuSxxx476f65c+cSFuZ7T/TUAxbAxo7kFL77bo/P+8upJSUl1fUQGq36em4DAgJISEggNzeX4uLiuh6Oz3Jycup6CPVKcXExBQUFLFy4kJKS8pnv/Pz8OhqVSP2QllXIZ6v2A3DnuV0q3Oayga3YkprNl2sP1DjQOZxTxK/7zQ/Jo7vVbqADcMeozny0IoUF2w+z0tUme8qYrpzR0bugpEOLcGZc2od7P13P8z/uICE6lH8vTmZLqvmR+qohbXj44t6eDrgip1O1/9XdeeedbNy4kcWLF59yu2HDhjFs2DDP7eHDh9OzZ09ef/11Hn/88Qr3mT59OtOmTfPczs7Opm3btowfP75abVgPLUnmm5QdRLeI54ILBvq8v1TMbreTlJTEuHHjCAxsGpPaTpf6fm4LCwvZt28fERERp725SE04nU5ycnKIjIys9lzBTp06cffdd3P33XdXua3NZuPzzz9n0qRJ1Xqu06WwsJDQ0FBGjRp10n9Pd0ZdpKl6a9FuiksdnNEhttIv/5f0b83M77eyYk8G+zLya7RQpbsJQe9WUdWex+KLds3DuKR/K75cd5D84lIGtYvhT2MqDugqc/mg1szffpiv1x/kz5+uB0x52czL+zKhd91NXRCpVqAzZcoUvvnmGxYuXFhpVqYygYGBDBw4kJ07d1a6TXBwMMHBwRXuW50vfVGuetkCu6Nefmls6Kr730WqVl/PbWlpKRaLBavVitXacLrUu8vV3GOvLl/2bwjnyGq1YrFYKvz3Vh///YmcLhl5xby/PAWAP57budLtEqJDGNapOUt3HeWr9Qcrzfx4Y/72ittK16Y/ntuFb35NJTTQtJKual7QiSwWC09M6sOavcc4kFnAmB5x/H1yX+IiG84PYdI4+RToOJ1O7rrrLmbPns38+fPp2NG3/udgviBt2LCBCy64wOd9q8vdXjpP7aVFRETES7OWJFNgL6VP6yjOqaKMbNLA1izddZTZaw/wx9Gdq5U1rqqtdG3pFh/Jl3eOICI4oNrZqOjQQP43ZQS7DuVyRsfYJrFGi9R/PoXsd955J//973/54IMPiIyMJC0tjbS0NAoKCjzb3HDDDUyfPt1ze8aMGcydO5
fdu3ezZs0arr/+evbu3cutt97qv1dRhYgQV9e1QnVdE6kVTicU59XNpYKF7irzxhtv0LNnz5MaEVx66aX87ne/Y9euXVx66aXEx8cTERHB0KFD+fHHH/12mjZs2MCYMWMIDQ2lefPm3H777eTm5noenz9/PmeccQbh4eHExMQwYsQI9u7dC8D69es599xziYyMJCoqisGDB7Nq1Sq/jU1EyssptDNr6R4A7hzdpcov7hP7JBAcYGXnoVw2Haxeyee6fcfIKrATFRLgWR/ndOnTOvqk9XJ81SIimDM7NVeQI/WGTxmdV199FYDRo0eXu/+dd97hpptuAiAlJaVcmcaxY8e47bbbSEtLo1mzZgwePJilS5fSq1evmo3cB+FBrnV0ihXoiNQKez482apunvuvByHIuw/nK6+8krvvvpuff/6ZcePGAZCRkcGcOXP47rvvyM3N5YILLuBvf/sbwcHBvPvuu1x88cVs27aNdu1qNsE4Ly+PCRMmMGzYMFauXMmhQ4e49dZbmTJlCrNmzaKkpIRJkyZx22238eGHH1JcXMyKFSs8Xxiuu+46Bg4cyKuvvorNZmPdunUqKxOpRf/9JYXswhI6twz3ap5JVEggY3vF8+2vqXy59gB9Wvu+xounrXS3qttKi0jVfC5dq8r8+fPL3X7++ed5/vnnfRqUv7nX0ckrUumaSFPWrFkzxo4dy4cffugJdD777DNatGjBueeei9VqpX///p7tH3/8cWbPns1XX33FlClTavTcH3zwAYWFhbz77rueBZZfeuklLr74Yp566ikCAwPJysrioosuonNnMxegZ8+env1TUlK477776NGjBwBdu3at0XhEpHKF9lL+vXg3YNbN8XYhzkkDWvPtr6n8b/1Bpl/Qs9yaMt742dVWevRpLFsTacyaRK8/d0tDZXREaklgmMms1NVz++DKK69k6tSpvPrqqwQHB/P+++9zzTXXYLVayc3N5dFHH+Xbb78lNTWVkpISCgoKSElJqfEwt2zZQv/+/T1BDsCIESNwOBxs27aNUaNGcdNNNzFhwgTGjRvH2LFjueqqq0hMNCutT5s2jVtvvZX33nuPsWPHcuWVV3oCIhHx3tHcIv7+/VayCytfIyojr5gjucW0aRbKJQO8z1af060lMWGBHM4pYumuI5zd1fuGAodyCtl4INtzHBGpuSaRF3WXrtlLnRSVKKsj4ncWiykfq4uLj7XgEydOxOl08u2337Jv3z4WLVrEddddB8Cf//xnZs+ezZNPPsmiRYtYt24dffv2PW1rBb3zzjssW7aM4cOH8/HHH9OtWzd++eUXAB599FE2bdrEhRdeyE8//USvXr2YPXv2aRmXSGPhdDq599P1fLp6Pz9sSq/0snLPMQDuOKczgT6UkAUFWLmon/lx4su1vv34s8BVtta3dTQtI0/uPCsivmsSGZ3wMotU5RaWEBxhO8XWItKYhYSEcNlll/H++++zc+dOunfvzqBBgwBYsmQJN910E5dddhkAubm57Nmzxy/P27NnT2bNmkVeXp4nq7NkyRKsVivdu3f3bDdw4EAGDhzI9OnTGTZsGB988AFnnXUWAN26daNbt27cc889XHvttbzzzjuesYpI1WYt3cP8bYcJCrDywMQeBAdWHsTEhAZxfh/f14C5bGBr/vtLCnM2pvLEpD6EBnn3nWO+p9uasjki/tIkAh2b1UKQ1Umxw0JeUSnNI+p6RCJSl37zm99wySWXsGnTJq6//nrP/V27duWLL77g4osvxmKx8NBDD53Uoa26rrvuOh555BFuvPFGHn30UQ4fPsxdd93Fb3/7W+Lj40lOTuaNN97gkksuoVWrVmzbto0dO3Zwww03UFBQwH333ccVV1xBx44d2b9/PytXrmTy5Ml+GZtIU7AlNZuZ320F4MELe3LDsA618jyD2jWjbWwo+zIKSNqSziX9qy59Kyl1sMgV6Jyj+TkiftMkStcAgl0/qOQWaZ6OSFM3ZswYYm
Nj2bZtG7/5zW889z/33HM0a9aM4cOHc/HFFzNhwgRPtqemwsLC+OGHH8jIyGDo0KFcccUVnHfeebz00kuex7du3crkyZPp1q0bt99+O3feeSe///3vsdlsHD16lBtuuIFu3bpx1VVXcf755/PYY4/5ZWwijV2hvZQ/fbiW4lIH5/WI47dnta+157JYLEwa0BqAL9ce8GqftfsyyS4sISYs8LS3lRZpzJpERgdMoJNjV0MCEQGr1crBgyfXz3fo0IGffvqp3H133nlnudu+lLKd2Kmyb9++Jx3fLT4+vtI5N0FBQXz44YdeP6+IlPe3b7ew41AuLSODefqKfrW+zsulA1rzr592smD7YY7mFtE84tRzbua7uq2N6trS505tIlK5JpPRCVFGR0REpMlJ2pzOe7+YhXefvbJ/lUGHP3SJi6Bfm2hKHU4+Xb2/yu1/3mrK1kZrfo6IXzWZQCfY9UpzCxXoiEjNvf/++0RERFR46d27d10PT0SA9OxC7v9sPQC3juzIqNPYtnnyoDYA/P37rTzyv40UFFfc9TU9u5DNqdlYLJzW8Yk0BU2mdC0kwAlYyFNGR0T84JJLLuHMM8+s8LHAwMDTPBoROZHD4eTeT9ZzLN9Or8Qo7pvYveqd/Oi6M9uRfCSPWUv38J9le1m08wgvXD2Afm1iym3nbivdr3U0LU5DtkmkKWkygY4no6NAR0T8IDIyksjIyLoehohU4u2le1m88wghgVZevHYgwQGnd2mJAJuVRy/pzZgecdz32Xp2H87j8leWcvd5XfnD6M4EuNbnmb/dzM8ZrW5rIn7XZErX3HN08oq0YKiIv5w42V4aJv13lMZmXy489+MOAB6+qDdd4upuXYlR3Vryw9RRXNg3kRKHk2eTtnPV68vYezQPe6mDRduPAJqfI1Ibmkyg424vra5rIjXnLs3Kz8+v45GIP7j/O6rkThqD/OIS3t1hw17qZELveK49o21dD4mYsCBe+s1Anr+6P5HBAaxJyeT8fy7i8W82k1NUQmx40EklbSJSc02ndM1mfrFU6ZpIzdlsNmJiYjh0yJRchIWF1Xq7Vn9wOBwUFxdTWFiI1dpkfueplNPpJD8/n0OHDhETE4PNdnpLe0R8tTUtm3eX7aWktPKFfHcfzuVQoYX4qGD+fnntt5L2lsVi4bKBbTijY3Pu/WQdv+zO4N1lphvcqK4t1FZapBY0mUDH015aXddE/CIhIQHAE+w0BE6nk4KCAkJDQ+vNl5/6ICYmxvPfU6Q+e+yrzSzbfbTK7Sw4+cfkPjQLDzoNo/JN65hQPrj1LN5avJtnfthuFjHtGV/XwxJplJpcoKOuayL+YbFYSExMJC4uDrvdXtfD8YrdbmfhwoWMGjVKZVougYGByuRIg5BTaGflngwA7hrThZDAiv/dOkpLydu/jWGdmp/O4fnEarVw+6jOjOkRx7p9WVzYN7GuhyTSKDWZQCdYC4aK1AqbzdZgvijbbDZKSkoICQlRoCPSwCzZeYQSh5NOLcK5d3zlraLtdjvffbf1NI6s+rrERdIlTt0bRWpLkylSVzMCERGRhuvnrWa9mXPUnUxEvNRkAp0QVzMCtZcWERFpWJxOJwu2m0DnXK03IyJeavyBTup6rHP+wllZ3wKQo2YEIiIiDcrWtBzSsgsJDbRxRsfYuh6OiDQQjT/QydqPbfW/6ZK3ClAzAhERkYbm522mu+Owzs0rbUIgInKixh/ohEQDEOwwC+IV2EspdWgVcBERkYZi/jZ32Zrm54iI95pMoBNUenwFdzUkEBERaRiyCuys3nsMgNGanyMiPmhSgU6Aa9Vhla+JiIg0DEt2HqHU4aRzy3DaxobV9XBEpAFpMoGOzWknNsgBKNARERFpKH7eaubnKJsjIr5q/IFOUCROTCYnLrgIUOc1ERGRhsDpdDJfbaVFpJoaf6BjtUJIFADxAYWA1tIRERFpCDYdzOZwThFhQTaGdmxW18MRkQam8Qc6ACExALQILA
AgV6VrIiIi9Z57kdDhnVsQHKC20iLim6YR6ASbjE7zABPoaI6OiIhI/Xd8fo7aSouI75pEoON0NSRoZnUFOmovLSIiUq9l5dtZk+JuK61AR0R81yQCHTyBjllLR80IRERE6rdFOw/jcELXuAjaNFNbaRHxXdMIdIJNoBNtMYGOStdERET850huER+uSMFe6vDbMX/eaubnKJsjItUVUNcDOB2crq5rkSjQERER8bfHv9nM/9YdpKTUwW+Hdajx8RwOp6cRgdpKi0h1NY2Mjqt0LcKZB0Cu2kuLiIj4hdPpZMnOIwCs35/ll2NuOpjNkdwiwoNsDOkQ65djikjT06QCnXBnLqCMjoiIiL8kH8njSG4xAFvTsv1yzPnbTLe1EV1aEBTQNL6qiIj/NYl3D6drjk6owwQ6WkdHRETEP1buyfBc356eS4kf5un8vM3dVlplayJSfU0i0ME1RyekRIGOiIiIPy1PPh7oFJc42HM0r0bHO5ZXzLp9mYAaEYhIzTSRQMdkdIJKTEpdpWsiIiL+4c7oBNnMV4otqTk1Ot7CHaatdPf4SFrFhNZ4fCLSdDWJQMdduhZoN2++CnRERERqLi2rkH0ZBVgtMLFPAlDzeToLtrnaSvdQNkdEaqZJBDqExgBgK84BnCpdExER8YMVrmxOr1ZRDO3QDICtNcjolG0rPbqb5ueISM00jUAn2MzRsTqKCcZOXnEpTqezjgclIiLSsK10zc8Z2iGWHonms3ZrWvUDnQ0HsjiaV0xEcABDXIGTiEh1NY1AJygCJxYAosij1OGk0O6/1ZtFRESaohWuQOfMjrF0i48E4EBmAVkF9modb76rbG1klxYE2prGVxQRqT1N413EYsFuCwMgypIPqPOaiIhITWTmF7Mt3WRvhnSIJTo0kNau5gHbqpnVOd5WWvNzRKTmmkagA9ht4QDEBxYCakggIiJSE6v2HAOgU8twWkQEA9AjwWR1qtOQICOvmPX7MwGtnyMi/tGEAh2T0WnpCnSU0REREak+d1vpMzrEeu7rkegOdHzP6CzeeQSn0wRLCdEh/hmkiDRpTS/QCVBGR0REpKbcHdeGlg10ElwNCVJ9z+isSD4KwPDOLfwwOhGRJhjoNA8oAJTRERERqa784hI27M8C4IyOxwOdnq6Mzra0HBwO37qbrkw+5jqeuq2JiH80uUCnmVWBjoiISE2sS8mkxOEkMTqENs1CPfd3aB5OUICVvOJS9h8r8Pp4JzY2EBHxhyYX6ERbTde1vKLSuhyOiIhIg1W2bM1isXjuD7BZ6RYfAcAWHxoSrKygsYGISE01nUAnwBXokAdojo6IiEh1uRsRDO14cvale7x7no73DQncxzuzguOJiFRX0wl0XBmdSFego9I1ERER39lLHazZmwlUHJj0TPS9xbR74dGhKlsTET9qQoGOWUcnwqmMjohIY1ZaWspDDz1Ex44dCQ0NpXPnzjz++OM4nccnxzudTh5++GESExMJDQ1l7Nix7Nixow5H3XBsPJBFgb2UmLBAurSMOOlxT+c1L1tM5xeXsPGAaWygQEdE/KkJBTomoxPmyAWU0RERaayeeuopXn31VV566SW2bNnCU089xdNPP82//vUvzzZPP/00L774Iq+99hrLly8nPDycCRMmUFhYWIcjbxjcZWZD2sditVpOety9ls6eo3nkF1f9WVtZYwMRkZpqcoFOiAIdEZFGbenSpVx66aVceOGFdOjQgSuuuILx48ezYsUKwGRzXnjhBR588EEuvfRS+vXrx7vvvsvBgwf58ssv63bwDcCKKtpAt4gIpkVEME4nbE/PrfJ4y5MrbmwgIlJTAXU9gNPFXboWXGLedFW6JiLSOA0fPpw33niD7du3061bN9avX8/ixYt57rnnAEhOTiYtLY2xY8d69omOjubMM89k2bJlXHPNNScds6ioiKKiIs/t7Gwz/8Rut2O3230eo3uf6uxblxwOJ6tcGZ2BbaIqHX/3+AiO5Bax+c
AxeieEn/KY7oVCB7eL9sv5aKjntiHQua0dOq++8/ZcNaFAx2R0guw5gFPtpUVEGqkHHniA7OxsevTogc1mo7S0lL/97W9cd911AKSlpQEQHx9fbr/4+HjPYyeaOXMmjz322En3z507l7CwsGqPNSkpqdr71oXUfMgsCCDI6mTf+qUc3FDxdkH5VsDKnF82Ep7+a6XHK3XA6j02wEJByga+O1LJAauhoZ3bhkTntnbovHovPz/fq+2aXKBjddoJoVilayIijdQnn3zC+++/zwcffEDv3r1Zt24dU6dOpVWrVtx4443VOub06dOZNm2a53Z2djZt27Zl/PjxREVF+Xw8u91OUlIS48aNIzAwsFpjqgvvr9gH67cwpENzLr5oSKXbFa09yM9fbKQotDkXXDC00u3W7cukePkKYkIDufnycRXO+fFVQz23DYHObe3QefWdO6teFZ8CnZkzZ/LFF1+wdetWQkNDGT58OE899RTdu3c/5X6ffvopDz30EHv27KFr16489dRTXHDBBb48dY2VWoNxWmxYnKVEka9AR0Skkbrvvvt44IEHPCVoffv2Ze/evcycOZMbb7yRhIQEANLT00lMTPTsl56ezoABAyo8ZnBwMMHBJy9kGRgYWKMvJjXd/3Rbk2K6o53Rqfkpx927TQwA29JzCQgIqHTuzdr95svKkA6xBAcH+XWsDe3cNiQ6t7VD59V73p4nn5oRLFiwgDvvvJNffvmFpKQk7HY748ePJy8vr9J9li5dyrXXXsstt9zC2rVrmTRpEpMmTWLjxo2+PHXNWSwQEg1AlCVPc3RERBqp/Px8rNbyH282mw2HwwFAx44dSUhIYN68eZ7Hs7OzWb58OcOGDTutY21InE6np+PaGVW0ge4SF4HNaiEz3056dlGl21XV2EBEpCZ8yujMmTOn3O1Zs2YRFxfH6tWrGTVqVIX7/POf/2TixIncd999ADz++OMkJSXx0ksv8dprr1Vz2NUUEg0FGUSRzz4FOiIijdLFF1/M3/72N9q1a0fv3r1Zu3Ytzz33HL/73e8AsFgsTJ06lSeeeIKuXbvSsWNHHnroIVq1asWkSZPqdvD12P5jBaRmFRJgtTCw3akDk+AAG51ahLPjUC5b0rJJiA45aRuH43jgpPVzRKQ21GiOTlaWSWHHxlb+BrVs2bJydc0AEyZMOGULz9rqbuMIisSGyegUlTgoKCwiwNZkOmz7nbqE1B6d29qh8+q7hniu/vWvf/HQQw/xxz/+kUOHDtGqVSt+//vf8/DDD3u2uf/++8nLy+P2228nMzOTkSNHMmfOHEJCTv5CLsYKVxvovm2iCQ2yVbl9j8QodhzKZWtqDud2jzvp8R2HcskqsBMaaKNP62i/j1dEpNqBjsPhYOrUqYwYMYI+ffpUul1aWppPnW2g9rrbZOSV0BKIxpTaffntHMKaTDuG2qMuIbVH57Z26Lx6z9vONvVJZGQkL7zwAi+88EKl21gsFmbMmMGMGTNO38AaOG/L1tx6JETy9XrYmlbxpOEVruMNah9DoH50FKmZ/AzY9RMEhUNkAkQkQHhLsDXtL7rVfvV33nknGzduZPHixf4cD1B73W2ateoI2zcTaysABwwfdS6tYrQKc3WpS0jt0bmtHTqvvvO2s400fit8LDPrmRgJwNbUnAofX5mssjWpB0rtJkjIPwL5RyHP9dd9CY6EwTdDTNu6HmnlNn0J395rXkM5FhPsRMabwCcyHoIioaQA7K5LSWGZv4VQWgTNOkL74eaSOAACvGwU4nRC1n5I22D26TDK+31rSbUCnSlTpvDNN9+wcOFC2rRpc8ptExISSE9PL3dfenq6p+tNRWqru401zNQUtwgoBDsUllr0ZccP1CWk9ujc1g6dV+/pPAnAkdwidh821RBDOnjXOKBHgvlhctfhXIpKSgkOOF7u5nQ6PaVw3maIRLzmcMCueXB0FxRmlblklv9bkAVFWVUfb8mL0P8aGHkPNO9c26P3Xu
5h+O5e2Pw/c7tZRzMfPTfdXJwOyDtkLviwRtWR7bDjB3M9IBTaDoV2rsCnzVAICoOSYjiyzQQ1ZS+FmcePExIDvS6BvldC+xFgrbrk1d98CnScTid33XUXs2fPZv78+XTs2LHKfYYNG8a8efOYOnWq576kpKQ66WzjdHVdi7WZUgy1mBYREanaKlc2p0dCJDFh3v1CmxgdQlRIANmFJew6lEevVscrMvYfKyAt27vGBtLIFWabL9aHt5kvzoe3Q24adJ0AZ/4ewnwMhHcvgKSHIXWdDztZzPOEtYCw5hDe3PwNaw77VsCeRbD2PVj3PvSZDGffC3E9fRuXPzmdsPFz+O4+KMgAawCMnAaj/gwBrkSBo9Rkp3LTICfd9TfNZG8CQyEgpPxf93VrAKRvhL1LzaUgA5IXmguYx5t1gGN7wVHBHE5rALTsAXmHTbC15l1ziUiAPpdDnyug9SDTDfk08CnQufPOO/nggw/43//+R2RkpGeeTXR0NKGhpgTshhtuoHXr1sycOROAu+++m3POOYdnn32WCy+8kI8++ohVq1bxxhtv+PmleCHYBDox1gIAtZgWERHxwvJqlJlZLBZ6JEaxIjmDbenZ5QIdXxsbSB1L3wwbPsG2eyFtAocC1VwL0emELV/BniXHg5qcgxVve3AtLHsJht4Cw6ZAxMkNLcpJ2wg/PgI7fzS3gyKhy3kQ2sxkOcpdYo5fD2sOoTGnzjakLIdFz8COubDhU3PpcZEJLFoNrM6ZKM9RirWioKEiOWnwzTTY9q25Hd8XJr0Mif3Lb2e1mVK1yHhIPPkwp9TpHBh2p8mMHdkOKUuPBz7ZB+DoTrNdcDQk9C1/adndBFuOUtizGDZ+Bpu/MoHWL6+YS7OOJmDseyXE9fBxcL7xKdB59dVXARg9enS5+9955x1uuukmAFJSUsqtXzB8+HA++OADHnzwQf7617/StWtXvvzyy1M2MKg1roxOjMVkdBToiIiIVM3TBrqjb7+u90iIZEVyhpmnU+b7oK+NDaQOZB0wX1J//RTSTdmTFRjMakqXtIBz/uzbr/IlxfDtPbD2vyc/FpEALbtBi+6uL8ohsPw1k1lY8k9Y/joMuhFG/AmiT5gykbkPfn4S1n8IOE1GYcgtcM79EN6i2i+/nHZnwnWfQup6WPSs+eK+9Rtz6XyeKWuL6wUtuh7PqJxKSbEJ5PYugb1LCdj3CxcV5cLex83rL3suWnQzgZjTCes/gjkPmPIwa6B5jSPvAVstlRhbrSYQiesBQ35nxpCZYgKd5l0gpl3l/wasNhMwdToHLnjWlBJu+Ay2fQfHkk3guOFTuHt9rWZ3fC5dq8r8+fNPuu/KK6/kyiuv9OWpaoUzxPyaFGlR6ZqIiIg3cgrtbD5omlL4Gpi45+lsSSvfkMDXxgZymhRkmozLr5+YX+Nxfe+zBkLX8ThCYrCufx/b/CcgLw3Of9q7eRf5GfDJDaYEzGI1X5oTB5T/In+igdfD9jmw8Bk4sApWvA6r3oYB18KIqabUbNFzJggqdS1J0vsyGPNQ7c2jSewPV71ryuwWPWe+qO+aZy4AFhvEdjJlbXE9TQlXXC/TBe3gWkhZZrIi+1eayf8unq/5mXvMxT0/xi0i3mSeDm12jWMATHoF4nvXzuusjMUCzdqbiy8CgqD7+eZSnAfbvjeld4n9a72ErWn1nHOVrkU6zYRKBToiIiKntm5fJg4ntI0NrXDhz1Pp4em8drx7X3UaG0gtyTtqvoAfXGuCiV0/Hw8awExA73cl9JoEYbGU2u1sOgJ9DnyAZeVbpoxq8ltmfkdlju6CD64yWYCgCLjiHeg2vuqxWSzmi3G3iZC8wAQ8exaZ+R5r/2uOVeT6d9V+JIybAW0G1+h0eK1ld7j8dRj9AKx4Ew6uMUFIYRYc3WEuW7469THCmrs6m43A3moo81ZsYuyAdgQc22VK+sqW9rmbC9iCzHMOv7vhto0OCoe+V5jLadBAz1I1uUrXwl2Bjk
rXRERETm3XoVwAeiX6vrxD93gT6BzKKeJobhHNI4I9jQ26x3vf2ED8oDDLlF4dWOMKbtaYMqQTtexpgpu+V5rSpBPsjptAzzPOJeCrP5rSrXcnwbUfVtw0YM9i+Ph6KDgG0W3hNx/7noWwWKDTaHMpO1emKNuMddxj0HX8aZvcXk5sR5j4pLnudJrA79BmOLzV/D201VwvzjWvv/1waDfMdCBr0fX4mO12igIP4Gw/ErqcW/45CrPhyA5T7tV6kMkYideaVKDj7roW6jBv2rlFpXU5HBERkXpvz1FT7t2hRbjP+4YHB9C+eRh7j+azLS2H4V2CPY0NzvBxvo+cQnYqZO0zX7Rz083fnLTyHbfyDle8b/Mu0GqQ+RLdYSTE96kyaHD2mgTRifDhb2DfL/D2RLj+8/Jrzax9H76+23Tmaj0YrvnQTIyvCfdcmbQNJkjrNrFOWhZXyGKBqERz6XLe8fudThNkVlSe542QKJOpOl3ZqkamSQU67tK1kNIcwKmMjoiISBWSj5gqiA7NfQ90wDQk2Hs0ny1pOQzv0qLajQ385theMyG66/jqz+VwlMKPj0JOKrQeYtYZSehXe5PCK1KYDZu+MGVc+1d6t090O2g90AQ2rQZCqwGeahefdRgJv5sD/51syqz+PQ6u+8zMSflpBix+3mzX+zKY9Oqpy9t85e7w1RBYLNUPcqTGmlag42pGYHOWEkqRAh0REZEq7D1a00Anih82pbM1NbtGjQ1qxFEKO5Jg1b/NX5xmMvktc035kS+cTvj+L7DyTXN7w6fmb0CICR7aDIW2Z0CbM2qewajoufcuNcHN5i/BbrJtWGwQ3dp0L4uMP+FvgpnMHt3G9zVpqhLfC25Ngv9eAYe3wDvnQ5shsOsn8/io+2D0X033LpE60LQCncAw03bQUUI0eeQo0BEREamUvdTBvmNm7bmO1ShdA+jpbkiQlsOalOo3NgDMhPTlr5l5Hm3PgnZnmS/WwZEVb597yExeX/0fyCozHyW0mVkt/r+Xwy1JvrUhXvovV5BjgTNug2N7zKKShZmmq1bKsuPbNutgWut2Hev7ay0r+yCs+8AsWJmx+/j9Lbqb7mT9r6l6nZnaEt0Gfvc9fHSdaZe86yczaf6Sf5lxidShphXoWCwmRZt/lChLvjI6IiIip7D/WAGlDichgVbio7xYH6QC7hbT29NzWLbrKFDNttLrPoSfHjfXd883FzDtihP6Hg98EofQPHcrttmzzWR590KMITEmKBh8MwRHmFKrjN3w/pVw49fmvqps/AKSHjLXJ/zNLKoIZmHFozth/woT9OxfCYe2mCBo9u3wp3WeqhKflJbAV1Pg14/B6TD3BUWaFeYH/tYEeXUxCf9Eoc3g+i/g22nm9V/yopl4L1LHmlagA+aNLv8oUeQp0BERETmFPWXm51iq+YW6XWwYoYE2CuylzF67H6hG2dreZfD1n8z1M35vOlbtW266cGWlmG5iqethxesEAiPL7tt6CAy9xcwVKTtP5Pov4N/jTfexT28yncNONcdm71KY/Xtz/cw74Kw/Hn/MajWLPLbsZoIpMGvSvHWeCYCW/gvG/J9vrxlgzSzXQpiYTl0Dr4del5oWvfVNYIhZ20WkHmmCgY6ZdBdlyWevAh0REZFK7anh/BwAq9VCt4RI1u/LJD3brNHiU8e1jGT4+DooLYaeF8PEv5vA4ozbzONZB0znrxRzcaZvpNQSgLXf1VjPvM0sSliRFl1NB69ZF8HOJPjqT+aLekUB3ZEd8OG1Zgw9LoIJT1adSQmNgfMeNgtlLnsJht7q25ydgmPw09/M9fOfhjN/7/2+IgJA05sd5g50yCdP7aVFREQq5cnoVHN+jlvPhONzaFpEBHk/36cwCz68BvKPmoDlstdPntge3Rr6TIYL/gF3LKLkz8l83/cVSi98vvIgx63NELhylpnMv/6D46VxZeUeMp3FCjNNo4HL3/S+pXHPS0xGyZ4PC/7u3T5u85+Cgg
yzVsyQW3zbV0SAphzoWFS6JiIicirJ7jV0mofV6Dg9ygQ6QzvEelcGV1oCn95sFlyMTIRrP/KuZCsoHIfVh4VIu0+Ei/9pri96Fpa/cfyx4jz44CrI3AvNOrrG4MO5sFjMgpZgGiIc2endfoe2wgrXOCbOBFvTK8AR8YemG+iQT25xCU6ns44HJCIiUj95WktXlYFZ9jI8kQCvnQ3f3GPaHx/aaibpAz0Sj0/E97oRwQ/TYdc80zH12o8gqlW1XoNXBv0Wzn3QXP/+ftj0pQm0PvsdHFwLobFmQUxfurO5dRgJXSeAsxTmPVb19k6nee3OUuh+IXQ+1/fnFBGgic/RcTohv7iU8OCmdxpEREROxV7qYL83raWP7YV5M6CkENJ+NZdVb5vHgqOg1UAGxA9knNXKakc37+bnrHjzeEbj8jfMwpa1bdSfIeegGfsXt0Hn82D7HLM+zm8+rv7iogBjHzXzgLZ8BftWmgVGK7P9h+MtmsdXUEonIl5ret/wXYFOtMX8SpVXVKJAR0RE5AT7MvIpdTgJDbQRF3mK1tJzHzRBTvuRpkHAgVWwfzWkroOibEheQEjyAt50VZM5vx0IXcZB13HQevDJ8112/mgW5AQ47xHTgOB0sFjggmfMnJyt38D27wGLCbTanlGzY8f3gv6/gXX/hR8fgZu+rbiZQUmxyeaA6epWk+BKRJpuoNPMan6lyi0qoY6W2BIREam33B3X2jcPq3xOTfJCk6WwWOGCp81Cnr0nmcdKS+DwFti/6njwc3gLloNrTTnYwqfN+iudx0DX8SaDkn/UzMtxlprAYOQ9p+fFulltMPkt03xg7xIzP6bXpf459rnTYeNn5rg75kK3CSdvs/w1s7ZPRLzJMIlIjTTBQCcGOB7oqPOaiIjIyfYcMY0IKi1bKy2B7x8w14f8zgQ5ZdkCzEKeCX1hyM3mvpw0k7HZkQS7fjYtlDd+bi4AQRFQnAvthsPFL9TNYpiBoXDjN6aMLbqN/44b3ca0iF7yT/jxUegytnw2K/cQLHjaXD/vEQiOrPAwIuK9ptuMwGLewHOK7HU5GhERkXppT1WNCFa/A4c2mR8Qz/VyMczIBLPo5VX/gft3w81z4Ox7IaGfebw4F5p1gKv/CwGnKJerbVarf4Mct5H3mPN1aDOs/6j8Y/NmQHEOtBoI/a/1/3OLNEFNMKPj7rqWCyijIyIijZfD4eRoXjEtTzXHphLJ7jV0KmotnZ8BP7sWsxzzIIT5sAComy0A2g8zl/MeNtmelF+g3VkQ3tz34zUEoc1MYJf0kDl/fS43GaSD60ynOoCJT528VpCIVEvT+z8pNAaACI43IxAREWmM/r04maF/+5G5m9J83nevZw2dCjI682easrO4XjD45poO04hMMPN7IhP8c7z66ozbIaoNZB+A5a+bdtJzHgCc0PdKaHdmXY9QpNFoeoGOK6MT7sgDnOQq0BERkUbqxy3pAPywKd2n/YpLHOw/VskcnfTNsPLf5roWs/RdYAiMcZX6LX4OVv0bUpaZ9YLGerHOjoh4rckGOlYchFOojI6IiDRKTqeTrWk5APy6P9Onffcdy8fhhLAgW/myN3f2wVkKPS6CTqP9N+CmpN/VENcbCrPg23vNfSPvgejWdTsukUam6QU6ASFmES4ginwFOiIi0iilZReSVWAa7uw8nOvT592eI+7W0uHlW0tv/QaSF4AtGCb8za/jbVKsNrOIqFt0Wxh+V50NR6SxanqBjsVSpvNaHjkKdEREpBHamprjue50wsYDWV7vu+eou2ytTCMCeyH84Cq5Gn6X6Y4m1dd1nFlDCGDCk6YpgYj4VdMsrA2JhrzDyuiIiEijtSUtu9ztDQeyOLOTd93M9ng6rpWZn7PsJcjcC5GJp38hz8bIYoGr3zdNCVp0revRiDRKTS+jA+UyOmovLSIijZE7o9MsLBCAX/f7ktE5IdDJPgiLnjPXx82A4Aj/DbQpCwpTkCNSi5p2oEO+uq6JiE
ijtNWV0Zk00Exw96UhwUmLhf74KNjzoM0ZpgWyiEgD0LQDHYtK10REpPEpKill92ETrFw5uC1g5t1k5dur3Le4xMGBY/l0t6TQa+fr8OYY+PVjwALnP2VKrkREGoCmO0cHiCJPGR0REWl0dh3Ko8ThJCokgJ6JkbSLDSMlI58NB7IY2bVFxTuVFMPeJeSv+4qFQV/RxnIElpR5/Ox7ofWg0zJ+ERF/aNqBjkWlayIi0vi4y9Z6JEZhsVjo2yaalIx8fj2QeXKgs3sBrH4Hds6DomxigBgLFBFEcLcx0P186DoBohJP++sQEamJJh3oRJOn0jUREWl03AuF9kyIBKB/m2i+/TWVX/ed0JDgl1dhznTAaW6Ht2Rb9Aie2dOJiJ5jef43I07jqEVE/KuJBjoxgHuOjrquiYhI47Il9XhGB6Bv6xjAtJgGwOGApIdMy2iA/tfCkFug9WDe+2oTSbtTuDPOu1bUIiL1VRMNdI7P0SkudVBUUkpwgK2OByUiIuIf7oxOD1dGp0/rKCwWOJBZwJHMLFok3Q2bZpuNxz4KI6Z6mgzsOWIWC21fdg0dEZEGqIkGOjGAyegA5BUp0BERkcbhSG4Rh3OKsFigW7wJdCJDAunUIpzDh9MJeH8yHF4J1kCY9Ar0u6rc/u7W0h1bKNARkYatiQY6rjk6nkCnhNjwoLockYiIiF9sc2Vz2seGER58/GN+dHwh12Q9RszhAxAcBVf/FzqdU27fopJSDmYWAGUWCxURaaCadKDjzuio85qIiDQWnvk5CVHH70z9lXtTphBmPUyGrQWxv/sK4nuftO++jHwcTogIDqBFhH4AFJGGrUkvGBpBPhYc6rwmIiKNhmd+TqIpW2PnPHjnfMKKD7PV0ZbfWp7EGderwn2TPfNzwrBoYVARaeCadKBjw0E4hcroiIhIo7HN04ggCrb/AB9cBcW5lLY/m2tKHmFTbgRp2YUV7rvXNT+ng+bniEgj0DQDncAQsAUDEIVaTIuISONQUupge7or0ImPgHmPg6MEel+G7befkxAXD8Cv+7Mq3D/5iKsRgebniEgj0DQDHSg3Tye3yF7HgxEREam5PUfzKSpxEBpoo13hNkjfYH7Yu/A5CAimf5sYAH7dn1nJ/sroiEjjoUCHPHKV0RERkUZga5ppRNA9IRLr2lnmzt6TICwWgL5tzGdfZRkd9xo6HZqH1eo4RUROBwU6lnw1IxARkUZha6opW+sfZ4MNn5s7B9/kedyd0dlwIAun01lu30J7KQezXK2lldERkUagyQc60eQp0BERkUbBndGZ4FwM9jxo0Q3aDfM83j0hkiCblcx8O/syCsrtuy8jH6cTIoMDaK615USkEWi6gU5oDABRljx1XRMRkUZhiyuj0y99trlj0I1Qpk10UICVnq620+tPmKfjbkTQvoVaS4tI49B0Ax3PHJ18BToiItLgZRfaOZBZQB/LbiKObgRbEPS/9qTt3PN0NhwoP09n71H3/ByVrYlI46BAR3N0RESkEXCvn3NL6EJzR89LILz5Sdv1ax0DnNx5LdnVca2j5ueISCOhQAeVromISMO3NTWbMAqZ6Fxs7hh8Y4Xb9WtrPv82HsjG4TjekGCPq3RNGR0RaSwU6Fi0YKiIiDR8W9JyuMi2jFBnPsR2gg5nV7hdl5YRhARayS0qYbcruIEypWst1FpaRBoHBTqodE1ERBq+bWk5/Mb2k7kx+KZyTQjKCrBZ6dPKvZ5OJnBCa2lldESkkVCgo65rIiLSwDkcTkjbwADrLpzWQOj/m1Nuf+LCoSnu1tIhAcSqtbSINBJNONCJAdR1TUREGr4DmQVcUpoEgLP7hRDR8pTbuxcOdWd0ksvMz1FraRFpLJpwoHM8o5NfXFpuQqaIiEhDsm1fOpfZTBMC65CbqtzendHZdDCbklIHe10d1zqo45qINCJNPtCJpAALDvKKldUREZGGyblpNlGWAo4EtoKO51S5fcfm4UQGB1BU4mB7ei7JR/
Jd96sRgYg0Hk030AmOAsBqcRJJgTqviYhIg9Up5TMAdredDNaqP9qtVgt9WrsXDs083lpaGR0RaUSabqATGAIBIYBpMa15OiIi0iClb6Zz4SbsThuOAaduQlCWez2dX/dneUrX2qvjmog0Ik030IEyDQnUeU1ERBqmkpXvAPCjYxCdO3b2er9+rWMAWLkng4NZhQB0VEZHRBqRJh7olF00VIGOiIg0MPYC+PUjAL4NnEDLyGCvd+3nakiwPT0XgKiQAJqFBfp/jCIidcTnQGfhwoVcfPHFtGrVCovFwpdffnnK7efPn4/FYjnpkpaWVt0x+49n0VBldEREpAHa/BUBxdnsd7YgK3GkT7u2aRZaLrDp0EKtpUWkcfE50MnLy6N///68/PLLPu23bds2UlNTPZe4uDhfn9r/lNEREZGGbPUsAD4sGUP3xGifdrVYLPR1racDZg0dEZHGJMDXHc4//3zOP/98n58oLi6OmJgYn/erVZ6MjgIdERFpYI7sgJSllGLl09JzuC8h0udD9G8TzcLthwF1XBORxsfnQKe6BgwYQFFREX369OHRRx9lxIgRlW5bVFREUVGR53Z2djYAdrsdu93u83O79zlxX2tQJDbMoqFZ+cXVOnZTVtl5lZrTua0dOq++07mqx7b/AMAv9OUQzeiZGOXzIfq2Pp4F6thCa+iISONS64FOYmIir732GkOGDKGoqIi33nqL0aNHs3z5cgYNGlThPjNnzuSxxx476f65c+cSFlb9N+KkpKRyt3sePEI3TEZn2eZtfJe7pdrHbspOPK/iPzq3tUPn1Xv5+fl1PYRqOXDgAH/5y1/4/vvvyc/Pp0uXLrzzzjsMGTIEAKfTySOPPMKbb75JZmYmI0aM4NVXX6Vr1651PHIf7J4PwE/2Plgt0CUuwudD9G8b47mu1tIi0tjUeqDTvXt3unfv7rk9fPhwdu3axfPPP897771X4T7Tp09n2rRpntvZ2dm0bduW8ePHExXl+y9WdrudpKQkxo0bR2Dg8YmX1mU7If1roiz5JLTtwAUX9PD52E1ZZedVak7ntnbovPrOnVFvSI4dO8aIESM499xz+f7772nZsiU7duygWbNmnm2efvppXnzxRf7zn//QsWNHHnroISZMmMDmzZsJCQmpw9F7qaQI9i4BYImjL51aRhASaPP5MPFRIZzZMZb9xwromeD756uISH122krXyjrjjDNYvHhxpY8HBwcTHHxyi8zAwMAafTk5af+wWMB0XSuwO/TFp5pq+t9FKqdzWzt0Xr3XEM/TU089Rdu2bXnnnXc893Xs2NFz3el08sILL/Dggw9y6aWXAvDuu+8SHx/Pl19+yTXXXHPax+yz/SvBnk9+YCxbC9tyUTXm57h9cNtZWACrVR3XRKRxqZN1dNatW0diYmJdPHV56romItLofPXVVwwZMoQrr7ySuLg4Bg4cyJtvvul5PDk5mbS0NMaOHeu5Lzo6mjPPPJNly5bVxZB95ypb2xw6CLBUa36Om81qUZAjIo2Szxmd3Nxcdu7c6bmdnJzMunXriI2NpV27dkyfPp0DBw7w7rvvAvDCCy/QsWNHevfuTWFhIW+99RY//fQTc+fO9d+rqC5XoBOtdXRERBqN3bt38+qrrzJt2jT++te/snLlSv70pz8RFBTEjTfe6FnHLT4+vtx+8fHxla7xdrqa5HjLtvMnrMACey8AurQMU+MIFzUdqT06t7VD59V33p4rnwOdVatWce6553puu+fS3HjjjcyaNYvU1FRSUlI8jxcXF3Pvvfdy4MABwsLC6NevHz/++GO5Y9SZkBgAIi35CnRERBoJh8PBkCFDePLJJwEYOHAgGzdu5LXXXuPGG2+s1jFPV5McbwSU5HHBwTUAfJHZBYCDm1fy3a5qD6NRUtOR2qNzWzt0Xr3nbaMcnwOd0aNH43Q6K3181qxZ5W7ff//93H///b4+zekRGgOYOToqXRMRaRwSExPp1atXuft69uzJ55
9/DkBCQgIA6enp5cqo09PTGTBgQIXHPF1Ncrxh2fYdlg1OCqM7cSC9BZEhAVw3aRwWi8rPQE1HapPObe3QefWdt41y6qQZQb3hmaNTQEFhcR0PRkRE/GHEiBFs27at3H3bt2+nffv2gGlMkJCQwLx58zyBTXZ2NsuXL+cPf/hDhcc8bU1yvLF3EQAHYs+EdOiREElQUFC1x9BYqelI7dG5rR06r97z9jw17UAn+PivcM6inDociIiI+Ms999zD8OHDefLJJ7nqqqtYsWIFb7zxBm+88QYAFouFqVOn8sQTT9C1a1dPe+lWrVoxadKkuh28N3b/DMDagAEA9FBbaBGRCjXtQCcgCEdAKNaSAgKKs3E6nUr9i4g0cEOHDmX27NlMnz6dGTNm0LFjR1544QWuu+46zzb3338/eXl53H777WRmZjJy5EjmzJlT/9fQydwHR3eCxcqPBd2BfHokVr+1tIhIY9a0Ax0w5Wu5BYQ78ygqcVRrwTUREalfLrroIi666KJKH7dYLMyYMYMZM2acxlH5QfIC87f1YNYeKgWU0RERqUydrKNTn1jcDQksajEtIiL13C5TtlbQ9mzSs0276+41WCxURKQxU6DjbkiAFg0VEZF6zOHwLBS6K3IoAO1iw4gIVnGGiEhFmnygc7zzmjI6IiJSjx3aDPlHIDCMVSVm/ZweyuaIiFRKgU65jE5pHQ9GRESkEq5ua7QfweZDBQD0SNT8HBGRyijQ8WR0VLomIiL1mKtsjU6j2ZpmlkToqYyOiEilFOh4Mjp55CjQERGR+qikCPYuBaC042i2p5tARxkdEZHKKdApM0dHGR0REamX9q0Aez6Et2SvrT2FdgehgTbaxYbV9chEROotBTohMYCZo3PgWEHdjkVERKQiZcvW0nMB6BYfgc2qRa5FRCqjQKfMHJ01KcfqeDAiIiIV8AQ657I1NRvQQqEiIlVRoFNmjs66fZmUlDrqeEAiIiJlFGTCwTXmeqdz2JLmnp+jRgQiIqeiQMcV6MRY8skvLvV0shEREakX9iwCpwOad4XoNmxNU0ZHRMQbCnTcgY7VzM9Zq/I1ERGpT8rMz8kptLMvw7WGjlpLi4ickgIdVzOCUGc+NkpZvVeBjoiI1CPuQKfzuZ620glRITQLD6q7MYmINAAKdEKOp/4jKGC1MjoiIlJfZO6DozvBYoUOI9mSqvk5IiLeUqBjC4TAcACiLXnsyyjgUHZhHQ9KRESE49mc1oMhJJpt7kYEmp8jIlIlBTrgmafTr7m5qTbTIiJSL5RpKw14GhH0VEZHRKRKCnTAE+gMjDOnQ/N0RESkzjkc5RoROJ1OtrpK17qrEYGISJUU6IAn0Okda9bQWZOSWYeDkVpRmA0Ln4GctLoeidQmp7OuRyDiP4c2Qf4RCAyDNkM5kFlATlEJgTYLnVpE1PXoRETqPQU6AKExAHSNKgVgw/4sikpK63BA4nfLX4OfHodvptX1SKS2pG2ApzvBnOl1PRIR/3Bnc9qPgIAgTzanc8sIggL08S0iUhW9UwJEJgAQm7+b5uFBFJc62Hggu44HJX51cK35u+MHyDtSt2MR/3M44OupUJABq942GTyRhm7Xz+Zv5xPn56gRgYiINxTogGeSp2XHXAa1bwbAGs3TaVzSNpi/jhLY8FndjkX8b+17cGCVuV5SCNu+r9vxiPiD+wea9iMA2OLpuKb5OSIi3lCgA9B5DFgD4OhOxrQ0HyRqSNCIFByDrH3Hb6//sO7GIv6XnwE/Pmqux3Yyfzd9UWfDqXcydkNOel2PQnzldEJhlrkeEQ/A1lST0emhjI6IiFcU6IBZNLT9cABGOFYDsDrlGE5NbG4c0jaav2EtTECbug4ObanTIYkfzXvMlKzF9Yar3jP37ZxnAtymLusA/OdSeGeiWXhSGg57AThdc0WDIyi0l5J8JA+AnsroiIh4RYGOW7eJALQ+spBAm4XDOUXsP1ZQx4MSv3CXrbU9E7pOMNeV1Wkc9q
+G1f8x1y98BhL6mIDHYYct39Tt2Opa3hF4bxJkpQAWCAiu6xGJL4pzXVcsEBjOzkO5OJwQGx5Ey0j9txQR8YYCHTfXF2Db3qUMTggEtHBoo5Huyugk9IUB15rrv34CDnXWa9AcpfDtNMAJ/a/1ZGXpc5n5u/HzOhtanSvIhPcugyPbIaoN3PA/iIir61GJL4pMGTVBEWC1ssVdtpYQicViqcOBiYg0HAp03Fp0MfX9DjuXR+8ANE+n0Uj71fxN6ANdx0NoM8hJPd66VRqm1e+YMsTgaBg34/j9vS83f5MXQu7hOhlanSrOgw+uNv/uw1uaICembV2PSnzlDnSCTZnaVk8jAs3PERHxlgKdslzla8NKTfcmBTqNQEkxHN5mrif0NeU7fa4wt1W+1nDlHYF5ruBmzIPlsxXNO0PiADO/Ycv/6mR4daakCD6+Hvb9YhZC/u1s8yOONDyeQMcsDOpuLa2OayIi3lOgU1bX8QC0OrwQCw62pGaTV1RSx4OSGjmyHUqLITgKYtqb+/q7yte2fKP1VhqqpEdMR6qEvjDkdyc/3mey+buxCXVfKy2Bz2+BXT9BYDhc95k5P9IwuefoBEfidDrZ4lostEeiAh0REW8p0Cmr/QgIisCWf5gxUak4nLB+X2Zdj0pqwt2IIL4PuOvaWw+CFt2gpAA2N7Ff/BuDlOWw7r/m+oXPgS3g5G16u+bp7F0K2QdP39jqisMBX90FW74GWxBc8z60PaOuRyU1UWaOzuHcIjLyirFaoGucAh0REW8p0CkrIMizAvXkSDOBXeVrDVzZRgRuFgv0v8ZcX//R6R+TVF9pCXx7r7k+8PrKv8zHtDVd9nDCpi9P1+jqhtMJcx6A9R+AxQZXzvK8j0kDVmaOzlZXNqdDi3BCg2x1OCgRkYZFgc6JXPN0zrCvBMx6OtKAeRoRnFDC0+8awAJ7F8OxPad7VFJdq/4N6RsgJAbGPnbqbd3law118dDCLFjyT5j/FKx4EzbNhuRFZg2o3MPHuwb+/DdY8bq5PulV6HFh3Y1Z/KdMoLPN1YigpxoRiIj4pIKajyauyzgAWmRvpiXHWJsSiMPhxGpVO88Gx+k8vlhoQp/yj0W3hk7nmM5r6z+G0X857cMTH+Wkw09PmOvnPQzhLU69fa9L4fu/wP6VcGwvNGtf9XOUFMGH10JOmvk30fOS4yWPp9OxvaZz2uFTLWxrMQ0HCjPNzQuegf5Xn47RyelQZo7OFjUiEBGpFmV0ThQZD60GATAu6FeyCuzsPpJbxU7ecTicpGUV+uVY4oXsg1CQYcp5WvY8+XF3U4L1H5qgSOovRyl8fz8UZUOrgTD4pqr3iUyADiPN9U2zvXuenx6HXfPg0Cb45AZ453yzKOnptH8VvHWeCXIiEmDQjdDjImh7FjTvatqjA+A8HuSc9wiccdvpHafUrgpK13okKqMjIuILZXQq0m0CHFzDpWEb+aD4HFbvPUYXP0wAffK7Lby1OJn3bz2TEV2q+DVaas7diKBFNwgMOfnxnhfDN9PgWDLsWw7tzjq94xPvFGabbmI75gIWuPBZsHo5T6HPZNizyCweOnLqqbfdvQCWvmSu97vGNKpIWQZvjTHHOe+RqrNCmSmw+SvY/KW5PvC35nmDvXz/2PQlzP49lBRCfF/4zccm+3ii0hITxOcfhcAw77JV0rC4Ap3SwHB2HjI/timjIyLiG2V0KuJqMz2weC1B2P3WkGB5cgYAazXv5/RIdwU6lbXYDQo35U2gNXXqq4zd8O9xJsgJCIEr/g2tB3u/f89LTEYv7Vc4srPy7fIzYPYdgNNkiy5/Hf60BgZcB1hMoPTSUEh62MydKTfGZFj8ArxxLrzQF+b+nymXy02HRc/Ai4Ng1TsmOKmM0wmLnoNPbzRBTtcJ8LvvKw5ywHSai4iDuJ4KchorV6BzxB5McamDiOAA2jQLreNBiYg0LAp0KpI4ACLiCXLkM9S61S+BjtPpZM+RPAAOZKp87bRIqy
LQARjgKl/bOBvsBbU/JvFe8kJ4cwwc3gqRiXDz98cbDHgrvPnxDmSVNSVwOuGbeyDnIDTvAhOeNPdHtYJJr8DvF0LHUVBaZJoDvDgQfnkVFj0Lr4+CFwfAj4/AwTVgsUKHs818mSvehtjOkHcIvpkKr42EnT+e/PwlxaY19DxXc4Uz74BrP/Q+CySNkyvQ2Z9nspfdEyKx1MV8MRGRBkyBTkWsVuhqmhKcZ13LrsN5HMsrrtEhj+QWk+NafDQ1S1+oTwtPoNOn8m3aj4TotlCUBdu+Pz3jkqqtehveuwwKjpk5c7f9bNY/qo7el5u/Gz+v+PH1H5pSM2sAXP6myfSVldgPbvgKrv3YlEHmHzXtnOfNgNT1JrjpeI5Z0+febXDTN2a+TJ/J8MdfYOJTZl7N4S3w38nw3uWQvtkcu+AYvD8Z1r5njnP+P+D8p7wvzZPGy9WMYE+u+ZhW2ZqIiO80R6cy3SbC2v8yPnA9M0pg7b5jjOkRX+3D7Tma57l+MFOBTq0ryjElRWDmOlTGaoV+V5sSo/UfQp/LT8/4aurITlj4D/Orf3RriGoD0W3M9chEsAXW9Qirp7QEfpgOK94wt/tcAZe+BIE1KNnpcSF8E2QyQ4dO6GKWkQzf3Weuj55eeTBlsUD3idBlLKyZBSvfNqVjvSeZRgGVdYALCIKz7jDd0BY+A8tfN80OXvvZlMXtWw5HtkNQBFzxDnQbX/3XKY2LK6OzK9PcVCMCERHfKdCpTKfRYA2kjSOVjpZU1uzNrFGgk3ykbKCj0rVal74ZcJov/REtT71t/2tNoLNznmlhHFn9/86nzTdTzST7ilisEBFvAp8OZ5sv8AFBp3V41VJwDD69ybT8BtNCeuS0mrd3Do0xAcq277Bung0MMPeXlsAXt5tfztsNh5H3VH0sWwAMvdVcfBpDM5jwNxh6C/z4qGl0sPY981hUa9N04FQlltL0FJmMzhZX5XRPZXRERHym0rXKBEd6WtOOsa6p8TydPWUCndyiErIL7TU6nlTB3Ygg/hRla24tukCboeAshQ2f1u64/GHPYhPkWANhxN0mI9V+JDTrALYgcDogJ9VMiF/8HCx7qWbP53D4ZdiVcjph71J48zwT5ASGw9Xvw9n3+m8NG9fcHuvm2cdbiS96FvavgOAo03zgdJSLxXaCq96Fm+eYILTjOXDrPAU5cjJXRmevq3StmwIdERGfKaNzKt0mwO6fGWNdxwf7LqGk1EGArXqxYdmMDpjytaiEBlpe1BB404igrP7XmsBg/YcwfErtjcsf5v/d/B10A4ybUf4xhwPyDkP2fpOh+vlvsOBp80W/Ot25lr9hJsmf+Xs49//8GwyU2k1mY9nLZiI/QHQ7MxH/VPOqqqPbRAgIxXIsmei4PVgOrIIFT5nHLnwWYtr59/mq0n6YmcsjUhGnE4pNoJPjDKVNs1CiQvR5ISLiK2V0TqXbBADOsG0lwJ7D1rScah+qokBHapE3jQjK6n2ZyYakb4QdSbU3rppKXmSyObYgOHvayY9brab0rvVgGHWfyRqUFJjFNn1dFDV1vZkvU5xrsh/vX2naMNdUQabpXvbP/mZ9nINrwBZsArfbfvJ/kAMQHOH5/7n90QXY/vcHk8HrcwX0u8r/zydSE/Z8k5kFcgmlR4Lm54iIVIcCnVOJ7QTNuxJIKSOtG6pdvuZ0Otl7NB+ALnERgObp1CpH6fGuVgn9vNsnLNYs7gjw2S1weHvtjK0mnE6YP9NcH3SDmYNzKhb34pqBsH0ObP3W++eyF5r5K44S0/UsINRMon9j9PEg0lcZu+H7v8Bzvcx6NNkHILwljP4r3LMJLvlX1fOpasJVvtbxyE9YjiWbbnsXPlt7zydSXa75OQ6sFBBMz0SVrYmIVIcCnaq4fgU+z7a22oFOenYRBfZSbFYLZ3SMBZTRqVVHd5ksRmCYCVa9NXEmtD3LtJr+4CrIO1p7Y6yOPYtg7xKTzRlZQTanIi27w4g/mevf/8
XzBapK82aYLmXhcXDdZ3BrEsS0h8y98NY42PCZd8dxOGDXz/DRdWbhzOWvgT0P4nrBpS/D1I0w+i+1G+C4dR2H09U62okFLnvNNCoQqW9c83MKLKGARRkdEZFqUqBTFVegc451HWv2VO+L7+4j5stl22ahtIsNAxTo1Cp3I4K4Xr7NKQkIhmveN1/ojyXDx9dDSVHtjNFXTif87MrmDL7JtJH21tl/NnNQsvfDgr9Xvf3u+fDLy+b6pS+bRTcT+sLt86HzeSaI/PwW+OH/TOeyiuQdMeVpLw2G9ybB1m8AJ3QZB7+dDX9YCgOvh8AQ719HTQWG4ux1GQCO4Xd7mo2I1Due+Tnm/w93JYCIiPhGgU5V2g3DGRxJS0s2zbM3cSjH95KzPUdM2VqHFuG0ijHrgah0rRb52oigrPAWptVvcBSkLIWvp/o+t6U2JC8w47EFe9cGuaygMLjAVaK17BVI31T5tgWZ8OUfzfUhvyu/rktYLFz3qemGBqab23uTTFADx7unfX4rPNfTlKdl7Dbn8ozb4Y/L4frPoPMY/3VT81HpuL+xqOv/4Rj9f3Xy/CJecWV0sh0m0GkWrkYEIiLVoUCnKrZALJ3PA2CMbS1bU31vSOBeLLRji3Bax5gProNZyujUmpoEOgBxPeHKd8x6NOs/gCUv+G1o1eJ0Hu+0NvgmiGrl+zG6jYeeF5sJ+N9Mq7xl9Hf3mbkzsZ1g/BMnP261mfVtrnrPLHK5ZxG8fo5ZDPOVs+Cd802L7tJiaDXQzLu5dytc8A+I6+H7uP0tKJyMiO51FmiJeMUV6ORhfhhTxzURkepRoOMNV/naGOtatqRm+7y7u+NaxzIZnbSsQkod9SBT0BilbTR/a7I2SZexMNHVfvjHx2DL1zUfV3Xtng8py6qXzSlr4t/NGjX7foF1/z358Y2fw4ZPwGKDy98E13yWCvW6xKz/0ryLKYn76XEzpycwzNU97WdT6jbohlMfR0RO5ppLl+MMJchmJThAH9UiItWhd09vdBmHEwt9rXvITV7l8+7uQKdD83DiIkOwWS2UOJwczqkn8z8ak9xDkJsGWMwcnZo483YYehvgNB3IDq7zwwB9VLbT2pCbISqx+seKbgPn/tVcT3q4fLOF7IMm0wMw6s/QZkjVx4vrYdpB97nCZG/O/wdM22KyOK0HVX+cIk1dkflBLY8QIkMCsCgDKSJSLQp0vBHRkrR2FwNw/r7nfJqzUepwkuJqLd2xRTg2q4WEKJWv1Rp32VpsJ7N2Sk1N/LuZU2LPhw+vgezUmh/TB5bkBbBvOQSE1Cyb43bmHRDfBwqOwY8Pm/scDjMvpzDTBCyj7vP+eCHRcMW/TfbmzNvVxUzEH4pNRifXGUpkiNb1FhGpLgU6XnKOe5Q8ZzC9SrdSsvZDr/c7mFlAcamDIJvVU7bWyj1PR53X/C/dD2VrZdkC4MpZ0KI75KSaYKc43z/HrorTiXWhq3xu8M0QmVDzY9oC4MLnzPW1/4W9y2DlW7D7Z7NWzuVvgk3zAUTqlGuOTi6hRIXq/0cRkery+aeihQsX8o9//IPVq1eTmprK7NmzmTRp0in3mT9/PtOmTWPTpk20bduWBx98kJtuuqmaQ64biW068YJlMvfwAfz4CPS6CEKqXtvA3YigXfMwbFZTfpAYHQocU6BTG2raiKAiIdGmE9ubYyB1Hbx8JjRrbzq0hbd0XVzXw1qY7mSOUtOGuaQISgrNX3uZ28GRZh7QKTIgLXM2YD2w0pXNmeq/19PuTBh0I6z5D/zvj6ZsDWD849Ciq/+eR0SqxzVHJxdldEREasLnd9C8vDz69+/P7373Oy6//PIqt09OTubCCy/kjjvu4P3332fevHnceuutJCYmMmHChGoNui5YLBZWxl/D7tSf6JSfBgv/Yb4YVqHs/Bw3tZiuRf5oRFCR2I5wzQfw3mWQlWIuNWULMsFOn8nQbWL5Ujunkx6ps831Ibf4J5tT1thHzdo2GbvN7c7nwd
Bb/fscIlI97oyOM5TIYGV0RESqy+dA5/zzz+f888/3evvXXnuNjh078uyzZh2Pnj17snjxYp5//vkGFegAdGnVnBn7fsusoH/AL6+ajlJV/AJ+vONamOe+1ipdqx32Ajiy3Vz3d6AD0H4YTN0AhzaZtWPyDpf/m++6np9hyr8CQspcgs3fQNftoztNl7Jt35lLQKjp7tfncug6HsvuBcTm78IZEIplxN3+fy1hsTDucZPRCYkxC4NqwrNI/VDsbi8dooyOiEgN1Po76LJlyxg7dmy5+yZMmMDUqVNr+6n9rmdiFO86BrI25EwGFi6H7/8C139+yi+IezyBzvFf603pmpoReBzdBTHtaj435NAWs05MWHOIrEF3slOJaAkRo/1zrPTNsOkL09Y5Yzds/tJcgiKwBZrA2DH4JmyR8f55vhMN+I1ZTLRFt5p1cxMR/3JldHKcobTQHB0RkWqr9UAnLS2N+PjyX9Ti4+PJzs6moKCA0NDQk/YpKiqiqOh46+XsbNNq0263Y7fbfR6De5/q7FtWlxZmrI/Zr2e2bS2WXfMo2fw1zm6VZ7jcGZ22McGe54+LMB9cBzMLajymuuSP82r59WMCvr4TZ3xfSq6YBTHtq3+sg+sJABxxvSktKan2cU6b2K5w9l9g5P2Q9ivWzbOxbv4SS/Z+LMW5lFiCKB7yBwJr899It4vM3wb879AX/novaEp0ruqAa45OHqF0VEZHRKTa6uU76MyZM3nsscdOun/u3LmEhYVVsId3kpKSajIsikrBgo11ec3Z2GYCfY98TdH/7uWnnkU4rEEnbV/qgJQMG2Bh9/pfyNhq7s8vAQggI8/Ol19/R5CtRsOqc9U+r04n526dSRRgSd+A87VRrO7wBw5F9avW4fru+4ZOwO78cDZ99131xlSnzoBOQ2mWv5OErHUcDe/GoaXrgHV1PK7Gp6bvBU1Jfv5p6jIox5XpuhYZooyOiEh11Xqgk5CQQHp6ern70tPTiYqKqjCbAzB9+nSmTZvmuZ2dnU3btm0ZP348UVFVdzo7kd1uJykpiXHjxhEYWLMPjVd2LWbP0XyOjXoMZ9IqwnNSuSBmF46R95607Z6jeTiWLyEk0Mo1l56P1dV1zel08sT6n8grLqX/sHPo2KJhrhxf0/Nq2fcLAev24wwIxRnXk6CDazhr17M4Rv0Fx8hpYPGt+7nt3VcA6HDWxbTve4HP46lP7HY7W/z0b1aO8+d7QVPhzqjLaeRpRhBClDI6IiLVVuvvoMOGDeO7E35dT0pKYtiwYZXuExwcTHBw8En3BwYG1ujLSU33BzNPZ8/RfLZlWhg1/gn4/BZsS17ANuh6s/J8GfsziwHTcS04uHzGJzEmlJ2HcjmUW0K3xIb9hava53XtuwBY+k7GcuFzMOcBLKvexrbw79jS1sFlr3u/AKXDAembAAhoPQAayZdYf/yblZPpvHpP56kOFJdtL63zLyJSXT4vGJqbm8u6detYt24dYNpHr1u3jpQU0253+vTp3HDDDZ7t77jjDnbv3s3999/P1q1beeWVV/jkk0+45x4/rPJeB3ommozSlrRs0xa4/QizXsrcB0/a9njHtZMzNp4W0021IUHeUTPxHkz75IBguOh5uPQVsAXD9jnwxujj7aKrkrnXdCqyBZnJ9SIiDZHT6cno5DlDldEREakBnwOdVatWMXDgQAYOHAjAtGnTGDhwIA8//DAAqampnqAHoGPHjnz77bckJSXRv39/nn32Wd56660G11rarUdCJABbUnNMt7XznzIlVptmQ/LCctu6FwvtUEGg0+RbTK/7L5QWQ+IAaD3o+P0Dr4Nb5kJ0OziWDG+NhV8/qfp46a6AqGWPmndvExGpK8V5gBOAHEKJUtc1EZFq8/mnotGjR+N0Oit9fNasWRXus3btWl+fql5yZ3R2HsrBXuogMKEvDPkdrHzLtJv+/SKwmdPqyeg0PznQ8bSYboqBjsMBq94x14fecvLjrQbA7xfA57
fArp/gi9sg5RezoGVcz4rbeadtMH8TqtfIQESkXnBlc0qdFgoJ0jo6IiI14HNGp6lr0yyUiOAA7KVOdh02ddSc+38QGguHNsOK1z3begKdlpWXrqVmFdb+oOub3T+bbE1wtCn/q0hYLFz3GYy6z9xe9W94dRg81wv+NwU2fQkFx45v7wl0amGhUBGR06XM/BywaI6OiEgNKNDxkcVi8ZSvbU01v7wRFgvnPWSuz30ItnxDUUmpJ1vToYKMTitX6dqBppjRWfW2+dv/Ggg6Rcc5qw3GPGgCni7jICAEcg7C2vfg0xvh6U7w7/Gw4Gk4sMbsk9Cn9scvIlJbikyXuxzMUgrK6IiIVJ/eQauhZ2IUq/YeY0taNpNobe4cdBPsXwXr3ofPbubIhe/icEJEcAAtIk5eY6dVmdI1p9OJpaJyrMYo+yBs+95cH3Kzd/t0HWcu9gLYuxR2zoOdP8KRbbBvubm4xSvQEZEGzL1YqDOE0EAbgTb9HikiUl0KdKqhR2KZhgRuVitc/KL5NW7L18R/+zsGWB6gpMWgCoOYhGiT0Sm0O8jMt9Ms/ORgqFFa8y44S6HdcDPfxheBodDlPHPhScjcB7tcQc+exdDhbO/bUYuI1EflFgvVR7SISE3op6Jq6JFgGhJsTT1hIT1bAEz+N3Q6l4DSfGYFPcXwyEMVHiMk0EaLCLNWUJMpXystgdX/MdcrakLgq5i2MPgmuPq/8Jc9cPV7NT+miEhdKj6e0VGgIyJSMwp0qsE9R+dQThFHc4vKPxgQDNe8z96wPsRY8vjTgfvg6K4Kj9OqqbWY3j7HzLEJawE9L67r0YhIE/H3v/8di8XC1KlTPfcVFhZy55130rx5cyIiIpg8eTLp6el1N0g3V0ZHraVFRGpOgU41hAcH0L65mSi6NS3n5A2Cwnk86lG2ONoRYT8K700yc1NO0KqptZh2NyEYeL0JCEVEatnKlSt5/fXX6devfOv5e+65h6+//ppPP/2UBQsWcPDgQS6//PI6GmUZrmYEec5QdVwTEakhBTrV1NNVvrblxPI1l40ZVm4ofoDCqA6QmQLvToK8o+W2aVItpjOSzXwaLKbcTESkluXm5nLdddfx5ptv0qxZM8/9WVlZ/Pvf/+a5555jzJgxDB48mHfeeYelS5fyyy+/1OGI8TQj0BwdEZGa07toNfVIjGTOprTyDQlcCopLScsuBGIouvYLQj64yHQIe38y3PAVhJggqUm1mF7tWiC0y3kQ27FuxyIiTcKdd97JhRdeyNixY3niiSc8969evRq73c7YsWM99/Xo0YN27dqxbNkyzjrrrJOOVVRURFHR8VLl7GzzI5fdbsdut/s8Nvc+J+5rLcjCBuQSQkSQrVrHbuoqO7dSczq3tUPn1XfenisFOtXkaUiQdnJGZ89Rs1BoTFgg0Ymd4YYv4Z3z4eBa+PAaGD0dWg/yZHQafelaSRGs/a+5PuR3dTsWEWkSPvroI9asWcPKlStPeiwtLY2goCBiYmLK3R8fH09aWlqFx5s5cyaPPfbYSffPnTuXsLCwao8zKSmp3O2Be7fSDsh1hnL4YArffben2sdu6k48t+I/Ore1Q+fVe/n5+V5tp0CnmnolmkBnR3ou9lJHubUO9hwxgY5nodCW3eH6z2HWxbB3CfznIrDYGB3bgxkBrUjO6AlHEyC2EzTG9XQ2fwX5RyGqNXSdUNejEZFGbt++fdx9990kJSUREhLil2NOnz6dadOmeW5nZ2fTtm1bxo8fT1RUlM/Hs9vtJCUlMW7cOAIDj8/FsX36EWRAHqH079mNC0Z38sv4m5LKzq3UnM5t7dB59Z07q14VBTrV1KZZKOFBNvKKS0k+kke3+EjPY7tdgU7HFuHHd2g1EG76GhY/D/tWQs5Bwo5u4oaATVCSBP96EcKaQ5uh0Psy6H/N6X5JtcfdhGDQjaYFt4hILVq9ejWHDh1i0KBBnvtKS0tZuHAhL730Ej/88APFxcVkZmaWy+qkp6
eTkJBQ4TGDg4MJDj65iUpgYGCNvpictL/dfH7kOEPpFh6sLz01UNP/NlI5ndvaofPqPW/Pk751VpPVaqFHYhSr9x5jS2p2uUBnT0WBDphg56p3zfWsAzj2rWDWx58wwLKdgYEpWPKPmhbM2+eYBgbn3H+6Xk7tObQFUpaCxQaDflvXoxGRJuC8885jw4YN5e67+eab6dGjB3/5y19o27YtgYGBzJs3j8mTJwOwbds2UlJSGDZsWF0M+bgyC4ZGheojWkSkJvQuWgM9EiJdgU4Olw44fr97jk6HEwOdsqJbY42+jFnfxZCSkc9nNw5iSNB+2PwlLHsJfv4bWG1w9r21+hpqnTub0+MCiGpVt2MRkSYhMjKSPn36lLsvPDyc5s2be+6/5ZZbmDZtGrGxsURFRXHXXXcxbNiwChsRnFaeBUNDiQzWL7siIjWhQKcGeiRW3JAg+YiZINWx+SkCHZdWMSGkZORzIMfBkAFDoe1QU8I27zGYN8NkQkZO9fvYa0XBMUjbCOkbXX83mL+gJgQiUq88//zzWK1WJk+eTFFRERMmTOCVV16p62GVy+iovbSISM3oXbQGeiWacrWya+nkFNo5kmtakHZoUXUnHveioeVaTJ89DRyl8PMT8OMjYA2A4VP8N/DMFJNpcTpgwHWmWYKvnE4s+1fQ4+Bn2D55Hw5thqx9FW/b9kzoOLomIxYRqZH58+eXux0SEsLLL7/Myy+/XDcDqoxnHZ0QokKV0RERqQkFOjXQ3dViOj27iIy8YmLDg9jjyua0iAj2alVrz6KhmScsGnrOfeAshfkzYe7/mTK2s/5QswEf3g5LXoBfPwZHiblvyT+hw9kw5GbocTEEBJ36GNmpsP5DWPcBAUd30B0gvczjMe0gvi8k9IH4PuZvTAewam1aEZFTcjig2JXRcSqjIyJSU3oXrYGI4ADaxYaRkpHP1tRshndpQfJRdyMC79ZVOOVaOuf8xQQkC/8Bcx4wZWxn3u77QA+ug8XPmTbPOM19Hc+BoHDT+GDPInMJbwkDfwuDb4Jm7Y/vX1IE276Hde/Dzh9NJghwBoaxP6I/rYZegq1Vf4jvDaExvo9PREQ883PAXbqmjI6ISE0o0KmhHgmRpGTksyUth+FdWpy8hk4VWsWYNR4OVBToWCxw7v+ZMrbFz8H395nMyNBbvRvc3qWw6FkTnLh1v9CUxrUZYm5n7YfV/4E170Jumnmexc9D13HQ9yo4sAp+/QQKMo4fo+1ZMPB6SrpdyJp5i0gYegE2tUMUEakZV6Bjd9ooIpCIYH1Ei4jUhN5Fa6hHYhRzN6ez1TVPJ9ndWrqlt4HOKTI6YIKd8x42ZWxL/gnf3msyO0NuNo8X50HuIcg77Pp7CHIPw+6fIWWZ6xhW6HMFjLwH4nuVP350Gxjzf6aV9bbvzNyd3fNhx1xzcYtMhP7Xmjk9LbqY++x2r16jiIh4wdWIII8QIoMDsVkb4QLSIiKnkQKdGvI0JEg7IdDxMqOTGG0yOtmFJeQWlVT8C57FAmMfM5mdZS/BN1PNXJvcw57F5SpkCzKByYg/QWwVq2vbAqHXpeZydJcJeHbMhbieppyt8xgzT0hERGqHpxGB5ueIiPiD3klrqIerIcH29FxKSh3eraFTRmRIIFEhAWQXlpCaWUDXMguPlmOxwPgnTLCz/FU4tuf4YwGhENESwuMgIs7MtYlpZ4KcqETfX1TzzjDhb+YiIiKnR5H5wSzHqfk5IiL+oECnhtrFhhEWZCO/uJS1+zLJzDflXN7O0QFTvpadlsOBUwU6YIKdiTNhwLVgLzABTUQcBEWYx0REpOFyLxZKKFGh+ngWEakp9fytIavVQvcEE5x8tyEVMOVooUHel3l5WkxnFVaxJSagSewP7c4ymZfgSAU5IiKNQVHZ1tLK6IiI1JQCHT9wl6/9sDEN8C2bA8c7r1XakEBERBq/MouFao6OiEjNKdDxA3dDgoOujIy383PcEqNNRqfCFtMiItI0uO
bo5DpDiVJGR0SkxhTo+EGPxKhyt71dLNSttbt0LdOL0jUREWmcyszRUUZHRKTmFOj4gXuOjlvHFhE+7e9ZSydLGR0RkSbLPUcHzdEREfEHBTp+EBUSSJtmoZ7bvmZ03HN0UjMLcTicfh2biIg0EJ5mBJqjIyLiDwp0/MTdkMBqgbaxvgU68VEhWCxQXOrgSF5RbQxPRETquzILhkaFKqMjIlJTCnT8xN2QoFVMKMEB3reWBgi0WYmPPJ7VERGRJsjVjCDPqTk6IiL+oEDHTwZ3iAWgX5voau2vFtMiIk2cqxlBDqFEKdAREakxvZP6yaiuLfjwtrPomRhZ9cYVSIwJhZRMtZgWEWmqyiwYqvbSIiI1p0DHTywWC8M6N6/2/p4W01kqXRMRaYqcRblYgDxC1HVNRMQPVLpWT7SKVumaiEiT5sro5GgdHRERv1CgU08kutfS8UOgk5ZVyFfrD+J0qlW1iEiD4CjFYs8DoNASRliQb01tRETkZPrJqJ5o7Vk0tOala/d9tp5FO47gdDq5dEDrGh9PRERqmasRAQDBkVgslrobi4hII6GMTj3RyhXoHM4poqiktNrHycq3s3TXUQCW7jzql7GJiEgtc62hU+y0ERIaWsXGIiLiDQU69USzsEBCAs1/jrQaZHXmbz9EqcOUrK3cm+GXsYmISC1zd1wjlMhgNSIQEfEHBTr1hMVioVW0+RWvJi2mf9xyyHN99+E8juQW1XhsIiJSy1yBTp4zlKhQVZWLiPiDAp16xF2+lppZvYyOvdTB/G0m0AkNNBNZV+055p/BiYhI7Skuk9FRa2kREb9QoFOPtIqpWYvplckZ5BSW0Dw8iEkDTROCVXtUviYiUu95StdC1FpaRMRPFOjUI4nR7s5r1Qt03GVr5/aI46xOsQCsVKAjIlL/uZoR5DpDiVJGR0TELxTo1COeFtPVKF1zOp3M25oOwNiecQzpYAKdjQezyS8u8d8gRUTE/8o0I4hSRkdExC8U6NQjrWqwaOjOQ7nsPZpPkM3K2V1b0jomlFbRIZQ6nKxLyfTzSEVExK/cc3ScmqMjIuIvCnTqkcQyc3ScTqdP+7rL1oZ1bk54sPk1cGhHk9VZofI1EZH6rWx7aWV0RET8QoFOPdI6JhSb1UJecSlbUnN82nfeluNla27u8jV1XhMRqedcc3TyCCEqVBkdERF/UKBTj4QE2ji/TwIAby7a7fV+R3OLWJ1igpkxPeM99w/t0AyANSnHKCl1+HGkIiLiV66MTo5TGR0REX9RoFPP/H5UZwC+Wn+Q/cfyvdrn522HcTqhV2KUp6EBQLe4SKJCAsgvLmVzanatjFdERPyg2J3R0RwdERF/UaBTz/RtE82ILs0pdTh5e/Eer/apqGwNwGq1eMrXVqp8TUSk/io63oxAXddERPxDgU495M7qfLQyhcz84lNuW1RSysLthwE4r0zZmtsQV/maFg4VEam/nEUm624WDFVGR0TEHxTo1ENnd21Bz8Qo8otL+e8ve0+57S+7M8grLiUuMpi+raNPevyMDscXDvW1k5uIiJwejsKy7aWV0RER8QcFOvWQxWLhjnM6ATBr6R4K7aWVbusuWzuvZxxWq+Wkx/u2iSYowMqR3GL2HPVuzo+IiJxmrtK1Ims4IYG2Oh6MiEjjoECnnrqgbyKtY0I5klvM52v2V7iN0+lknmv9nPN6nFy2BhAcYKN/G5PpWanyNRGResniakZgCYmo45GIiDQeCnTqqUCblVtGdgTgzYW7KXWcXHa2NS2HA5kFBAdYGdGlRaXHGuouX0tWoCMiUu+UlmAtKQDAEhJVx4MREWk8FOjUY1cPbUt0aCB7juYzd1PaSY//uNmUrZ3dtQWhQZWXOrgDnVV71XlNRKTecWVzAGwhkXU4EBGRxqVagc7LL79Mhw4dCAkJ4cwzz2TFihWVbjtr1iwsFku5S0hISLUH3JSEBwdww7D2ALy2YNdJzQR+3OoqW6ug21pZg9o1w2KB5C
N5HM4pqp3BiohI9bjn5zgDCQsNq+PBiIg0Hj4HOh9//DHTpk3jkUceYc2aNfTv358JEyZw6NChSveJiooiNTXVc9m799SdxOS4G4d3ICjAyvr9WSwvU3p2KKeQ9fsyATivR1wlexvRYYF0jze/Eq7eq/I1EZF6xZXRMa2l1XFNRMRffA50nnvuOW677TZuvvlmevXqxWuvvUZYWBhvv/12pftYLBYSEhI8l/j4U2cg5LgWEcFcObgNAK8v2OW5/2dXNqd/m2jioqrOkLnL11Ykq3xNRKReKVJraRGR2uBToFNcXMzq1asZO3bs8QNYrYwdO5Zly5ZVul9ubi7t27enbdu2XHrppWzatKn6I26Cbju7ExYL/LztMFvTzKJySZu9K1tz8ywcqoyOiEj94gp08gglSouFioj4jU8/HR05coTS0tKTMjLx8fFs3bq1wn26d+/O22+/Tb9+/cjKyuKZZ55h+PDhbNq0iTZt2lS4T1FREUVFx+eSZGebL/d2ux273e7LkD37lf3b0LSODmJCr3jmbErn9fk7mXFJLxbvPAzAOV1jvXpdA9uYTj6bDmaTmVtAeHDNfzVs6Oe1PtO5rR06r77TuToNXIFODqFEKtAREfGbWs+RDxs2jGHDhnluDx8+nJ49e/L666/z+OOPV7jPzJkzeeyxx066f+7cuYSFVX+iZlJSUrX3rWu9rDCHAP63/iABWfsptNuICXKSvGYxe05eJ7RCscE2Morgjc+T6B5zcrvq6mrI57W+07mtHTqv3svP10LDtc6d0XFqjo6IiD/59I7aokULbDYb6enp5e5PT08nISHBq2MEBgYycOBAdu7cWek206dPZ9q0aZ7b2dnZtG3blvHjxxMV5fsaA3a7naSkJMaNG0dgYMP9tWxJ7kqWJx/jq32BgIMLB7Tjwgt7er3/vLwNfPVrKraErlwwpkuNx9NYzmt9pHNbO3RefefOqEst8jQj0BwdERF/8ukdNSgoiMGDBzNv3jwmTZoEgMPhYN68eUyZMsWrY5SWlrJhwwYuuOCCSrcJDg4mODj4pPsDAwNr9OWkpvvXtTvO6cLy5JUUlTgAGNc7wafXc0an5nz1aypr9mX59Tw09PNan+nc1g6dV+/pPJ0GZZoRNA/V+RYR8Refu65NmzaNN998k//85z9s2bKFP/zhD+Tl5XHzzTcDcMMNNzB9+nTP9jNmzGDu3Lns3r2bNWvWcP3117N3715uvfVW/72KJmJ095aeNtFhQTbO6tTcp/3P6Gg6r63Zm4m91OH38YmISDW4Ax1ldERE/Mrnd9Srr76aw4cP8/DDD5OWlsaAAQOYM2eOp0FBSkoKVuvx+OnYsWPcdtttpKWl0axZMwYPHszSpUvp1auX/15FE2GxWLjrvC5M+WAt5/dJJCTQ5tP+XVpGEB0aSFaBnc0Hs+nfNqZ2BioiIt4rk9FR1zUREf+p1k9HU6ZMqbRUbf78+eVuP//88zz//PPVeRqpwEX9WtEtPpK2zXxvymC1WhjSvhnzth5i5Z4MBToiIvWBa45OHiEKdERE/Mjn0jWpe93iIwkN8i2b4zbUVb62co/W0xERqQ8chabhQ45K10RE/EqBThMz1L1w6J5jOJ3+azEtIiLVU1robi8dSoQCHRERv1Gg08T0aR1NUICVo3nFJB/Jq+vhiIg0ee6MTrEtnECbPpZFRPxF76hNTHCAjQGuuTkqXxMRqQeKzBwdgiPqdhwiIo2MAp0myF2+tnLPsToeiYiIWIpN6RrBkXU7EBGRRkaBThM0pINpSPDT1kM8N3cbX60/yOaD2RTaS+t4ZCIiTY/N1XXNEqJAR0TEnzTrsQka3L4ZoYE2MvKKefGnnZ77LRZo2yyMLnERdImLoGtcBBP7JBCpdqciIrWj1I7NUQRAQEhUHQ9GRKRxUaDTBEWFBPLFH4ezZOcRdh7KZcehXHYeyiWrwE5KRj4pGfn8tPUQAIt3HuGf1wys4xGLiDRSrmwOQECYAh
0REX9SoNNE9UyMomfi8Q9Vp9PJkdxidh7KZefhXDYfzOLDFfuYszGN3KISIoL1T0VExO9cgU6hM5Dw0NA6HoyISOOib68CgMVioWVkMC0jgxnWuTlOp5NfdmeQfCSPpM1pXDawTV0PUUSk8SkyjQhyCCUqVB/JIiL+pGYEUiGLxcLF/VsB8NW6g3U8GhGRxsniyujkOUOJ0nxIERG/UqAjlbrEFegs2nGEY3nFdTwaEZFGyJXRySWUyBBldERE/EmBjlSqS1wEvRKjKHE4+X5jWl0PR0Sk8XFndAhRoCMi4mcKdOSUPOVr6w/U8UhERBoh9xwdla6JiPidAh05pYv7JwKwPDmDtKzCOh6NiEjj4pmjQ6jWLBMR8TMFOnJKbZqFMbh9M5xO+HZDal0PR0SkSjNnzmTo0KFERkYSFxfHpEmT2LZtW7ltCgsLufPOO2nevDkRERFMnjyZ9PT00z9Y9xwdp+boiIj4mwIdqdIlnvI1dV8TkfpvwYIF3Hnnnfzyyy8kJSVht9sZP348eXl5nm3uuecevv76az799FMWLFjAwYMHufzyy0//YD3NCEKIClVGR0TEn/TzkVTpgr6JPPb1Jtbvy2Tv0TzaNw+v6yGJiFRqzpw55W7PmjWLuLg4Vq9ezahRo8jKyuLf//43H3zwAWPGjAHgnXfeoWfPnvzyyy+cddZZp22sjsIcbCijIyJSG/SuKlVqGRnM8M4tWLzzCF+vP8iUMV3rekgiIl7LysoCIDY2FoDVq1djt9sZO3asZ5sePXrQrl07li1bVmGgU1RURFFRked2dnY2AHa7Hbvd7vOY3PsU52URCORaQgm2OKt1LCnPfQ51Lv1P57Z26Lz6zttzpUBHvHJJ/1auQCdVgY6INBgOh4OpU6cyYsQI+vTpA0BaWhpBQUHExMSU2zY+Pp60tIpb6c+cOZPHHnvspPvnzp1LWFhYtcd3JHUv4UARIcyZ8321jyMnS0pKqushNFo6t7VD59V7+fn5Xm2nQEe8MqFPAv/35Qa2peewLS2H7gmRdT0kEZEq3XnnnWzcuJHFixfX6DjTp09n2rRpntvZ2dm0bduW8ePHExUV5fPx7HY7SUlJNAsLhDywhERzwQUX1GiMYrjP7bhx4wgM1Lwnf9K5rR06r75zZ9WrokBHvBIdGsg53eL4cUs6X60/wH0JPep6SCIipzRlyhS++eYbFi5cSJs2bTz3JyQkUFxcTGZmZrmsTnp6OgkJCRUeKzg4mODg4JPuDwwMrNEXE4vd1SAhOFJfcPyspv9tpHI6t7VD59V73p4ndV0Tr10ywHRf+3p9Kk6ns45HIyJSMafTyZQpU5g9ezY//fQT/9/encdFWe0PHP88M8DMsIusCgoqbgiaa6hZLrmlmZqZeSvTTM1Ws25WbpXptTTNNG91tfqlpVaalZmKkkouqbjkvhEqIKISjKwz8/z+AOZeEmQdBvD7fr3m5cyzfp/DOGe+c85zTkhISKH17dq1w9HRkaioKOuykydPEh8fT2RkZJXGqsnJG3UNnbSSCyFEZZMWHVFqvVr4YnDUEn8tg0MX/yLMX0ZfE0JUPxMnTmTlypV8//33uLm5We+78fDwwGAw4OHhwZgxY5g0aRJeXl64u7vz7LPPEhkZWaUjrgFoTXktOlq9JDpC2ILZbK72N/nn5ubi4OBAVlYWZrPZ3uFUC46Ojmi12gofRxIdUWrOTg70aunHD4cSWH8wgbC+MiiBEKL6+eijjwC45557Ci1fvnw5o0aNAuD9999Ho9EwdOhQsrOz6dOnD0uWLKniSMEhv+uaxlD2+3yEEMVTVZWkpCRSU1PtHUqJVFXF39+fCxcuoCiKvcOpNjw9PfH3969QmUiiI8rk/tb1+OFQAj8eTuCV3k3sHU6pHbqQSszZFB5sG4ivu97e4QghbKg0XWv1ej2LFy9m8eLFVRBR0RSLCQc1BwBHSXSEqFQFSY6vry/Ozs7VOoGwWCwYjUZcXV3RaOSuEl
VVycjIIDk5GYCAgIByH0sSHVEm3Zp64653IDk9m31/Xrd3OLeUmWPmh0MJ/N/uPzlyKW8ejZNJ6Sx8+A47RyaEEOBgybQ+17lKoiNEZTGbzdYkp27duvYOp0QWi4WcnBz0er0kOvkMBgMAycnJ+Pr6lrsbmyQ6okx0Dlr6tQpg1b4L/HA4ic7VcHCQs1eMrNgdzzf7L5CWZQJAq1EwW1S2nUjGZLbgoJUPEiGEfTmaswDIUHW45lfqQoiKK7gnpyJzXAn7K/j75ebmljvRkW97oswGts4bfe2Xo5cxWewcTD6T2cLGPxIZ+elues77lWUx50nLMhHkZeDVfs3ZPaUnXi5OpGWZ2F/NW6KEELeHghadG+hx08vvjkJUturcXU2UrDL+fvLJKsossnFdvF11pBizOfmX/T9EMnJMDP1oF8cT8yaP0ijQo7kvI+9syN2hPmg0eTHe09SH72IvsfVEMp0aVf+mbCFE7eZgzkt00lUDbvpq2DwuhBA1nLToiDLTahQGROTdGHYgxf6JzoYjSRxPTMNN78DE7o3Z/kp3Pn28A92b+VqTHIAeLXwB2Hoi2V6hCiGElUN+17Ub6HGXFh0hRCULDg5mwYIF9g7DriTREeVS0H3tyDWFzBz7jvn+zf4LAIzr1oiX+zQnsE7RfXLvCvVBq1E4nWwk/mpGVYYohBA3Kei6ZlSdpUVHCAHkDYv/wgsvVMqxfv/9d5566qlKOVZNJYmOKJe2DTyp76kn26Lw2rqjZJvsk+xcuJbB7nPXUBQY3Dbwltt6GBzpEFwHgK0nLldFeEIIUayCFh2jtOgIIUpJVVVMJlOptvXx8bntB2SQREeUi6IovNq3GRpF5ccjSTy+bC9/ZVb9zMPfHbgEQOfGdanvWfKoRT2a53dfO3nFpnEJIURJCu7RMWLA3SAtOkLc7p544gl+/fVXFi5ciKIoKIrCZ599hqIo/Pzzz7Rr1w6dTsfOnTs5e/YsgwYNws/PD1dXVzp06MCWLVsKHe/vXdcUReHTTz9l8ODBODs7Exoayvr160sVm9lsZsyYMYSEhGAwGGjWrBkLFy68abtly5YRFhaGTqcjICCAZ555xrouNTWVcePG4efnh16vp1WrVvz444/lK6xSkkRHlFvfMD/GNbfgotOy+9w1hi39jUupmSXvWElUVeXbAxcBeLDdrVtzCvRo7gfA7rNXuZFdul9EhBDCFpSCe3RUGXVNCFtTVZWMHJNdHqWZxBhgwYIFREZGMnbsWBITE0lMTCQoKAiAV199lTlz5nD8+HEiIiIwGo3079+fqKgoYmNj6du3LwMHDiQ+Pv6W55g5cyYPPfQQhw8fpn///owcOZJr166VGJvFYiEwMJA1a9Zw7Ngxpk2bxmuvvcbq1aut23z00UdMnDiRp556iiNHjrB+/XqaNGli3b9fv37ExMTw5ZdfcuzYMebMmVPuYaNLSz5ZRYU091RZOaYDY/8vllOXjQxZEsOyUR0Iq+dh83P/Hned+GsZuDhp6RPmX6p9Gvu40MDLmfhrGcScSaF3KfcTQojKpuTmDy+tOGNwtG1lL8TtLjPXTMtpv9jl3Mfe7IOzU8lfuT08PHBycsLZ2Rl//7zvJydOnADgzTff5N5777Vu6+XlRevWra2v33rrLdauXcv69esLtaL83ahRoxgxYgQA77zzDh988AF79+6lb9++t4zN0dGRmTNnWl+HhISwa9cuVq9ezUMPPQTA22+/zUsvvcTzzz9v3a5Dhw4AbNmyhb1793L8+HGaNm0KQKNGjUosk4qSFh1RYS0D3Fk7sQtN/Vy5nJbN8H/vZsdp23cN+3Z/XmvOfREBpfoAgbxmW2v3NRl9TQhhRwUtOrkOLjLfhxDiltq3b1/otdFoZPLkybRo0QJPT09cXV05fvx4iS06ERER1ucuLi64u7uTnFy670OLFy+mXbt2+Pj44Orqyscff2w9X3JyMgkJCfTs2bPIfQ8ePEhgYKA1yakq0qIjKk
V9TwNrxndm3P/tY/e5azyx/HdmDwlnWPsgm5wvM8fMT0cSARhawiAEf9ejuS+f/RbH1hPJqKoqXzCEEHahzb9Hx+zgaudIhKj9DI5ajr3Zx27nrigXF5dCrydPnszmzZt57733aNKkCQaDgQcffJCcnJxbHsfRsfD9gIqiYLGUPPv7119/zeTJk5k3bx6RkZG4ubnx7rvvsmfPHgAMhlvfJ13SeluRREdUGg+DI5+P7sgr3xzm+4MJvPzNYRJSs3iuZ5NKTyZ+OZqEMdtEkJeBDsFeZdq3UyMvnJ20JKdnczQhjVb1bd/NTggh/q4g0bE4SaIjhK0pilLq3h/25OTkhNlc8ki2MTExjBo1isGDBwN5LTxxcXE2iysmJobOnTvz9NNPW5edPXvW+tzNzY3g4GCioqLo3r37TftHRERw8eJFTp06VaWtOtJ1TVQqnYOW9x9qw4R7GgPw/pZTjP1iPxv/SCQjp/Ju/i8YhGBo28BCk4KWNsauTbwB6b4mhLAfx/yua6okOkKIfMHBwezZs4e4uDhSUlKKbW0JDQ3lu+++4+DBgxw6dIhHHnmkVC0z5RUaGsq+ffv45ZdfOHXqFFOnTuX3338vtM2MGTOYN28eH3zwAadPn+bAgQMsWrQIgLvvvptu3boxdOhQNm/ezPnz5/n555/ZuHGjzWIGSXSEDWg0Cv/s25y3HmiFRoEtxy8z/ssD3PHmZsZ89jurfo8nxZhd7uMnpGay80wKUPZuawV6tsi7TydKEh0hhJ045k8Yqujc7ByJEKK6mDx5MlqtlpYtW+Lj41PsPTfz58+nTp06dO7cmYEDB9KnTx/atm1rs7jGjRvHkCFDGD58OJ06deLq1auFWncAHn/8cRYsWMCSJUsICwtjwIABnD592rr+22+/pUOHDowYMYKWLVvyyiuvlKr1qiKqfxueqLEevbMhbQI9WXfwEpuOJXHhWiZRJ5KJOpGMohyhXYM69A7zo3dLf4K9XUo+YL61sZdQVegU4kWQV/kmwureLC/ROXwxlSvp2fi46cp1HHu5nJbFyj3xjOzUAF93vb3DEUKUg5Mlr0VHo3e3cyRCiOqiadOm7Nq1q9CyUaNG3bRdcHAwW7duLbRs4sSJhV7/vStbUcNcp6amliounU7H8uXLWb58eaHls2fPLvR63LhxjBs3rshjeHl5sWzZslKdr7JIi46wqfBAD6YOaMn2l7uz8YW7mHRvU8Lre6CqsO/P67yz4QT3vBfN3I0nSnU8VVWto60NLeXcOUXxdddb44g+WbNadSwWlWdWHmBh1GkmrDiA2VK68fmFENWLTs1r0XEwSKIjhBC2IImOqBKKotDc353neobyw7NdiXm1BzPvD7PeK7Mk+ixf7b31kIgAsRdSOZdyA4Ojlv7hARWKqWCY6W01LNH5+vcL/B53HYD9f17nPzvP2TkiIUR56NW8Fh1HF0l0hBD2NX78eFxdXYt8jB8/3t7hlZt0XRN2Ud/TwOOdg3m8czAfRJ1m/uZTTF33Bw29nOmcn/wU5Zv81px+rfxx1VXs7dujuS8Lo06z/VQKOSYLTg5Vk/enGLO5dD2T1kGeZd43OS2L2T8fB6BLk7rEnLnKe5tO0b2ZL6F+0s9fiBrDlI0jeQO06Fxk5EchhH29+eabTJ48uch17u4198cYadERdvdsjyYMalMPk0Vl/Jf7OXfFWOR2WblmfjiUAMCDFei2ViC8vgferjqM2Sb2xV2r8PFKIyE1k74LtjNocQwr95TcgvV309cfJT3LROtADz5/oiPdm/mQY7Iwec0hTGbbjbYihKhkOf/9nDNIi44Qws58fX1p0qRJkQ9fX197h1dukugIu1MUhX8NjaBtA0/SskyM+XwfqRk3T3i1+dhl0rNM1Pc0cGejuhU+r0aj0L2ZD1A1o69l5pgZ+8U+Uox51zbt+z/47WxKqfffdDSJn/9IQqtRmD0kAgethjlDI3DXO3Do4l8s/fVsyQcRQlQP2ekA3F
B1uBpkQBEhhLAFSXREtaB31PLvR9tT39PA+ZQbTPjyADmmwi0UBXPnDGlbv8xz5xSnYJjpbTZOdFRV5eVvDnE0IQ0vFyd6tfDDZFF5esUB4lJulLh/elYu074/CsBT3RrRsl7eL8B+7npmDgoDYGHUaY4lpNnuIkSZSAubuKX8Fp0bGHDXO5awsRBCiPKQREdUGz5uOv4zqj0uTlp2nbvKtO//sA6FeDkti+2nrgAwpJxz5xSla6gPjlqFcyk3iu0y97/OJBu5kl72OYCWRJ/lx8OJOGgUPhrZlg8fuYPWQZ6kZuQy5vPfScvKveX+czeeJCkti+C6zjzfM7TQugfa1Kd3Sz9yzSovrTl0U4IoqpbZojLlu8Pc8eZmNh1Nsnc4oppS8lt00lUDbnq5XVYIIWxBEh1RrTT3d2fRI3egUfJGF/vPzvMArIu9hEWFdg3rEFKGOXdK4qpzoFNIXje4rbdo1cnKNfPWj8foNf9XerwXzU+HE0t9ji3HLvPeppMAzBwURqdGddE7avnk0XYEeOg5e+UGz6yMLbYFYP+f1/hyz58AvDM4HL2jttB6RVGYNTicOs6OHE9MY9HW00UdRlQBi0Xlte+O8NXeC6Rnm3hp9SHOl6LFTtyG8hMdI5LoCCGErUiiI6qdHs39eP2+lgDM2nCcLccuW7utVcYgBH/XvYRhpo8lpDHowxhr0pWebWLiygNM+/4Psk23ntH31OV0nv86FlWFf9zZgJGdGlrX+brr+eSx9ugdNWw/dYVZG47ftH+OycKr3x5BVWFYu8BiR6TzcdPx9gPhQF7r0aELqSVed3mYLSqX07I4dCGVjX8k8flvcfxr4wn+b/eft31XLVVVmb7+KKv2XUCjQCMfF9KzTUz4cj+ZObad+VnUPJbs/K5rqh53g3RdE0IIW5CfkUS1NLpLMGevGFm5J56nVxwgx2xB56DhvoiKzZ1TlJ7NfXnrx2PsOXeN9Kxc3PL7y1ssKp/uPMd7v5wix2zB29WJdwaHE3shlY+iz/LFrj85EH+dxY+0pWHdm1uZUjNyGPvFPm7kmLmzkRfTB4bdtE2r+h68/1AbJqw4wPKYOJr6uTGiYwPr+o93nOd0spG6Lk68fl+LW17HfREB/PxHAD8eTuSlNYf48dmuN7X+lIaqqly8nsnRhDSOJaZx+nI6iX9lkfRXFleM2cVOUBpzOoWFI9qgcyj7OSvDxesZzPn5BG2CPHmiSwjaSrqPqzRUVeXtn47zf7v/RFFg3kOt6dzYm/s+2MmJpHTeWPcH7w2LQFGqLiZRveXcSMUJadERQlSu4OBgXnjhBV544QV7h1ItyKerqJYURWHm/WH8efUGMWeuAtAnzN8mN+0Ge7vQyNuFcyk32Hk6hX7hAVxKzeSl1QfZfS5v2OleLfyYMzQcb1cdvcP86RjixaRVB/njUhoDPtjJ3Acj6Pc/E5iazBaeWRnLn1czCKxjYMnIdjhqi25A7RcewEv3NmVe/lxCwXVdaN/AncuZsGRv3mSg0wa2xNPZqcRreWtQK3afu8aZZCPvbz7FlP63To6yTWZOXzZyLDGNY/mJzfHENNKzTMXuo9Uo+Ljq8PfQ4++up46LI9/uv8TGo0mM/ux3/v1o+wrPcVRWJ5PSeWzZHi6nZfPj4UQ2H7vM/OFtqO9psPm5VVVl7i8nrS1+c4aEM/iOvJbHRSPuYOSnu/n2wEXaB9cplMSK21t2RhquQKZisNuPA0IIUdtJoiOqLUethiWPtGPwRzGcu3LDpl8SezT35dzO80SdSCbHbOGNdX+QnmXC2UnLtAEtGd4hqNCv8d2b+bLh+bt4dmUs+/68zoQVBxjVOZgp/Zujc9Aya8Nxdp5JwdlJyyePtcfL5dZJyjM9mnA62cj6QwlMWLGfNU91ZNVZLblmlXua+XB/63qluo46Lk7MHhLO2C/28fGOc/QO86NNUB0uXc/kXIqRuJQbxF3N4FzKDeJSbnDxegZFNdA4aTWE+rnSMs
Cd5gHu1Pc0EOChx99Dj7er7qbWkgER9Rj7xT5izlxl5Kd7+GxUB+qUcM2VZe/5azz5+e+kZZkIrutMcno2e85fo++C7cwaHF7qsiuvhVGn+Sg6b2jvtwaFMbzDf9+nkY3rMrlPM+ZuPMn09UcJr+9Bq/oyOaQAU0beCInZ2sq751AIIURhco+OqNY8nB35fmIXfnquK5GNKz53TnF65N+nszb2Es9/fZD0LBN3NPBkw3N38XDHBkV2OQrwMPDVU3cy/u7GAHz2WxwPfrSLD7eeZnlMHADzH2pDi4CSJwNUFIW5D0bQOtCD1Ixchi7dw9l0BWcnLW8/0KpMXZ7ubenHkLb18+4L+nQvzaf+TLd3tzFq+e/M+OEYn/0Wx/ZTV4i/lpfkeBgciWxUlzFdQ5g3rDU/P38Xf8zsw0/P3cW7w1ozpmsIfVv50zrIEz93fZFdwro08Wbl2DvxdHbk0IVUHvr3LpL+yip1zOW16WgSj/5nD2lZJto3rMP3E7uy4bm7aBPkSXqWiee+iuXFVQdLHNWuvJZEn2HBlrzBH964rwWPRgbftM34bo3p1cKXHJOF8V/u568M28QiahZTVl6iY3KQREcIkefjjz+mXr16WCyF73kdNGgQo0eP5uzZswwaNAg/Pz9cXV3p0KEDW7ZsKff55s+fT3h4OC4uLgQFBfH0009jNBYegTYmJoZ77rkHZ2dn6tSpQ58+fbh+/ToAFouFuXPn0qRJE3Q6HQ0aNGDWrFnljscWpEVHVHtuekfC6tn2V/D2wV646RxIzzah1Sg81yOUid0b41BMd7MCjloNr/ZrTqcQL15cfZAjl/7iyKW/AHihVyh9W/mXOga9Y17rz/0fxpCUlpckvNCzCYF1nMt8PdMHhrH77FUS8pMNJwcNwXWdCa7rQoiPCyF1XQjxznv4uOkq5d6RNkGerBkXyaP/2cvpZCNDP/qNL5/sVKmj5P2vr/bG8/raI1hU6NXCl0Uj2mJw0uLh7Mia8ZEs2nqGD7eeZm3sJfaev8aCh9vQpr5bpZ3/PzvPM3dj3mh6r/RtxpN3NSpyO41GYd6wNgz4cAcXrmUyafVBPnmsfaXNBSVqJktm3qhrZgdXO0cixG1CVSE3wz7ndnSGUtSzw4YN4/nnn2fbtm307NkTgGvXrrFx40Y2bNiA0Wikf//+zJo1C51OxxdffMHAgQM5efIkDRqUvdeLRqPhgw8+ICQkhHPnzvH000/zyiuvsGTJEgAOHjxIz549GT16NAsXLsTBwYFt27ZhNucNsDNlyhQ++eQT3n//fbp27UpiYiInTpwocxy2VK5EZ/Hixbz77rskJSXRunVrFi1aRMeOHYvdfs2aNUydOpW4uDhCQ0P517/+Rf/+/csdtBCVzclBwwv3NmXT0SSm9G9BmyDPMu3fvbkvG567i2e/imX/n9fp18qf53qElrzj3/i66/n08fY88slufJ1yeezO8nXX8zA4sm5iF85cMdLAy5l6HoYq+WId6ufGNxPykp3zKTcYtvQ3Ph/dsVITVVVV+XDrGeZtPgXAQ+0DeWdweKGk1FGrYdK9Tbm7qTcvrDrIhWuZDP/3LsZ1CyG0goPDZZvMfLUnnrd+PAbA8z1DefqeJrfcx8PZkY9GtmPIR78RdSKZpdvPlriPqN32tHyNASf70C7Am0ftHYwQt4PcDHjHtl2Zi/VaAjiV/KNfnTp16NevHytXrrQmOt988w3e3t50794djUZD69atrdu/9dZbrF27lvXr1/PMM8+UOaz/HbAgODiYt99+m/Hjx1sTnblz59K+fXvra4CwsLyBldLT01m4cCEffvghjz/+OACNGzema9euZY7Dlsqc6KxatYpJkyaxdOlSOnXqxIIFC+jTpw8nT57E19f3pu1/++03RowYwezZsxkwYAArV67kgQce4MCBA7Rq1apSLkKIyjCmawhjuoaUe/96ngZWPXUnxxLTCKvnUe7EolV9D3575W42b/qlQiOH+brr8X
XXl3v/8gqs48zqcZE8vmwvxxLTePjj3Swb1YEOwV4VPrbZojLzh6N8sStvXqGJ3RszuXezYluk2jX0YsNzdzFj/TG+PXCRj349T31nLQc4jqezDg+DIx4GR9zz/8177kCuWeXi9QwuXs/8n3/znienZ5M/jy0T7mnMC71Kl9C2qu/BzPvDmPLdEd775SRtgjzp3Pjm4cKN2SYOX0wlNj6V44lp+LnriQj0ICLQk+C6zjJyWy2RlqPwF644Ocs9W0KI/xo5ciRjx45lyZIl6HQ6VqxYwcMPP4xGo8FoNDJjxgx++uknEhMTMZlMZGZmEh8fX65zbdmyhdmzZ3PixAnS0tIwmUxkZWWRkZGBs7MzBw8eZNiwYUXue/z4cbKzs60JWXVV5kRn/vz5jB07lieeeAKApUuX8tNPP7Fs2TJeffXVm7ZfuHAhffv25eWXXwbyss/Nmzfz4YcfsnTp0gqGL0T14qDVEBHoWeHj6By11OSeTT5uOr4edydPfraPvXHXePQ/e3gsMpg6zk64Gxxw0zvirs/71yP/tavOAQetglZR0GqUm77QZ5vMvLjqIBuOJKEoMH1AS0Z1KTkxddM7Mu+h1vRo7suU7w5zKcPEl3suVOj6nJ20PNk1hBfvbVqmxOPhDkHsi7vOtwcu8txXsfzwbFfSMk0cvHCd2PhUDl5I5dTl9CIHiABw1zsQnp/0RNT3ICLIk3oeekl+aqC0/JENZWhpIaqIo3Ney4q9zl1KAwcORFVVfvrpJzp06MCOHTt4//33AZg8eTKbN2/mvffeo0mTJhgMBh588EFycnLKHFJcXBwDBgxgwoQJzJo1Cy8vL3bu3MmYMWPIycnB2dkZg6H4kUtvta46KdMnbE5ODvv372fKlCnWZRqNhl69erFr164i99m1axeTJk0qtKxPnz6sW7eu2PNkZ2eTnZ1tfZ2WlnfTZm5uLrm5Zb+Rt2Cf8uwriiflaju1oWwNWvj00Tt4btUhok+l8PH2c2XaX1HAQaOgyU98VFUlM9eCo1bh3aHh3BfuX6by6d3Cm7AJHVm4dge+gY24kWvhr8xc0jJN/JWV929aVi5/ZebioFEIrGOgnqeBQE8D9evo8/71NFC/jgEvZ0cURcFkKn4Y7uJMv68ZRy+lcuKykc5ztlpbh/5XPQ89bYI8aBngzuW0LI4kpHEsMZ20LBMxZ65ah1wHcNU54OSQX06KgqKARlHQaBQ0CvnLFNZPjETnUPbxZ2rye7A6S88fIMOtiodiF+K2pSil6j5mb3q9niFDhrBixQrOnDlDs2bNaNu2LZA3MMCoUaMYPHgwAEajkbi4uHKdZ//+/VgsFubNm4dGk1c3rF69utA2ERERREVFMXPmzJv2Dw0NxWAwEBUVxZNPPlmuGKpCmT5hU1JSMJvN+Pn5FVru5+dX7M1HSUlJRW6flJRU7Hlmz55dZKFu2rQJZ+ey35hdYPPmzeXeVxRPytV2akPZ3l8HfEIUkjMVMs2QaYJMM2SZFevzTBOoFG6VUFXINavAfzMBvVZldFMzyoUDbChno8w9AYD5bN6Yky75j7+dN6+BJAfIG1iCNFDT4GI8XCzfaQsZGgDzU7RkmhWcNCoNXFWCXaGhm0pDVxUPJyNgBOMlgjTQPhDM9SAxE+KNCheMCvE3FBIy8rq6kV3iKfll40bKkeeQkWGnm3drOWO2tOgIIYo2cuRIBgwYwNGjR/nHP/5hXR4aGsp3333HwIEDURSFqVOn3jRCW2k1adKE3NxcFi1axMCBA4mJibmpp9WUKVMIDw/n6aefZvz48Tg5ObFt2zaGDRuGt7c3//znP3nllVdwcnKiS5cuXLlyhaNHjzJmzJgKXX9lqpafsFOmTCnUCpSWlkZQUBC9e/fG3b3koXr/Ljc3l82bN3Pvvffi6Fj5E07erqRcbae2le3AEtbntdaYMVtUzBYwq2r+cxWLqmKyqFgsKr5uOlwq8At4dSrXfvdmkZqRS2MflxJH9ytOdq
6Zi6lZmC0WLGrePUyqmld+FjWvzCwqWFSVDg3rlOu+sYIWdVG5xnYNoe6NeAZGBJS8sRDittKjRw+8vLw4efIkjzzyiHX5/PnzGT16NJ07d7YmGuX9jG7dujXz58/nX//6F1OmTKFbt27Mnj2bxx57zLpN06ZN2bRpE6+99hodO3bEYDDQqVMnRowYAcDUqVNxcHBg2rRpJCQkEBAQwPjx4yt28ZWsTN8YvL290Wq1XL58udDyy5cv4+9f9DC6/v7+ZdoeQKfTodPpblru6OhYoS8nFd1fFE3K1XZup7J1qpr5RYHqUa6BdR0JrODUUI6OjjR3tu2AE/Yup9qqYV1nmnmqNKxb/l4KQojaSaPRkJBw8/1EwcHBbN26tdCyiRMnFnpdlq5sL774Ii+++GKhZY8+WngcyLvvvpuYmJhi43z99dd5/fXXS33OqlamnxGdnJxo164dUVFR1mUWi4WoqCgiIyOL3CcyMrLQ9pDXHae47YUQQgghhBCiosrcX2LSpEl88sknfP755xw/fpwJEyZw48YN6yhsjz32WKHBCp5//nk2btzIvHnzOHHiBDNmzGDfvn3lGu9bCCGEEEIIcWsrVqzA1dW1yEfBXDi3gzJ3dh8+fDhXrlxh2rRpJCUl0aZNGzZu3GgdcCA+Pt46egNA586dWblyJW+88QavvfYaoaGhrFu3TubQEUIIIYQQwgbuv/9+OnXqVOS626lLcrnu6n3mmWeKbZGJjo6+admwYcOKnXBICCGEEEIIUXnc3Nxwc3Ozdxh2V76hfoQQQgghhBCiGpNERwghhBBC1DpqUTMyixqjMv5+kugIIYQQQohao+AeFJnwuGYr+PtV5J6iajlhqBBCCCGEEOWh1Wrx9PQkOTkZAGdnZxSl7BMmVxWLxUJOTg5ZWVmFBvS6XamqSkZGBsnJyXh6eqLVast9LEl0hBBCCCFErVIwMX1BslOdqapKZmYmBoOhWidkVc3T09P6dywvSXSEEEIIIUStoigKAQEB+Pr6kpuba+9wbik3N5ft27fTrVu322ro51txdHSsUEtOAUl0hBBCCCFEraTVaivlC7MtabVaTCYTer1eEp1KJh0BhRBCCCGEELWOJDpCCCGEEEKIWkcSHSGEEEIIIUStUyPu0SmYMCgtLa1c++fm5pKRkUFaWpr0faxEUq62I2VrG1KuZVfwuSsT7xUm9VL1JWVrO1K2tiHlWnalrZtqRKKTnp4OQFBQkJ0jEUKI21N6ejoeHh72DqPakHpJCCHsr6S6SVFrwM90FouFhIQE3NzcyjW+eFpaGkFBQVy4cAF3d3cbRHh7knK1HSlb25ByLTtVVUlPT6devXoykd3/kHqp+pKytR0pW9uQci270tZNNaJFR6PREBgYWOHjuLu7yxvIBqRcbUfK1jakXMtGWnJuJvVS9SdlaztStrYh5Vo2pamb5Oc5IYQQQgghRK0jiY4QQgghhBCi1rktEh2dTsf06dPR6XT2DqVWkXK1HSlb25ByFdWFvBdtR8rWdqRsbUPK1XZqxGAEQgghhBBCCFEWt0WLjhBCCCGEEOL2IomOEEIIIYQQotaRREcIIYQQQghR69xWiY6iKKxbt87eYdQ6Uq5VJy4uDkVROHjwoL1DqVWkXIU9yWeobUi5Vg35/LQdKduKq3WJzuLFiwkODkav19OpUyf27t1r75BqvBkzZqAoSqFH8+bN7R1WjbR9+3YGDhxIvXr1iqyEVVVl2rRpBAQEYDAY6NWrF6dPn7ZPsDVISeU6atSom97Dffv2tU+w4rYkdVPlk7qpcki9ZDtSN9lfrUp0Vq1axaRJk5g+fToHDhygdevW9OnTh+TkZHuHVuOFhYWRmJhofezcudPeIdVIN27coHXr1ixevLjI9XPnzuWDDz5g6dKl7NmzBxcXF/r06UNWVlYVR1qzlFSuAH379i30Hv7qq6+qMEJxO5O6yXakbqo4qZdsR+om+6tVic78+fMZO3YsTzzxBC1btm
Tp0qU4OzuzbNmyIrefPn06AQEBHD58uIojrXkcHBzw9/e3Pry9vYvdVsq1eP369ePtt99m8ODBN61TVZUFCxbwxhtvMGjQICIiIvjiiy9ISEgotvuF2Wxm9OjRNG/enPj4eBtHX33dqlwL6HS6Qu/hOnXqFLutlKuoTFI32Y7UTRUn9ZLtSN1kf7Um0cnJyWH//v306tXLukyj0dCrVy927dpVaFtVVXn22Wf54osv2LFjBxEREVUdbo1z+vRp6tWrR6NGjRg5cmSR/8GkXCvm/PnzJCUlFXoPe3h40KlTp5vewwDZ2dkMGzaMgwcPsmPHDho0aFCV4dY40dHR+Pr60qxZMyZMmMDVq1eL3E7KVVQmqZtsS+om25J6yfakbrItB3sHUFlSUlIwm834+fkVWu7n58eJEyesr00mE//4xz+IjY1l586d1K9fv6pDrXE6derEZ599RrNmzUhMTGTmzJncdddd/PHHH7i5uQFSrpUhKSkJoMj3cMG6Akajkfvuu4/s7Gy2bduGh4dHlcVZE/Xt25chQ4YQEhLC2bNnee211+jXrx+7du1Cq9Vat5NyFZVN6ibbkbrJ9qResi2pm2yv1iQ6pfXiiy+i0+nYvXv3LZu4xX/169fP+jwiIoJOnTrRsGFDVq9ezZgxYwAp16o2YsQIAgMD2bp1KwaDwd7hVHsPP/yw9Xl4eDgRERE0btyY6OhoevbsaV0n5SrsRT5Dy07qpupFPj/LTuom26s1Xde8vb3RarVcvny50PLLly/j7+9vfX3vvfdy6dIlfvnll6oOsdbw9PSkadOmnDlzxrpMyrXiCt6nJb2HAfr378/hw4eL7DogStaoUSO8vb0LvYdBylVUPqmbqo7UTZVP6qWqJXVT5as1iY6TkxPt2rUjKirKusxisRAVFUVkZKR12f3338/KlSt58skn+frrr+0Rao1nNBo5e/YsAQEB1mVSrhUXEhKCv79/ofdwWloae/bsKfQeBpgwYQJz5szh/vvv59dff63qUGu8ixcvcvXq1ULvYZByFZVP6qaqI3VT5ZN6qWpJ3WQDai3y9ddfqzqdTv3ss8/UY8eOqU899ZTq6empJiUlqaqqqoC6du1aVVVVdc2aNaper1fXrFljx4hrhpdeekmNjo5Wz58/r8bExKi9evVSvb291eTkZFVVpVzLIj09XY2NjVVjY2NVQJ0/f74aGxur/vnnn6qqquqcOXNUT09P9fvvv1cPHz6sDho0SA0JCVEzMzNVVVXV8+fPq4AaGxurqqqqvv/++6qrq6u6Y8cOe11StXCrck1PT1cnT56s7tq1Sz1//ry6ZcsWtW3btmpoaKialZWlqqqUq7AtqZtsQ+qmyiH1ku1I3WR/tSrRUVVVXbRokdqgQQPVyclJ7dixo7p7927ruv/90FNVVV21apWq1+vVb7/91g6R1hzDhw9XAwICVCcnJ7V+/frq8OHD1TNnzljXS7mW3rZt21Tgpsfjjz+uqqqqWiwWderUqaqfn5+q0+nUnj17qidPnrTu//cPPVVV1Xnz5qlubm5qTExMFV9N9XGrcs3IyFB79+6t+vj4qI6OjmrDhg3VsWPHWr9kqqqUq7A9qZsqn9RNlUPqJduRusn+FFVVVdu1FwkhhBBCCCFE1as19+gIIYQQQgghRAFJdIQQQgghhBC1jiQ6QgghhBBCiFpHEh0hhBBCCCFErSOJjhBCCCGEEKLWkURHCCGEEEIIUetIoiOEEEIIIYSodSTREUIIIYQQQtQ6kugIUUlGjRrFAw88YO8whBBCCEDqJSEk0RFCCCGEEELUOpLoCFFG33zzDeHh4RgMBurWrUuvXr14+eWX+fzzz/n+++9RFAVFUYiOjgbgwoULPPTQQ3h6euLl5cWgQYOIi4uzHq/gF7eZM2fi4+ODu7s748ePJycnxz4XKIQQokaRekmIojnYOwAhapLExERGjBjB3LlzGTx4MOnp6ezYsYPHHnuM+Ph40tLSWL58OQ
BeXl7k5ubSp08fIiMj2bFjBw4ODrz99tv07duXw4cP4+TkBEBUVBR6vZ7o6Gji4uJ44oknqFu3LrNmzbLn5QohhKjmpF4SoniS6AhRBomJiZhMJoYMGULDhg0BCA8PB8BgMJCdnY2/v791+y+//BKLxcKnn36KoigALF++HE9PT6Kjo+nduzcATk5OLFu2DGdnZ8LCwnjzzTd5+eWXeeutt9BopOFVCCFE0aReEqJ48k4Vogxat25Nz549CQ8PZ9iwYXzyySdcv3692O0PHTrEmTNncHNzw9XVFVdXV7y8vMjKyuLs2bOFjuvs7Gx9HRkZidFo5MKFCza9HiGEEDWb1EtCFE9adIQoA61Wy+bNm/ntt9/YtGkTixYt4vXXX2fPnj1Fbm80GmnXrh0rVqy4aZ2Pj4+twxVCCFHLSb0kRPEk0RGijBRFoUuXLnTp0oVp06bRsGFD1q5di5OTE2azudC2bdu2ZdWqVfj6+uLu7l7sMQ8dOkRmZiYGgwGA3bt34+rqSlBQkE2vRQghRM0n9ZIQRZOua0KUwZ49e3jnnXfYt28f8fHxfPfdd1y5coUWLVoQHBzM4cOHOXnyJCkpKeTm5jJy5Ei8vb0ZNGgQO3bs4Pz580RHR/Pcc89x8eJF63FzcnIYM2YMx44dY8OGDUyfPp1nnnlG+kELIYS4JamXhCietOgIUQbu7u5s376dBQsWkJaWRsOGDZk3bx79+vWjffv2REdH0759e4xGI9u2beOee+5h+/bt/POf/2TIkCGkp6dTv359evbsWeiXtJ49exIaGkq3bt3Izs5mxIgRzJgxw34XKoQQokaQekmI4imqqqr2DkKI29moUaNITU1l3bp19g5FCCGEkHpJ1BrS/iiEEEIIIYSodSTREUIIIYQQQtQ60nVNCCGEEEIIUetIi44QQgghhBCi1pFERwghhBBCCFHrSKIjhBBCCCGEqHUk0RFCCCGEEELUOpLoCCGEEEIIIWodSXSEEEIIIYQQtY4kOkIIIYQQQohaRxIdIYQQQgghRK0jiY4QQgghhBCi1vl/V6kvpXaHbzgAAAAASUVORK5CYII=",
            "text/plain": [
              "<Figure size 1000x500 with 2 Axes>"
            ]
          },
          "metadata": {},
          "output_type": "display_data"
        }
      ],
      "source": [
        "plot_learning_curves(history, sample_step=500)  # x-axis is in training steps"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 26,
      "metadata": {
        "id": "e0JxP6FE9L8Y"
      },
      "outputs": [],
      "source": [
        "# Use a context manager so the archive handle is closed even if extractall raises\n",
        "with py7zr.SevenZipFile(r'./test.7z', 'r') as archive:\n",
        "    archive.extractall(path=r'./competitions/cifar-10/')"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 27,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "KSDxPyov9i-y",
        "outputId": "f2a4bc9b-6d9c-446c-9d46-df5cbbdd1ef4"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "300000\n"
          ]
        }
      ],
      "source": [
        "!ls competitions/cifar-10/test|wc -l"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 28,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "H39dl-23-cil",
        "outputId": "32a7cf35-7252-4116-92f1-2b1ea50e2939"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "competitions   sample_data\t     trainLabels.csv\n",
            "kaggle.json    sampleSubmission.csv  wangdao_deeplearning_train.py\n",
            "model_weights  test.7z\n",
            "__pycache__    train.7z\n"
          ]
        }
      ],
      "source": [
        "!ls"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 29,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.818553Z",
          "start_time": "2025-06-26T01:45:37.816716Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "Yvx48aMb4pNw",
        "outputId": "b064e7fa-c273-4315-e1cd-7f02ca14d706"
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.11/dist-packages/torch/utils/data/dataloader.py:624: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
            "  warnings.warn(\n"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "正在预测测试集...\n"
          ]
        },
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "\r预测进度:   0%|          | 0/2344 [00:00<?, ?it/s]/usr/local/lib/python3.11/dist-packages/torch/utils/data/dataloader.py:624: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
            "  warnings.warn(\n",
            "预测进度: 100%|██████████| 2344/2344 [01:56<00:00, 20.14it/s]\n"
          ]
        },
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "id列是否有重复值: False\n",
            "预测完成，结果已保存至 cifar10_submission.csv\n"
          ]
        }
      ],
      "source": [
        "# Imports needed for test-set inference\n",
        "import os\n",
        "import pandas as pd\n",
        "from PIL import Image\n",
        "import torch\n",
        "from torch.utils.data import Dataset, DataLoader\n",
        "from torchvision import transforms\n",
        "import tqdm\n",
        "\n",
        "class CIFAR10TestDataset(Dataset):\n",
        "    \"\"\"Unlabelled CIFAR-10 test images read from a directory of .png files.\n",
        "\n",
        "    Each item is (image_tensor, image_id), where the id is the integer file\n",
        "    stem (e.g. '123.png' -> 123) so it matches the Kaggle submission ids.\n",
        "    \"\"\"\n",
        "    def __init__(self, img_dir, transform=None):\n",
        "        \"\"\"\n",
        "        Args:\n",
        "            img_dir: directory containing the test images (.png)\n",
        "            transform: optional preprocessing applied to each PIL image\n",
        "        \"\"\"\n",
        "        self.img_dir = img_dir\n",
        "        self.transform = transform\n",
        "        self.img_files = [f for f in os.listdir(img_dir) if f.endswith('.png')]\n",
        "\n",
        "    def __len__(self):\n",
        "        return len(self.img_files)\n",
        "\n",
        "    def __getitem__(self, idx):\n",
        "        img_path = os.path.join(self.img_dir, self.img_files[idx])\n",
        "        image = Image.open(img_path).convert('RGB')\n",
        "\n",
        "        if self.transform:\n",
        "            image = self.transform(image)\n",
        "\n",
        "        # Image id = file name without the extension (used as submission id)\n",
        "        img_id = int(os.path.splitext(self.img_files[idx])[0])\n",
        "\n",
        "        return image, img_id\n",
        "\n",
        "def predict_test_set(model, img_dir, labels_file, device, batch_size=64, num_workers=4):\n",
        "    \"\"\"Predict the test set and write cifar10_submission.csv.\n",
        "\n",
        "    Args:\n",
        "        model: trained classifier, already moved to `device`\n",
        "        img_dir: directory with the test .png images\n",
        "        labels_file: sample-submission template; used to sanity-check that\n",
        "            exactly one prediction was produced per expected row\n",
        "        device: torch device used for inference\n",
        "        batch_size: inference batch size\n",
        "        num_workers: DataLoader worker processes; lower this if the runtime\n",
        "            warns it exceeds the available CPU count\n",
        "    \"\"\"\n",
        "    # Preprocessing must match the training pipeline (same normalization stats)\n",
        "    transform = transforms.Compose([\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
        "    ])\n",
        "\n",
        "    test_dataset = CIFAR10TestDataset(img_dir, transform=transform)\n",
        "    test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=num_workers)\n",
        "\n",
        "    # Inference mode: no dropout / batch-norm updates, no gradients\n",
        "    model.eval()\n",
        "    predictions = {}\n",
        "\n",
        "    print(\"正在预测测试集...\")\n",
        "    with torch.no_grad():\n",
        "        for images, img_ids in tqdm.tqdm(test_loader, desc=\"预测进度\"):\n",
        "            images = images.to(device)\n",
        "            outputs = model(images)\n",
        "            # argmax over the class dimension gives the predicted label index\n",
        "            _, predicted = torch.max(outputs, 1)\n",
        "\n",
        "            # A batch holds many images, so record each prediction by its id\n",
        "            for i, img_id in enumerate(img_ids):\n",
        "                predictions[img_id.item()] = predicted[i].item()\n",
        "\n",
        "    # CIFAR-10 class names in label-index order\n",
        "    class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n",
        "\n",
        "    # Map numeric labels to class-name strings\n",
        "    labeled_predictions = {img_id: class_names[pred] for img_id, pred in predictions.items()}\n",
        "\n",
        "    submission_df = pd.DataFrame({\n",
        "        'id': list(labeled_predictions.keys()),\n",
        "        'label': list(labeled_predictions.values())\n",
        "    }).sort_values(by='id')\n",
        "\n",
        "    # Sanity checks. Note: the original read `labels_file` into\n",
        "    # `submission_df` and then immediately overwrote it, so the template\n",
        "    # was never actually used; here it validates the row count.\n",
        "    has_duplicates = submission_df['id'].duplicated().any()\n",
        "    print(f\"id列是否有重复值: {has_duplicates}\")\n",
        "    template_df = pd.read_csv(labels_file)\n",
        "    if len(submission_df) != len(template_df):\n",
        "        print(f\"Warning: {len(submission_df)} predictions but {len(template_df)} rows in {labels_file}\")\n",
        "\n",
        "    # Save the predictions\n",
        "    output_file = 'cifar10_submission.csv'\n",
        "    submission_df.to_csv(output_file, index=False)\n",
        "    print(f\"预测完成，结果已保存至 {output_file}\")\n",
        "\n",
        "# Run inference on the Kaggle test split\n",
        "img_dir = r\"competitions/cifar-10/test\"\n",
        "labels_file = r\"./sampleSubmission.csv\"\n",
        "predict_test_set(model, img_dir, labels_file, device, batch_size=128)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 30,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "f90sLQwP_o3I",
        "outputId": "153e34a0-e14c-4621-bb45-69b5fb29052c"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "id,label\n",
            "1,deer\n",
            "2,airplane\n",
            "3,automobile\n",
            "4,ship\n",
            "5,bird\n",
            "6,cat\n",
            "7,airplane\n",
            "8,horse\n",
            "9,bird\n"
          ]
        }
      ],
      "source": [
        "!head -10 cifar10_submission.csv"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 31,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "R1w9Z1W-AOgY",
        "outputId": "c3c8265f-d7f1-4326-c663-559b532e3200"
      },
      "outputs": [
        {
          "name": "stdout",
          "output_type": "stream",
          "text": [
            "300001 cifar10_submission.csv\n"
          ]
        }
      ],
      "source": [
        "!wc -l cifar10_submission.csv"
      ]
    }
  ],
  "metadata": {
    "accelerator": "GPU",
    "colab": {
      "gpuType": "T4",
      "provenance": []
    },
    "kernelspec": {
      "display_name": "Python 3",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.12.3"
    },
    "widgets": {
      "application/vnd.jupyter.widget-state+json": {
        "257f1a5b41cb452594f3fdea255cc3e7": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "FloatProgressModel",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "FloatProgressModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "ProgressView",
            "bar_style": "danger",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_8d259ce1b86544a7976ea92681bea157",
            "max": 35200,
            "min": 0,
            "orientation": "horizontal",
            "style": "IPY_MODEL_26edd4d49d42446f9b4fd1467258e87d",
            "value": 18500
          }
        },
        "26edd4d49d42446f9b4fd1467258e87d": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "ProgressStyleModel",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "ProgressStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "bar_color": null,
            "description_width": ""
          }
        },
        "312aa4960d4c46679e8772ebcab23a48": {
          "model_module": "@jupyter-widgets/base",
          "model_module_version": "1.2.0",
          "model_name": "LayoutModel",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "3be244a7a21b4e9b8b453cd02d60f009": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "DescriptionStyleModel",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        },
        "49492015d05746d6b5509b547a59ef9f": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "HTMLModel",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_312aa4960d4c46679e8772ebcab23a48",
            "placeholder": "​",
            "style": "IPY_MODEL_ac414069f3b04cf59591cf4569060054",
            "value": " 53%"
          }
        },
        "64664de1cfd242fbae10a4e355e1a71d": {
          "model_module": "@jupyter-widgets/base",
          "model_module_version": "1.2.0",
          "model_name": "LayoutModel",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "6ffee922c3534303b77040167f5ea168": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "HBoxModel",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HBoxModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HBoxView",
            "box_style": "",
            "children": [
              "IPY_MODEL_49492015d05746d6b5509b547a59ef9f",
              "IPY_MODEL_257f1a5b41cb452594f3fdea255cc3e7",
              "IPY_MODEL_7b327c3be2714bfbbdfd40c6de04e7af"
            ],
            "layout": "IPY_MODEL_64664de1cfd242fbae10a4e355e1a71d"
          }
        },
        "7b327c3be2714bfbbdfd40c6de04e7af": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "HTMLModel",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_a5bbd3a5d0594820bdbbf5b02927d73c",
            "placeholder": "​",
            "style": "IPY_MODEL_3be244a7a21b4e9b8b453cd02d60f009",
            "value": " 18500/35200 [16:18&lt;12:20, 22.54it/s, epoch=26, loss=0.0014, acc=100.00%]"
          }
        },
        "8d259ce1b86544a7976ea92681bea157": {
          "model_module": "@jupyter-widgets/base",
          "model_module_version": "1.2.0",
          "model_name": "LayoutModel",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "a5bbd3a5d0594820bdbbf5b02927d73c": {
          "model_module": "@jupyter-widgets/base",
          "model_module_version": "1.2.0",
          "model_name": "LayoutModel",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "ac414069f3b04cf59591cf4569060054": {
          "model_module": "@jupyter-widgets/controls",
          "model_module_version": "1.5.0",
          "model_name": "DescriptionStyleModel",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        }
      }
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}
