{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "EPU_V8M5aGiK"
      },
      "source": [
        "# 10-cifar"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 1,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:32.363026Z",
          "start_time": "2025-06-26T01:43:29.447990Z"
        },
        "id": "We_D_UbfaGiM"
      },
      "outputs": [],
      "source": [
        "import torch\n",
        "import torchvision\n",
        "import numpy as np\n",
        "import matplotlib.pyplot as plt\n",
        "from torchvision import datasets, transforms\n",
        "from deeplearning_train import EarlyStopping, ModelSaver,train_classification_model,plot_learning_curves\n",
        "from deeplearning_train import evaluate_classification_model as evaluate_model\n",
        "import torchvision.models as models\n",
        "import torch.nn as nn"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 2,
      "metadata": {
        "id": "D7lUvbSNaGiQ"
      },
      "outputs": [],
      "source": [
        "# # 理解AdaptiveAvgPool2d\n",
        "# # 创建一个随机输入张量，模拟特征图\n",
        "# # 形状为 [1, 3, 6, 6]，表示1个样本，3个通道，6x6的特征图\n",
        "# input_tensor = torch.randn(1, 3, 6, 6)\n",
        "# print(\"输入张量的形状:\", input_tensor.shape)\n",
        "\n",
        "# # 创建 AdaptiveAvgPool2d 层，指定输出大小为 2x2，\n",
        "# adaptive_pool = nn.AdaptiveAvgPool2d(output_size=(2, 2))  # 创建一个自适应平均池化层，将输入特征图池化为2x2的输出\n",
        "\n",
        "# # 对输入张量进行自适应平均池化\n",
        "# output_tensor = adaptive_pool(input_tensor) # 对输入张量进行自适应平均池化，输出形状为[1,3,2,2]\n",
        "# print(\"输出张量的形状:\", output_tensor.shape)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 3,
      "metadata": {
        "id": "e-MNsvcJaGiR"
      },
      "outputs": [],
      "source": [
        "import json\n",
        "token = {\"username\":\"zhangyudataset\",\"key\":\"6ae9a985be19950353520e31297702b4\"}\n",
        "with open('/content/kaggle.json', 'w') as file:\n",
        "  json.dump(token, file)  # json.dump类似于write，直接把字典类型数据变为字符串写入文件\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 4,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "KhuXgNFMaGiR",
        "outputId": "c19c8410-e7b9-4e98-a1e0-38d69881edb4"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "- path is now set to: /content\n"
          ]
        }
      ],
      "source": [
        "# Move the credentials file to where the Kaggle CLI expects it,\n",
        "# restrict its permissions, and point the download path at /content.\n",
        "!mkdir -p ~/.kaggle\n",
        "!cp /content/kaggle.json ~/.kaggle/\n",
        "!chmod 600 ~/.kaggle/kaggle.json\n",
        "!kaggle config set -n path -v /content"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 5,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "oWc-GrCraGiR",
        "outputId": "7e91f4c1-0fbd-4c0a-d8e9-820a7a6350a2"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Downloading cifar-10.zip to /content/competitions/cifar-10\n",
            " 94% 675M/715M [00:10<00:00, 64.9MB/s]\n",
            "100% 715M/715M [00:10<00:00, 70.1MB/s]\n"
          ]
        }
      ],
      "source": [
        "# 需要先参加比赛才能下载数据集\n",
        "!kaggle competitions download -c cifar-10"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 6,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "qPJdBpjXaGiR",
        "outputId": "ab01da21-b927-4cc6-f59f-6ef81dceb7c6"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Archive:  /content/competitions/cifar-10/cifar-10.zip\n",
            "  inflating: sampleSubmission.csv    \n",
            "  inflating: test.7z                 \n",
            "  inflating: train.7z                \n",
            "  inflating: trainLabels.csv         \n"
          ]
        }
      ],
      "source": [
        "!unzip /content/competitions/cifar-10/cifar-10.zip"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 7,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "QJbEjrVHaGiR",
        "outputId": "1e8576f8-a02e-4b6b-8df1-837a009f54e8",
        "collapsed": true
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "Collecting py7zr\n",
            "  Downloading py7zr-1.0.0-py3-none-any.whl.metadata (17 kB)\n",
            "Collecting texttable (from py7zr)\n",
            "  Downloading texttable-1.7.0-py2.py3-none-any.whl.metadata (9.8 kB)\n",
            "Requirement already satisfied: pycryptodomex>=3.20.0 in /usr/local/lib/python3.11/dist-packages (from py7zr) (3.23.0)\n",
            "Collecting brotli>=1.1.0 (from py7zr)\n",
            "  Downloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.5 kB)\n",
            "Requirement already satisfied: psutil in /usr/local/lib/python3.11/dist-packages (from py7zr) (5.9.5)\n",
            "Collecting pyzstd>=0.16.1 (from py7zr)\n",
            "  Downloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.5 kB)\n",
            "Collecting pyppmd<1.3.0,>=1.1.0 (from py7zr)\n",
            "  Downloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.4 kB)\n",
            "Collecting pybcj<1.1.0,>=1.0.0 (from py7zr)\n",
            "  Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.7 kB)\n",
            "Collecting multivolumefile>=0.2.3 (from py7zr)\n",
            "  Downloading multivolumefile-0.2.3-py3-none-any.whl.metadata (6.3 kB)\n",
            "Collecting inflate64<1.1.0,>=1.0.0 (from py7zr)\n",
            "  Downloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (4.4 kB)\n",
            "Requirement already satisfied: typing-extensions>=4.13.2 in /usr/local/lib/python3.11/dist-packages (from pyzstd>=0.16.1->py7zr) (4.14.0)\n",
            "Downloading py7zr-1.0.0-py3-none-any.whl (69 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m69.7/69.7 kB\u001b[0m \u001b[31m5.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.9 MB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.9/2.9 MB\u001b[0m \u001b[31m71.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (96 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m96.4/96.4 kB\u001b[0m \u001b[31m8.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading multivolumefile-0.2.3-py3-none-any.whl (17 kB)\n",
            "Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (50 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.7/50.7 kB\u001b[0m \u001b[31m4.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (141 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m141.3/141.3 kB\u001b[0m \u001b[31m11.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (412 kB)\n",
            "\u001b[2K   \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m412.9/412.9 kB\u001b[0m \u001b[31m26.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
            "\u001b[?25hDownloading texttable-1.7.0-py2.py3-none-any.whl (10 kB)\n",
            "Installing collected packages: texttable, brotli, pyzstd, pyppmd, pybcj, multivolumefile, inflate64, py7zr\n",
            "Successfully installed brotli-1.1.0 inflate64-1.0.3 multivolumefile-0.2.3 py7zr-1.0.0 pybcj-1.0.6 pyppmd-1.2.0 pyzstd-0.17.0 texttable-1.7.0\n"
          ]
        }
      ],
      "source": [
        "# 安装py7zr库，用于解压7z格式的压缩包\n",
        "%pip install py7zr  # 在Jupyter环境下安装py7zr库\n",
        "\n",
        "# 导入py7zr库\n",
        "import py7zr  # 导入py7zr模块以便后续解压操作\n",
        "\n",
        "# 创建一个SevenZipFile对象，打开'./train.7z'文件，模式为只读\n",
        "a = py7zr.SevenZipFile(r'./train.7z', 'r')  # 用于读取7z压缩包\n",
        "\n",
        "# 将压缩包中的内容全部解压到指定目录'./competitions/cifar-10/'\n",
        "a.extractall(path=r'./competitions/cifar-10/')  # 解压所有文件到目标文件夹\n",
        "\n",
        "# 关闭SevenZipFile对象，释放资源\n",
        "a.close()  # 关闭文件，完成解压流程"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "1FQvU7owaGiT"
      },
      "source": [
        "# 把数据集划分为训练集55000和验证集5000，并给DataLoader"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 8,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.144223Z",
          "start_time": "2025-06-26T01:43:33.135368Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "TBDumxASaGiT",
        "outputId": "b947f1df-70fb-4ab9-f648-dc5fcb2e0f63"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "完整数据集大小: 50000\n",
            "训练集大小: 45000\n",
            "验证集大小: 5000\n"
          ]
        }
      ],
      "source": [
        "# 加载CIFAR-10数据集相关库\n",
        "import os  # 导入os模块用于文件路径操作\n",
        "import pandas as pd  # 导入pandas用于读取csv文件\n",
        "from PIL import Image  # 导入PIL库用于图片处理\n",
        "from torch.utils.data import Dataset  # 导入PyTorch的数据集基类\n",
        "\n",
        "# 定义CIFAR-10数据集类，继承自Dataset\n",
        "class CIFAR10Dataset(Dataset):\n",
        "    def __init__(self, img_dir, labels_file, transform=None):  # 构造函数，接收图片文件夹、标签文件和预处理方法\n",
        "        self.img_dir = img_dir  # 保存图片文件夹路径\n",
        "        self.transform = transform  # 保存预处理方法\n",
        "\n",
        "        # 读取标签文件，read_csv默认第一行为列名\n",
        "        self.labels_df = pd.read_csv(labels_file)  # 读取csv标签文件\n",
        "        self.img_names = self.labels_df.iloc[:, 0].values.astype(str)  # 获取第一列图片名，转为字符串数组\n",
        "\n",
        "        # 定义类别名称到数字的映射字典\n",
        "        self.class_names_dict = {'airplane': 0, 'automobile': 1, 'bird': 2, 'cat': 3,\n",
        "                                 'deer': 4, 'dog': 5, 'frog': 6, 'horse': 7, 'ship': 8, 'truck': 9}  # 类别映射字典\n",
        "        # 将文本标签转换为数字ID\n",
        "        self.labels = [self.class_names_dict[label] for label in self.labels_df.iloc[:, 1].values]  # 标签转数字\n",
        "\n",
        "    def __len__(self):  # 返回数据集大小\n",
        "        return len(self.labels)  # 返回标签数量\n",
        "\n",
        "    def __getitem__(self, idx):  # 获取指定索引的数据\n",
        "        img_path = os.path.join(self.img_dir, self.img_names[idx] + '.png')  # 拼接图片路径\n",
        "        image = Image.open(img_path)  # 打开图片\n",
        "        label = self.labels[idx]  # 获取标签\n",
        "\n",
        "        if self.transform:  # 如果有预处理\n",
        "            image_tensor = self.transform(image)  # 对图片做预处理\n",
        "\n",
        "        return image_tensor, label  # 返回图片张量和标签\n",
        "\n",
        "# 定义数据预处理流程\n",
        "transform = transforms.Compose([  # 使用Compose组合多种预处理\n",
        "    transforms.ToTensor(),  # 转为张量\n",
        "    transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))  # 标准化\n",
        "])\n",
        "\n",
        "# 加载CIFAR-10数据集\n",
        "img_dir = r\"competitions/cifar-10/train\"  # 图片文件夹路径\n",
        "labels_file = r\"./trainLabels.csv\"  # 标签文件路径\n",
        "full_dataset = CIFAR10Dataset(img_dir=img_dir, labels_file=labels_file, transform=transform)  # 创建完整数据集对象\n",
        "\n",
        "# 定义类别名称列表\n",
        "class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']  # 类别名称\n",
        "\n",
        "# 划分训练集和验证集\n",
        "train_size = 45000  # 训练集大小\n",
        "val_size = 5000  # 验证集大小\n",
        "generator = torch.Generator().manual_seed(42)  # 固定随机种子，保证可复现\n",
        "train_dataset, val_dataset = torch.utils.data.random_split(  # 随机划分数据集\n",
        "    full_dataset,  # 完整数据集\n",
        "    [train_size, val_size],  # 划分比例\n",
        "    generator=generator  # 随机数生成器\n",
        ")\n",
        "\n",
        "# 查看数据集基本信息\n",
        "print(f\"完整数据集大小: {len(full_dataset)}\")  # 打印完整数据集大小\n",
        "print(f\"训练集大小: {len(train_dataset)}\")  # 打印训练集大小\n",
        "print(f\"验证集大小: {len(val_dataset)}\")  # 打印验证集大小\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 9,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.148120Z",
          "start_time": "2025-06-26T01:43:33.145230Z"
        },
        "id": "dwLmD6WyaGiU"
      },
      "outputs": [],
      "source": [
        "def cal_mean_std(ds):\n",
        "    mean = 0.\n",
        "    std = 0.\n",
        "    for img, _ in ds:\n",
        "        mean += img.mean(dim=(1, 2)) #dim=(1, 2)表示在通道维度上求平均\n",
        "        std += img.std(dim=(1, 2))  #dim=(1, 2)表示在通道维度上求标准差\n",
        "    mean /= len(ds)\n",
        "    std /= len(ds)\n",
        "    return mean, std\n",
        "# cal_mean_std(train_dataset)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 10,
      "metadata": {
        "id": "gytNKcXdaGiU"
      },
      "outputs": [],
      "source": [
        "# 将划分好的45000训练集和5000验证集给DataLoader\n",
        "# 创建数据加载器\n",
        "batch_size = 64\n",
        "train_loader = torch.utils.data.DataLoader(\n",
        "    train_dataset,\n",
        "    batch_size=batch_size,\n",
        "    shuffle=True #打乱数据集，每次迭代时，数据集的顺序都会被打乱\n",
        ")\n",
        "\n",
        "val_loader = torch.utils.data.DataLoader(\n",
        "    val_dataset,\n",
        "    batch_size=batch_size,\n",
        "    shuffle=False\n",
        ")\n"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "fllOhr1daGiU"
      },
      "source": [
        "# 复现简单版vgg11"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 11,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.152657Z",
          "start_time": "2025-06-26T01:43:33.148120Z"
        },
        "id": "cu8KLUamaGiU"
      },
      "outputs": [],
      "source": [
        "import torch.nn as nn  # 导入PyTorch的神经网络模块\n",
        "import torch.nn.functional as F  # 导入PyTorch的函数式API\n",
        "\n",
        "class VGG11_CIFAR10(nn.Module):  # 定义VGG11结构的模型，适用于CIFAR10\n",
        "    def __init__(self):\n",
        "        super().__init__()  # 调用父类初始化方法\n",
        "\n",
        "        # nn.Sequential的作用是将多个层组合成一个有序的容器，按顺序依次执行每一层\n",
        "        # 这样可以让代码更加简洁，方便管理和调用一组网络层\n",
        "        # 例如：nn.Sequential(卷积层, 批归一化, 激活函数, 池化层)会依次执行这些操作\n",
        "\n",
        "        # 第一组卷积层，输入3通道，输出64通道\n",
        "        self.conv1 = nn.Sequential(\n",
        "            nn.Conv2d(3, 64, kernel_size=3, padding=1),  # 卷积层，3->64\n",
        "            nn.BatchNorm2d(64),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)  # 最大池化\n",
        "        )\n",
        "\n",
        "        # 第二组卷积层，输入64通道，输出128通道\n",
        "        self.conv2 = nn.Sequential(\n",
        "            nn.Conv2d(64, 128, kernel_size=3, padding=1),  # 卷积层，64->128\n",
        "            nn.BatchNorm2d(128),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)  # 最大池化\n",
        "        )\n",
        "\n",
        "        # 第三组卷积层，输入128通道，输出256通道\n",
        "        self.conv3 = nn.Sequential(\n",
        "            nn.Conv2d(128, 256, kernel_size=3, padding=1),  # 卷积层，128->256\n",
        "            nn.BatchNorm2d(256),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.Conv2d(256, 256, kernel_size=3, padding=1),  # 卷积层，256->256\n",
        "            nn.BatchNorm2d(256),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)  # 最大池化\n",
        "        )\n",
        "\n",
        "        # 第四组卷积层，输入256通道，输出512通道\n",
        "        self.conv4 = nn.Sequential(\n",
        "            nn.Conv2d(256, 512, kernel_size=3, padding=1),  # 卷积层，256->512\n",
        "            nn.BatchNorm2d(512),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.Conv2d(512, 512, kernel_size=3, padding=1),  # 卷积层，512->512\n",
        "            nn.BatchNorm2d(512),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)  # 最大池化\n",
        "        )\n",
        "\n",
        "        # 第五组卷积层，输入512通道，输出512通道\n",
        "        self.conv5 = nn.Sequential(\n",
        "            nn.Conv2d(512, 512, kernel_size=3, padding=1),  # 卷积层，512->512\n",
        "            nn.BatchNorm2d(512),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.Conv2d(512, 512, kernel_size=3, padding=1),  # 卷积层，512->512\n",
        "            nn.BatchNorm2d(512),  # 批归一化\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.MaxPool2d(kernel_size=2, stride=2)  # 最大池化\n",
        "        )\n",
        "\n",
        "        # 计算展平后的特征维度\n",
        "        # CIFAR10输入32x32，经过5次2x2池化后为1x1\n",
        "        self.classifier = nn.Sequential(\n",
        "            nn.Linear(512 * 1 * 1, 4096),  # 全连接层，输入512，输出4096\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.Dropout(0.5),  # Dropout防止过拟合\n",
        "            nn.Linear(4096, 4096),  # 全连接层，4096->4096\n",
        "            nn.ReLU(inplace=True),  # 激活函数\n",
        "            nn.Dropout(0.5),  # Dropout防止过拟合\n",
        "            nn.Linear(4096, 10)  # 输出层，10分类\n",
        "        )\n",
        "\n",
        "        self._initialize_weights()  # 初始化权重\n",
        "\n",
        "    def _initialize_weights(self):\n",
        "        # 使用xavier初始化卷积层和全连接层权重\n",
        "        for m in self.modules():\n",
        "            if isinstance(m, nn.Conv2d):  # 如果是卷积层\n",
        "                nn.init.xavier_uniform_(m.weight)  # xavier均匀初始化\n",
        "                if m.bias is not None:  # 如果有偏置\n",
        "                    nn.init.zeros_(m.bias)  # 偏置初始化为0\n",
        "            elif isinstance(m, nn.Linear):  # 如果是全连接层\n",
        "                nn.init.xavier_uniform_(m.weight)  # xavier均匀初始化\n",
        "                if m.bias is not None:  # 如果有偏置\n",
        "                    nn.init.zeros_(m.bias)  # 偏置初始化为0\n",
        "\n",
        "    def forward(self, x):\n",
        "        x = self.conv1(x)  # 第一组卷积\n",
        "        x = self.conv2(x)  # 第二组卷积\n",
        "        x = self.conv3(x)  # 第三组卷积\n",
        "        x = self.conv4(x)  # 第四组卷积\n",
        "        x = self.conv5(x)  # 第五组卷积\n",
        "        x = x.view(x.size(0), -1)  # 展平成一维\n",
        "        x = self.classifier(x)  # 全连接分类器\n",
        "        return x  # 返回输出\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 12,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.185031Z",
          "start_time": "2025-06-26T01:43:33.152657Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "87txlJ0AaGiV",
        "outputId": "e3a7926a-920d-47da-a968-0bdb07c7b369"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "批次图像形状: torch.Size([64, 3, 32, 32])\n",
            "批次标签形状: torch.Size([64])\n",
            "----------------------------------------------------------------------------------------------------\n",
            "torch.Size([64, 10])\n"
          ]
        }
      ],
      "source": [
        "# 实例化模型\n",
        "model = VGG11_CIFAR10()\n",
        "\n",
        "# 从train_loader获取第一个批次的数据\n",
        "dataiter = iter(train_loader)\n",
        "images, labels = next(dataiter)\n",
        "\n",
        "# 查看批次数据的形状\n",
        "print(\"批次图像形状:\", images.shape)\n",
        "print(\"批次标签形状:\", labels.shape)\n",
        "\n",
        "\n",
        "print('-'*100)\n",
        "# 进行前向传播\n",
        "with torch.no_grad():  # 不需要计算梯度\n",
        "    outputs = model(images)\n",
        "\n",
        "\n",
        "print(outputs.shape)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 13,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.203053Z",
          "start_time": "2025-06-26T01:43:33.199532Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "collapsed": true,
        "id": "C8o7E58BaGiV",
        "outputId": "5a13bc8e-e7ef-4576-aaab-eb4778d98045"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "需要求梯度的参数总量: 28149514\n",
            "模型总参数量: 28149514\n",
            "\n",
            "各层参数量明细:\n",
            "conv1.0.weight: 1728 参数\n",
            "conv1.0.bias: 64 参数\n",
            "conv1.1.weight: 64 参数\n",
            "conv1.1.bias: 64 参数\n",
            "conv2.0.weight: 73728 参数\n",
            "conv2.0.bias: 128 参数\n",
            "conv2.1.weight: 128 参数\n",
            "conv2.1.bias: 128 参数\n",
            "conv3.0.weight: 294912 参数\n",
            "conv3.0.bias: 256 参数\n",
            "conv3.1.weight: 256 参数\n",
            "conv3.1.bias: 256 参数\n",
            "conv3.3.weight: 589824 参数\n",
            "conv3.3.bias: 256 参数\n",
            "conv3.4.weight: 256 参数\n",
            "conv3.4.bias: 256 参数\n",
            "conv4.0.weight: 1179648 参数\n",
            "conv4.0.bias: 512 参数\n",
            "conv4.1.weight: 512 参数\n",
            "conv4.1.bias: 512 参数\n",
            "conv4.3.weight: 2359296 参数\n",
            "conv4.3.bias: 512 参数\n",
            "conv4.4.weight: 512 参数\n",
            "conv4.4.bias: 512 参数\n",
            "conv5.0.weight: 2359296 参数\n",
            "conv5.0.bias: 512 参数\n",
            "conv5.1.weight: 512 参数\n",
            "conv5.1.bias: 512 参数\n",
            "conv5.3.weight: 2359296 参数\n",
            "conv5.3.bias: 512 参数\n",
            "conv5.4.weight: 512 参数\n",
            "conv5.4.bias: 512 参数\n",
            "classifier.0.weight: 2097152 参数\n",
            "classifier.0.bias: 4096 参数\n",
            "classifier.3.weight: 16777216 参数\n",
            "classifier.3.bias: 4096 参数\n",
            "classifier.6.weight: 40960 参数\n",
            "classifier.6.bias: 10 参数\n"
          ]
        }
      ],
      "source": [
        "# 计算模型的总参数量\n",
        "# 统计需要求梯度的参数总量\n",
        "total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
        "print(f\"需要求梯度的参数总量: {total_params}\")\n",
        "\n",
        "# 统计所有参数总量\n",
        "all_params = sum(p.numel() for p in model.parameters())\n",
        "print(f\"模型总参数量: {all_params}\")\n",
        "\n",
        "# 查看每层参数量明细\n",
        "print(\"\\n各层参数量明细:\")\n",
        "for name, param in model.named_parameters():\n",
        "    print(f\"{name}: {param.numel()} 参数\")\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 14,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "T0J8iiHDaGiV",
        "outputId": "826f36c8-5ca3-48a7-cac6-6660767fe90e"
      },
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "294912"
            ]
          },
          "metadata": {},
          "execution_count": 14
        }
      ],
      "source": [
        "128*3*3*256"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "pRqqzwCUaGiV"
      },
      "source": [
        "# 各层参数量明细:\n",
        "conv1.weight: 288 参数 3*3*1*32\n",
        "conv1.bias: 32 参数\n",
        "conv2.weight: 9216 参数 3*3*32*32\n",
        "conv2.bias: 32 参数  \n",
        "conv3.weight: 18432 参数 3*3*32*64\n",
        "conv3.bias: 64 参数\n",
        "conv4.weight: 36864 参数  3*3*64*64\n",
        "conv4.bias: 64 参数\n",
        "conv5.weight: 73728 参数\n",
        "conv5.bias: 128 参数\n",
        "conv6.weight: 147456 参数\n",
        "conv6.bias: 128 参数\n",
        "fc1.weight: 294912 参数 128*3*3*256\n",
        "fc1.bias: 256 参数\n",
        "fc2.weight: 2560 参数\n",
        "fc2.bias: 10 参数"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 15,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:33.217395Z",
          "start_time": "2025-06-26T01:43:33.203561Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "collapsed": true,
        "id": "u5fYYs4RaGiW",
        "outputId": "c7dc2fa0-b75b-41be-b224-da4693887b2c"
      },
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "OrderedDict([('conv1.0.weight',\n",
              "              tensor([[[[-0.0122, -0.0499, -0.0968],\n",
              "                        [ 0.0445,  0.0536, -0.0842],\n",
              "                        [-0.0586, -0.0440,  0.0787]],\n",
              "              \n",
              "                       [[-0.0170, -0.0268,  0.0068],\n",
              "                        [-0.0533, -0.0239,  0.0124],\n",
              "                        [ 0.0419, -0.0677,  0.0833]],\n",
              "              \n",
              "                       [[ 0.0418, -0.0690, -0.0522],\n",
              "                        [ 0.0629, -0.0466,  0.0015],\n",
              "                        [ 0.0856,  0.0336,  0.0970]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0476, -0.0622, -0.0992],\n",
              "                        [-0.0748,  0.0101, -0.0284],\n",
              "                        [ 0.0146, -0.0731, -0.0407]],\n",
              "              \n",
              "                       [[ 0.0022, -0.0260,  0.0967],\n",
              "                        [ 0.0447, -0.0787, -0.0633],\n",
              "                        [-0.0571, -0.0701,  0.0945]],\n",
              "              \n",
              "                       [[-0.0768, -0.0866,  0.0767],\n",
              "                        [-0.0316,  0.0900,  0.0879],\n",
              "                        [ 0.0499,  0.0414, -0.0589]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0094, -0.0308,  0.0507],\n",
              "                        [-0.0393,  0.0897, -0.0319],\n",
              "                        [ 0.0311,  0.0758,  0.0777]],\n",
              "              \n",
              "                       [[-0.0543, -0.0976, -0.0154],\n",
              "                        [ 0.0034,  0.0684,  0.0958],\n",
              "                        [-0.0637,  0.0086, -0.0841]],\n",
              "              \n",
              "                       [[ 0.0750,  0.0019,  0.0185],\n",
              "                        [-0.0291,  0.0811, -0.0356],\n",
              "                        [ 0.0461,  0.0911,  0.0138]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-0.0112, -0.0568,  0.0071],\n",
              "                        [ 0.0088,  0.0728, -0.0322],\n",
              "                        [ 0.0961, -0.0227, -0.0996]],\n",
              "              \n",
              "                       [[-0.0546, -0.0846, -0.0764],\n",
              "                        [ 0.0175,  0.0201,  0.0898],\n",
              "                        [ 0.0259, -0.0986,  0.0651]],\n",
              "              \n",
              "                       [[ 0.0007, -0.0355, -0.0032],\n",
              "                        [ 0.0443,  0.0103, -0.0616],\n",
              "                        [-0.0853, -0.0587,  0.0922]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0614, -0.0461, -0.0919],\n",
              "                        [-0.0560,  0.0088,  0.0674],\n",
              "                        [ 0.0046, -0.0389, -0.0332]],\n",
              "              \n",
              "                       [[ 0.0492, -0.0443, -0.0681],\n",
              "                        [-0.0047,  0.0089, -0.0272],\n",
              "                        [-0.0957, -0.0748,  0.0679]],\n",
              "              \n",
              "                       [[ 0.0922,  0.0302, -0.0088],\n",
              "                        [ 0.0878, -0.0730, -0.0138],\n",
              "                        [-0.0914, -0.0871,  0.0567]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0186, -0.0951, -0.0842],\n",
              "                        [-0.0916,  0.0772,  0.0629],\n",
              "                        [-0.0030,  0.0086,  0.0425]],\n",
              "              \n",
              "                       [[ 0.0723,  0.0064,  0.0525],\n",
              "                        [-0.0080,  0.0910, -0.0194],\n",
              "                        [ 0.0878,  0.0690, -0.0272]],\n",
              "              \n",
              "                       [[ 0.0363,  0.0822,  0.0287],\n",
              "                        [ 0.0921, -0.0304, -0.0332],\n",
              "                        [ 0.0661, -0.0448, -0.0970]]]])),\n",
              "             ('conv1.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv1.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv1.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv1.1.running_mean',\n",
              "              tensor([-1.2216e-03, -6.9997e-04, -1.6114e-03,  4.3640e-04,  5.1172e-04,\n",
              "                       2.9812e-04,  2.2394e-03, -8.4697e-04,  8.1958e-04, -8.8179e-04,\n",
              "                       3.0380e-04, -4.7738e-04,  2.0125e-03,  8.8504e-04, -3.7016e-04,\n",
              "                      -2.0663e-04, -5.4003e-04,  1.1702e-04,  1.1843e-03, -3.4414e-04,\n",
              "                      -1.7936e-04, -6.1347e-04, -2.8383e-05, -1.3789e-03,  9.6846e-04,\n",
              "                       2.7072e-04,  1.8859e-03, -3.0363e-04,  1.6254e-03, -7.0507e-04,\n",
              "                      -1.0219e-03, -6.3015e-04,  1.8512e-03, -9.1755e-04,  2.1241e-04,\n",
              "                       1.3099e-03, -4.9339e-04, -4.8667e-04, -6.8614e-04,  2.7995e-04,\n",
              "                      -2.9474e-04, -1.1145e-03, -1.9800e-03, -9.3027e-04,  1.0590e-03,\n",
              "                      -4.3996e-04, -8.6069e-04, -6.6260e-05,  5.9472e-04,  1.3052e-03,\n",
              "                      -5.6045e-05,  2.5456e-04,  3.7023e-05, -1.5462e-03,  8.2060e-04,\n",
              "                      -1.9576e-04, -2.4693e-04,  3.3509e-04, -9.6865e-04,  9.4791e-04,\n",
              "                       5.6195e-04,  2.7707e-04, -1.9979e-04,  8.3182e-05])),\n",
              "             ('conv1.1.running_var',\n",
              "              tensor([0.9068, 0.9106, 0.9171, 0.9034, 0.9167, 0.9064, 0.9058, 0.9079, 0.9090,\n",
              "                      0.9062, 0.9028, 0.9055, 0.9067, 0.9539, 0.9170, 0.9097, 0.9107, 0.9285,\n",
              "                      0.9140, 0.9017, 0.9021, 0.9309, 0.9043, 0.9031, 0.9152, 0.9041, 0.9042,\n",
              "                      0.9144, 0.9492, 0.9068, 0.9228, 0.9015, 0.9050, 0.9039, 0.9172, 0.9037,\n",
              "                      0.9042, 0.9081, 0.9043, 0.9029, 0.9032, 0.9060, 0.9057, 0.9052, 0.9474,\n",
              "                      0.9052, 0.9098, 0.9136, 0.9106, 0.9068, 0.9166, 0.9071, 0.9333, 1.0081,\n",
              "                      0.9052, 0.9009, 0.9185, 0.9295, 0.9058, 0.9053, 0.9079, 0.9075, 0.9152,\n",
              "                      0.9156])),\n",
              "             ('conv1.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv2.0.weight',\n",
              "              tensor([[[[ 2.4279e-02, -4.2432e-02, -1.8170e-02],\n",
              "                        [ 1.7112e-02, -3.4676e-02, -1.9278e-03],\n",
              "                        [-5.0170e-02,  3.4115e-02, -4.7972e-02]],\n",
              "              \n",
              "                       [[-4.5501e-02, -8.0133e-03,  3.3426e-03],\n",
              "                        [ 9.8443e-03, -4.6080e-03,  5.3139e-02],\n",
              "                        [-2.9638e-02,  9.2570e-03,  3.0539e-02]],\n",
              "              \n",
              "                       [[ 3.5801e-02, -3.9774e-02,  2.0896e-02],\n",
              "                        [ 4.2292e-02, -3.4290e-02, -6.8237e-03],\n",
              "                        [-3.1118e-02, -4.1282e-02, -4.7687e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 4.1453e-02, -2.3463e-02,  4.0520e-02],\n",
              "                        [-5.0121e-02,  1.3792e-02,  2.4776e-02],\n",
              "                        [ 4.0963e-02, -8.5141e-03, -3.9696e-02]],\n",
              "              \n",
              "                       [[ 4.3300e-03,  3.2251e-02, -5.3912e-02],\n",
              "                        [ 4.1108e-03, -1.3102e-02,  5.2097e-02],\n",
              "                        [ 2.6883e-02, -5.7567e-02,  1.6721e-02]],\n",
              "              \n",
              "                       [[-2.9996e-02, -3.4497e-02, -7.8425e-03],\n",
              "                        [-5.4017e-02, -2.2465e-02,  1.7504e-02],\n",
              "                        [-3.5221e-02,  2.1588e-03,  4.3869e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-1.3208e-02,  1.2217e-02,  1.6129e-02],\n",
              "                        [ 3.8550e-02, -3.0103e-02,  3.5458e-02],\n",
              "                        [ 2.5906e-02,  4.7250e-02,  5.3314e-02]],\n",
              "              \n",
              "                       [[ 2.2230e-02,  3.9665e-02, -5.5733e-03],\n",
              "                        [-5.2416e-02, -3.3337e-02,  5.4549e-02],\n",
              "                        [-5.7637e-02, -5.1629e-02, -4.3010e-02]],\n",
              "              \n",
              "                       [[-1.1842e-02,  3.1479e-02, -4.1652e-02],\n",
              "                        [-1.4063e-02,  1.1628e-02, -2.9044e-02],\n",
              "                        [ 3.0053e-02, -1.2226e-02,  4.6675e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 5.2304e-02,  2.7689e-02,  3.0718e-02],\n",
              "                        [-3.7509e-02, -5.1613e-02,  3.0814e-02],\n",
              "                        [ 1.6469e-02,  1.8486e-02,  5.4465e-02]],\n",
              "              \n",
              "                       [[-3.2929e-02, -3.2940e-02,  4.2079e-02],\n",
              "                        [-4.8747e-02, -5.3043e-03,  2.1349e-02],\n",
              "                        [ 1.1345e-02,  7.0488e-03,  5.5865e-02]],\n",
              "              \n",
              "                       [[ 1.4496e-02,  2.5300e-02,  1.1122e-02],\n",
              "                        [ 6.2406e-03,  7.5460e-03, -4.6788e-02],\n",
              "                        [-1.9763e-02,  3.4026e-02,  4.4575e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 4.3911e-02,  4.5712e-02,  1.1450e-02],\n",
              "                        [-5.0694e-02, -3.2784e-02,  4.8307e-02],\n",
              "                        [-2.1567e-02, -3.2553e-02, -9.3052e-03]],\n",
              "              \n",
              "                       [[ 1.6794e-02,  2.6982e-02,  3.8643e-02],\n",
              "                        [ 3.7804e-02, -2.4221e-02,  1.3142e-02],\n",
              "                        [-1.3908e-02, -5.7464e-02,  5.5951e-02]],\n",
              "              \n",
              "                       [[ 4.6505e-02,  1.8777e-02,  5.4692e-02],\n",
              "                        [ 9.4667e-03,  1.3388e-02, -2.7040e-02],\n",
              "                        [ 9.8086e-03,  4.5455e-02, -9.2930e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-3.6276e-02, -2.4594e-02,  4.6080e-02],\n",
              "                        [ 1.7506e-02,  2.2076e-02, -1.3742e-02],\n",
              "                        [ 5.5156e-02,  6.8495e-03,  3.3502e-02]],\n",
              "              \n",
              "                       [[-1.8291e-03, -5.1427e-02,  4.5849e-02],\n",
              "                        [ 2.1217e-02, -3.4231e-03,  4.4302e-02],\n",
              "                        [ 3.9193e-03, -5.2900e-02,  2.2112e-02]],\n",
              "              \n",
              "                       [[ 1.6555e-02,  3.5546e-02,  4.2240e-02],\n",
              "                        [ 5.2233e-02, -8.6153e-03, -1.8515e-03],\n",
              "                        [ 2.9872e-02,  1.3180e-02, -5.0574e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 5.1708e-02, -1.5606e-02,  2.5851e-02],\n",
              "                        [ 5.6681e-02, -5.8686e-02, -1.9306e-02],\n",
              "                        [-5.9730e-03, -4.5616e-02, -7.9603e-03]],\n",
              "              \n",
              "                       [[-1.4204e-02, -3.7776e-02, -5.1240e-02],\n",
              "                        [-5.8381e-02, -3.8438e-02, -3.6316e-03],\n",
              "                        [ 3.0794e-02, -1.5326e-02, -1.8707e-02]],\n",
              "              \n",
              "                       [[-2.4554e-02, -4.4893e-02,  1.6412e-02],\n",
              "                        [ 3.3224e-02,  5.6562e-02,  4.9308e-02],\n",
              "                        [-8.2935e-03,  3.5908e-02, -4.2631e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-3.7151e-02,  6.0254e-03,  5.0131e-03],\n",
              "                        [ 7.8942e-03,  1.8266e-02,  2.3008e-03],\n",
              "                        [ 3.1729e-02, -4.0698e-02, -1.0697e-02]],\n",
              "              \n",
              "                       [[ 1.1728e-02,  4.6465e-02,  3.0352e-02],\n",
              "                        [-1.1699e-02,  3.7039e-02, -4.9020e-02],\n",
              "                        [-5.5993e-02,  3.0708e-03, -4.0648e-02]],\n",
              "              \n",
              "                       [[-7.1068e-03, -4.2368e-02, -2.7437e-02],\n",
              "                        [ 5.2434e-02, -4.5904e-02,  1.5292e-02],\n",
              "                        [ 1.3253e-02, -1.0289e-02,  3.7192e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 4.4819e-02,  5.4625e-02, -1.0186e-04],\n",
              "                        [ 1.2172e-02,  5.4732e-02, -3.6427e-02],\n",
              "                        [-5.2634e-02,  4.7484e-02, -1.6490e-02]],\n",
              "              \n",
              "                       [[ 3.4450e-02, -3.3977e-02, -1.0262e-02],\n",
              "                        [ 3.4078e-02, -2.0425e-02, -4.3286e-02],\n",
              "                        [ 2.7116e-02, -2.9984e-02,  4.9084e-03]],\n",
              "              \n",
              "                       [[-1.6863e-03,  5.8884e-02, -5.3280e-02],\n",
              "                        [-1.0382e-02, -4.2219e-02, -4.0490e-02],\n",
              "                        [-4.3030e-02, -3.0749e-02,  2.2832e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.8110e-02,  5.8414e-02,  6.9600e-03],\n",
              "                        [ 3.9499e-02,  1.5919e-02, -4.1709e-02],\n",
              "                        [-1.1787e-02, -5.7768e-02, -1.7213e-02]],\n",
              "              \n",
              "                       [[-3.4828e-02,  6.8287e-03, -5.7439e-02],\n",
              "                        [ 3.6342e-02,  8.3825e-03, -4.4222e-02],\n",
              "                        [-2.2008e-02,  2.9436e-02,  1.1936e-02]],\n",
              "              \n",
              "                       [[-3.2370e-02, -1.3816e-02,  1.2084e-02],\n",
              "                        [ 2.0706e-02,  1.3538e-03, -2.9647e-02],\n",
              "                        [-4.0634e-02,  5.1346e-02,  3.1413e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 2.7002e-02,  4.8877e-02,  1.6115e-02],\n",
              "                        [ 1.3865e-02,  4.5581e-02, -2.2816e-02],\n",
              "                        [-3.4625e-02, -1.0356e-02,  5.7455e-02]],\n",
              "              \n",
              "                       [[-3.1622e-02,  5.3122e-02, -4.9239e-02],\n",
              "                        [ 6.0878e-03,  2.6319e-02, -1.9807e-02],\n",
              "                        [ 2.5407e-02,  1.1694e-02,  2.0758e-02]],\n",
              "              \n",
              "                       [[-4.3711e-02,  3.6948e-02, -5.6984e-03],\n",
              "                        [-5.7978e-02,  4.0811e-02,  5.3552e-02],\n",
              "                        [ 1.6621e-02,  1.7280e-02, -1.7107e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.0246e-02,  3.8733e-02, -2.2565e-02],\n",
              "                        [ 5.5256e-02,  1.2209e-02,  7.0377e-03],\n",
              "                        [-4.2004e-02, -2.0940e-02,  7.7905e-03]],\n",
              "              \n",
              "                       [[-3.1200e-02,  5.3445e-02, -5.5429e-02],\n",
              "                        [-2.5382e-02,  4.6787e-02,  3.1906e-02],\n",
              "                        [ 1.6106e-02, -3.5960e-04, -6.0829e-03]],\n",
              "              \n",
              "                       [[-1.1653e-02,  5.2598e-02, -1.0255e-02],\n",
              "                        [ 1.8286e-02, -3.6808e-06,  2.9292e-02],\n",
              "                        [ 3.3226e-02, -3.9014e-02,  4.7076e-02]]]])),\n",
              "             ('conv2.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv2.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1.])),\n",
              "             ('conv2.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv2.1.running_mean',\n",
              "              tensor([ 2.3187e-02,  2.3965e-02,  2.9930e-02, -4.7058e-02,  2.6296e-03,\n",
              "                      -2.9153e-02, -4.4134e-02, -2.2593e-03, -5.5411e-02,  4.6451e-02,\n",
              "                       1.1457e-02,  3.9986e-02,  3.2987e-02,  6.6897e-02, -7.8513e-03,\n",
              "                      -5.4703e-02, -2.2887e-02,  3.7553e-02, -1.5975e-02, -9.5378e-04,\n",
              "                       3.5783e-02, -4.7890e-02,  5.0400e-02, -5.6624e-03, -8.2629e-02,\n",
              "                       5.2451e-03,  2.2068e-02, -5.5129e-02,  2.9486e-02, -8.4154e-03,\n",
              "                      -6.5165e-02, -8.0876e-02,  2.0712e-02,  4.4259e-02, -4.9094e-02,\n",
              "                       1.4837e-02, -3.4857e-02, -6.8841e-03,  4.4065e-02,  4.7074e-02,\n",
              "                       1.4192e-04,  1.3145e-02,  1.1465e-02,  7.5472e-03, -2.9328e-02,\n",
              "                      -1.1363e-02,  7.6173e-02, -2.5506e-02, -8.6376e-03,  5.3965e-02,\n",
              "                      -3.6237e-02, -5.1290e-02,  4.6007e-04,  4.2664e-02,  9.0767e-03,\n",
              "                       4.3998e-02, -2.2284e-02, -3.6576e-02, -3.0340e-02, -4.9598e-02,\n",
              "                       6.5204e-02,  2.3908e-02, -1.7536e-02,  2.5707e-02,  5.4773e-02,\n",
              "                      -1.2842e-02,  5.5010e-02,  1.9816e-02,  1.5869e-02,  4.4483e-02,\n",
              "                       2.2570e-02,  3.6546e-02, -6.7356e-02,  3.3753e-02, -3.0840e-02,\n",
              "                       2.7575e-03,  1.1745e-03, -2.6640e-02, -6.2447e-03,  1.9853e-02,\n",
              "                       4.5552e-02,  4.6207e-05, -1.1928e-04,  5.4919e-02, -2.4463e-02,\n",
              "                      -5.6992e-02, -1.8717e-02, -1.9483e-02, -1.9035e-02, -3.6453e-02,\n",
              "                       3.7324e-02,  1.5900e-02,  8.4639e-02, -7.9303e-02, -5.8650e-02,\n",
              "                      -3.2611e-02,  5.5180e-02, -3.0342e-02,  3.0099e-02,  3.7377e-02,\n",
              "                       1.5126e-02, -2.1492e-02, -1.9316e-02,  3.5640e-02,  5.4127e-02,\n",
              "                       2.9390e-02, -1.8360e-02, -4.6529e-02,  3.6899e-02,  1.5362e-02,\n",
              "                      -1.0643e-02, -7.6066e-03, -3.7307e-02, -3.8841e-02, -1.8993e-02,\n",
              "                      -5.5324e-02, -1.2378e-02,  5.6796e-03,  4.8141e-03, -2.4033e-02,\n",
              "                       2.0382e-02,  2.5751e-02, -1.9400e-03,  5.0917e-02,  4.9454e-02,\n",
              "                       4.1960e-02, -3.5571e-02,  3.4937e-02])),\n",
              "             ('conv2.1.running_var',\n",
              "              tensor([0.9556, 0.9303, 0.9185, 0.9232, 0.9253, 0.9233, 0.9180, 0.9317, 0.9250,\n",
              "                      0.9236, 0.9338, 0.9300, 0.9183, 0.9235, 0.9343, 0.9260, 0.9165, 0.9273,\n",
              "                      0.9229, 0.9366, 0.9218, 0.9309, 0.9282, 0.9182, 0.9404, 0.9190, 0.9439,\n",
              "                      0.9297, 0.9536, 0.9940, 0.9548, 0.9281, 0.9178, 0.9203, 0.9293, 0.9269,\n",
              "                      0.9213, 0.9268, 0.9263, 0.9227, 0.9321, 0.9290, 0.9354, 0.9329, 0.9496,\n",
              "                      0.9333, 0.9326, 0.9222, 0.9448, 0.9244, 0.9255, 0.9432, 0.9488, 0.9227,\n",
              "                      0.9322, 0.9435, 0.9199, 0.9220, 0.9170, 0.9256, 0.9514, 0.9132, 0.9180,\n",
              "                      0.9272, 0.9241, 0.9191, 0.9570, 0.9215, 0.9636, 0.9349, 0.9421, 0.9324,\n",
              "                      0.9274, 0.9242, 0.9250, 0.9144, 0.9199, 0.9636, 0.9305, 0.9196, 0.9230,\n",
              "                      0.9173, 0.9135, 0.9344, 0.9378, 0.9229, 0.9182, 0.9359, 0.9200, 0.9259,\n",
              "                      0.9364, 0.9304, 0.9328, 0.9422, 0.9274, 0.9272, 0.9340, 0.9432, 0.9382,\n",
              "                      0.9241, 0.9128, 0.9206, 0.9395, 0.9223, 0.9490, 0.9452, 0.9202, 0.9247,\n",
              "                      0.9396, 0.9196, 0.9288, 0.9366, 0.9435, 0.9255, 0.9275, 0.9169, 0.9119,\n",
              "                      0.9511, 0.9310, 0.9153, 0.9212, 0.9209, 0.9365, 0.9284, 0.9359, 0.9223,\n",
              "                      0.9147, 0.9220])),\n",
              "             ('conv2.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv3.0.weight',\n",
              "              tensor([[[[-1.1296e-02,  3.7234e-02, -6.3458e-03],\n",
              "                        [-2.5140e-02, -3.1746e-02,  1.7652e-02],\n",
              "                        [ 1.1846e-02,  3.5071e-02, -3.2379e-02]],\n",
              "              \n",
              "                       [[-2.3285e-02,  2.8994e-02,  3.8184e-02],\n",
              "                        [-2.3800e-02, -3.9537e-02,  6.0377e-03],\n",
              "                        [ 3.0296e-02, -1.4434e-02,  4.1342e-02]],\n",
              "              \n",
              "                       [[ 6.7813e-03,  1.6588e-02,  4.1387e-02],\n",
              "                        [ 2.6224e-03, -1.0789e-02, -4.0457e-02],\n",
              "                        [ 1.6361e-02, -2.6033e-03,  9.2437e-06]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 2.2807e-02, -1.3763e-02, -3.1944e-02],\n",
              "                        [-3.7565e-02, -2.1086e-02,  1.2704e-02],\n",
              "                        [ 4.9216e-03,  8.6326e-03, -4.4195e-03]],\n",
              "              \n",
              "                       [[-2.0492e-02,  2.8187e-02,  2.6112e-02],\n",
              "                        [ 3.9010e-02,  3.4868e-02,  1.2724e-02],\n",
              "                        [-7.1236e-03, -3.5503e-02,  2.0285e-02]],\n",
              "              \n",
              "                       [[-2.9353e-02, -1.4361e-02,  3.8072e-02],\n",
              "                        [ 3.3207e-02,  7.2705e-03,  1.1401e-02],\n",
              "                        [ 1.1376e-02, -3.1927e-03, -3.6262e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 3.7889e-02,  1.8312e-02,  2.8202e-02],\n",
              "                        [-3.8624e-02, -1.4707e-02,  3.6360e-02],\n",
              "                        [-5.3850e-03, -1.1164e-02, -4.4638e-03]],\n",
              "              \n",
              "                       [[-3.3361e-02, -3.3310e-02,  2.7460e-02],\n",
              "                        [-2.7131e-03,  4.7736e-03, -3.9764e-02],\n",
              "                        [-2.6189e-02, -5.2219e-03,  1.2695e-02]],\n",
              "              \n",
              "                       [[ 2.1955e-02, -1.7596e-02, -1.8694e-02],\n",
              "                        [-1.3584e-02, -2.8682e-02,  9.7986e-03],\n",
              "                        [ 3.7525e-02,  3.9063e-02,  1.2827e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.4003e-02, -2.5730e-02, -3.7799e-02],\n",
              "                        [ 3.3611e-02, -5.0234e-03, -3.1209e-02],\n",
              "                        [ 1.5020e-03,  9.4638e-03, -4.0899e-02]],\n",
              "              \n",
              "                       [[-7.2300e-03,  2.4333e-02,  3.5572e-02],\n",
              "                        [-3.6426e-02, -1.2126e-02, -2.8046e-03],\n",
              "                        [-2.5814e-04,  1.8290e-02,  1.3035e-03]],\n",
              "              \n",
              "                       [[ 2.2061e-02, -3.9440e-02, -4.0061e-02],\n",
              "                        [-1.4680e-02, -3.8409e-02,  2.4737e-02],\n",
              "                        [-2.4924e-02, -2.1802e-02,  3.5092e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 4.0683e-02,  3.3152e-02,  1.6132e-02],\n",
              "                        [ 1.5646e-02, -3.0077e-02,  3.7931e-02],\n",
              "                        [-1.4880e-02,  4.0199e-02, -1.3444e-02]],\n",
              "              \n",
              "                       [[-2.2082e-02,  2.6297e-02, -3.7278e-03],\n",
              "                        [ 9.8695e-03,  2.5163e-02,  1.1337e-02],\n",
              "                        [-1.2990e-02, -1.9705e-02,  9.3979e-03]],\n",
              "              \n",
              "                       [[ 2.9008e-02,  3.2884e-02,  3.4437e-02],\n",
              "                        [ 9.3514e-03,  3.6485e-02,  4.1477e-02],\n",
              "                        [ 3.4074e-02,  3.6879e-02, -2.3113e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.2053e-02, -6.4548e-03,  1.5526e-02],\n",
              "                        [ 2.2614e-02,  2.7453e-02,  2.2915e-02],\n",
              "                        [-3.5506e-02, -1.5111e-02, -2.4114e-02]],\n",
              "              \n",
              "                       [[-2.7133e-02,  4.2581e-04, -2.3549e-02],\n",
              "                        [-3.1213e-02,  1.5428e-02, -4.8060e-03],\n",
              "                        [-1.2551e-02,  3.4357e-02,  3.7400e-02]],\n",
              "              \n",
              "                       [[-3.9433e-03,  1.5599e-02,  2.8049e-03],\n",
              "                        [ 3.4177e-03,  3.4869e-02, -2.1280e-02],\n",
              "                        [-1.4409e-02, -1.4707e-02, -1.9280e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-1.0845e-02,  1.5121e-02,  9.7062e-03],\n",
              "                        [ 7.3305e-03, -1.8072e-02, -7.6561e-03],\n",
              "                        [-2.9039e-02, -3.6593e-02, -8.9373e-03]],\n",
              "              \n",
              "                       [[-3.3628e-02,  3.5270e-02, -2.8752e-02],\n",
              "                        [-3.5046e-02, -2.3440e-02,  1.4996e-02],\n",
              "                        [ 3.5497e-02,  1.3249e-02, -6.7002e-03]],\n",
              "              \n",
              "                       [[-4.5444e-03,  1.1689e-02, -2.9225e-02],\n",
              "                        [-8.7543e-03,  3.2695e-03, -2.9043e-02],\n",
              "                        [-1.5048e-02, -2.1378e-02, -3.2598e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.9793e-02, -3.4513e-02,  1.8145e-02],\n",
              "                        [ 6.8180e-03, -6.6837e-03,  1.9520e-02],\n",
              "                        [-3.8790e-02, -1.7146e-02, -9.5527e-03]],\n",
              "              \n",
              "                       [[ 3.7039e-02,  6.1294e-03,  1.6024e-02],\n",
              "                        [ 4.0196e-02,  2.2258e-02,  3.8822e-02],\n",
              "                        [ 3.9699e-02, -1.1919e-02, -2.6242e-02]],\n",
              "              \n",
              "                       [[ 2.6259e-02, -1.9140e-02,  4.1991e-03],\n",
              "                        [ 3.7039e-02,  3.1189e-02, -3.4134e-02],\n",
              "                        [-9.3064e-03, -2.7913e-02, -2.0073e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-4.1268e-02, -9.0669e-03, -1.5889e-03],\n",
              "                        [-2.7609e-02, -1.9126e-02,  3.0876e-03],\n",
              "                        [ 2.4537e-02, -3.7430e-02,  1.3140e-02]],\n",
              "              \n",
              "                       [[-9.9465e-03,  1.9384e-02,  4.3627e-03],\n",
              "                        [-1.6070e-02, -1.2073e-02, -4.1668e-03],\n",
              "                        [-8.4864e-03,  1.3453e-02,  3.2535e-02]],\n",
              "              \n",
              "                       [[-3.2779e-02, -2.4784e-02,  8.5504e-03],\n",
              "                        [ 2.2552e-02,  7.5583e-03, -2.8986e-02],\n",
              "                        [ 3.8964e-02, -2.8841e-02, -2.1856e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.6664e-02, -1.2477e-04, -3.7810e-02],\n",
              "                        [-3.9111e-02,  1.4904e-02, -2.4482e-02],\n",
              "                        [-3.9841e-03,  1.4759e-02,  2.2761e-03]],\n",
              "              \n",
              "                       [[ 3.4098e-02, -2.1685e-02,  2.1018e-02],\n",
              "                        [-3.0749e-02, -1.9424e-02, -3.3245e-02],\n",
              "                        [ 1.0569e-02,  2.1625e-02, -2.0361e-02]],\n",
              "              \n",
              "                       [[ 1.1577e-02, -2.9355e-02, -4.8733e-03],\n",
              "                        [-7.4396e-03,  2.6321e-02,  3.6980e-02],\n",
              "                        [-2.5395e-02, -3.2135e-02,  1.3879e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 3.1644e-02,  3.9523e-02,  9.2333e-03],\n",
              "                        [-1.7772e-02,  1.0222e-02,  2.8444e-02],\n",
              "                        [ 3.8256e-03,  1.9353e-03,  2.6899e-02]],\n",
              "              \n",
              "                       [[-2.5418e-02, -3.5702e-02, -1.8125e-02],\n",
              "                        [-1.6942e-02, -2.5018e-02,  2.5545e-02],\n",
              "                        [ 2.1048e-02,  1.9798e-03, -1.0847e-02]],\n",
              "              \n",
              "                       [[-1.5844e-02,  1.1307e-02,  2.7565e-02],\n",
              "                        [ 1.0351e-02, -2.6073e-02, -1.4044e-02],\n",
              "                        [ 1.8650e-02,  3.2459e-02, -3.0195e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 3.0461e-02,  6.8160e-03, -3.8838e-02],\n",
              "                        [-3.6144e-02, -1.6051e-02,  5.4354e-03],\n",
              "                        [ 4.0407e-02, -1.5294e-03,  3.9157e-02]],\n",
              "              \n",
              "                       [[ 1.4942e-02,  5.0988e-03,  2.0383e-02],\n",
              "                        [-9.4489e-03,  1.8636e-02,  3.7809e-02],\n",
              "                        [-1.1967e-02, -1.8936e-02, -1.5009e-02]],\n",
              "              \n",
              "                       [[ 2.1348e-02,  1.0090e-02, -2.0279e-02],\n",
              "                        [-9.4860e-03, -5.7327e-03, -1.6515e-02],\n",
              "                        [-8.5877e-03, -3.2463e-02,  4.3146e-03]]]])),\n",
              "             ('conv3.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv3.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1.])),\n",
              "             ('conv3.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv3.1.running_mean',\n",
              "              tensor([ 0.0440,  0.0242,  0.0907, -0.0067, -0.0634, -0.0103, -0.0056, -0.0777,\n",
              "                       0.0382,  0.0381, -0.0717, -0.0541, -0.0463, -0.0330,  0.0289,  0.0383,\n",
              "                       0.0404, -0.0497,  0.0081,  0.0206,  0.0081, -0.0467,  0.0707, -0.0392,\n",
              "                       0.0793, -0.0228,  0.0126,  0.0330, -0.0069, -0.0055, -0.0373,  0.0614,\n",
              "                      -0.0078, -0.0640,  0.0128, -0.0483,  0.0529,  0.0450,  0.0253,  0.0283,\n",
              "                       0.0717, -0.0627,  0.0107,  0.0498, -0.0005,  0.0706,  0.0282,  0.0551,\n",
              "                      -0.0151, -0.0232,  0.0166,  0.0274,  0.0390, -0.0015,  0.0294,  0.0206,\n",
              "                       0.0351,  0.0216, -0.0291, -0.0157, -0.0921,  0.0353, -0.0570, -0.0311,\n",
              "                      -0.0581, -0.0297, -0.0797,  0.0479,  0.1123, -0.0419, -0.0095, -0.0289,\n",
              "                      -0.0322,  0.0524, -0.0134,  0.0026, -0.0426,  0.0085, -0.0492,  0.0252,\n",
              "                      -0.0020, -0.0803,  0.0650, -0.0066,  0.0055,  0.0124, -0.0389,  0.0448,\n",
              "                       0.0808,  0.0347, -0.0102, -0.0630, -0.0255, -0.0447,  0.0249, -0.0174,\n",
              "                       0.0320,  0.0029, -0.0170, -0.0260, -0.0002,  0.0761,  0.0187, -0.0334,\n",
              "                       0.0338,  0.0852,  0.0261,  0.0384,  0.0740, -0.0277, -0.0535,  0.0714,\n",
              "                       0.0633,  0.0869, -0.0528, -0.0027,  0.0441, -0.0125,  0.0091,  0.0039,\n",
              "                      -0.0259,  0.0446, -0.0544,  0.0021,  0.0087, -0.0867, -0.0317, -0.0810,\n",
              "                      -0.0580,  0.0335, -0.1092, -0.0676,  0.1087, -0.0427, -0.0278,  0.0192,\n",
              "                       0.0087, -0.0592, -0.0254,  0.0275,  0.1370,  0.0035, -0.0983,  0.0057,\n",
              "                       0.0213, -0.0238, -0.0996,  0.0307, -0.0118,  0.0171,  0.0386, -0.0298,\n",
              "                       0.0079,  0.0392,  0.0685,  0.0356, -0.0199, -0.0080,  0.0456, -0.0041,\n",
              "                       0.0054,  0.0007, -0.0369,  0.0473, -0.0192, -0.0233, -0.0483,  0.0134,\n",
              "                       0.0684, -0.0435,  0.0328,  0.0247, -0.0261,  0.0573,  0.0094, -0.0040,\n",
              "                      -0.0115,  0.0020,  0.0214, -0.0102,  0.0456, -0.0799,  0.0256, -0.0461,\n",
              "                       0.0040,  0.0541,  0.0165,  0.0135,  0.0184, -0.0130, -0.0626,  0.0872,\n",
              "                      -0.0190,  0.0692,  0.0106, -0.0239,  0.0166, -0.0127,  0.0550,  0.0447,\n",
              "                       0.1221, -0.0247, -0.0593, -0.0291,  0.0280,  0.0159, -0.0143, -0.1060,\n",
              "                       0.0210,  0.0055, -0.0553,  0.0436,  0.0251,  0.0515,  0.0514,  0.0032,\n",
              "                       0.0633,  0.0292,  0.0994, -0.0449,  0.0014,  0.0184,  0.0257,  0.0921,\n",
              "                      -0.0292,  0.0713, -0.0637,  0.1189,  0.0117,  0.0272,  0.0118, -0.0509,\n",
              "                      -0.1191,  0.0017,  0.0715, -0.0473,  0.0300,  0.0240,  0.0497,  0.1094,\n",
              "                      -0.0613, -0.0059, -0.0448, -0.0175, -0.0123,  0.0789,  0.0074,  0.0204,\n",
              "                      -0.0445, -0.0018,  0.0052, -0.1167, -0.0630,  0.0740,  0.0979,  0.0330])),\n",
              "             ('conv3.1.running_var',\n",
              "              tensor([0.9235, 0.9261, 0.9395, 0.9381, 0.9361, 0.9278, 0.9381, 0.9239, 0.9265,\n",
              "                      0.9484, 0.9439, 0.9526, 0.9288, 0.9329, 0.9389, 0.9305, 0.9474, 0.9191,\n",
              "                      0.9213, 0.9308, 0.9345, 0.9323, 0.9605, 0.9372, 0.9323, 0.9353, 0.9419,\n",
              "                      0.9310, 0.9254, 0.9221, 0.9320, 0.9511, 0.9266, 0.9336, 0.9271, 0.9511,\n",
              "                      0.9415, 0.9563, 0.9384, 0.9345, 0.9337, 0.9438, 0.9415, 0.9292, 0.9340,\n",
              "                      0.9358, 0.9372, 0.9212, 0.9364, 0.9257, 0.9375, 0.9275, 0.9440, 0.9238,\n",
              "                      0.9374, 0.9378, 0.9242, 0.9315, 0.9418, 0.9270, 0.9449, 0.9352, 0.9227,\n",
              "                      0.9329, 0.9289, 0.9317, 0.9412, 0.9315, 0.9437, 0.9271, 0.9198, 0.9341,\n",
              "                      0.9242, 0.9390, 0.9279, 0.9264, 0.9317, 0.9253, 0.9291, 0.9344, 0.9250,\n",
              "                      0.9509, 0.9343, 0.9365, 0.9392, 0.9317, 0.9333, 0.9383, 0.9312, 0.9332,\n",
              "                      0.9213, 0.9436, 0.9281, 0.9470, 0.9464, 0.9484, 0.9335, 0.9331, 0.9209,\n",
              "                      0.9359, 0.9336, 0.9607, 0.9489, 0.9244, 0.9366, 0.9859, 0.9412, 0.9274,\n",
              "                      0.9461, 0.9281, 0.9322, 0.9346, 0.9300, 0.9383, 0.9426, 0.9238, 0.9403,\n",
              "                      0.9233, 0.9202, 0.9279, 0.9320, 0.9406, 0.9312, 0.9401, 0.9234, 0.9450,\n",
              "                      0.9338, 0.9302, 0.9335, 0.9294, 0.9535, 0.9352, 0.9531, 0.9613, 0.9358,\n",
              "                      0.9234, 0.9319, 0.9470, 0.9387, 0.9316, 0.9555, 0.9208, 0.9333, 0.9277,\n",
              "                      0.9446, 0.9286, 0.9458, 0.9233, 0.9375, 0.9277, 0.9379, 0.9183, 0.9215,\n",
              "                      0.9284, 0.9294, 0.9408, 0.9338, 0.9449, 0.9451, 0.9190, 0.9384, 0.9416,\n",
              "                      0.9267, 0.9454, 0.9347, 0.9355, 0.9282, 0.9249, 0.9473, 0.9206, 0.9285,\n",
              "                      0.9411, 0.9259, 0.9549, 0.9338, 0.9244, 0.9232, 0.9357, 0.9329, 0.9261,\n",
              "                      0.9299, 0.9394, 0.9263, 0.9378, 0.9262, 0.9324, 0.9320, 0.9457, 0.9294,\n",
              "                      0.9442, 0.9528, 0.9357, 0.9282, 0.9316, 0.9248, 0.9570, 0.9309, 0.9316,\n",
              "                      0.9419, 0.9276, 0.9470, 0.9295, 0.9412, 0.9478, 0.9278, 0.9358, 0.9389,\n",
              "                      0.9311, 0.9308, 0.9271, 0.9322, 0.9343, 0.9260, 0.9402, 0.9436, 0.9242,\n",
              "                      0.9294, 0.9305, 0.9402, 0.9350, 0.9336, 0.9346, 0.9299, 0.9548, 0.9308,\n",
              "                      0.9383, 0.9305, 0.9495, 0.9331, 0.9453, 0.9276, 0.9508, 0.9604, 0.9250,\n",
              "                      0.9300, 0.9277, 0.9316, 0.9364, 0.9385, 0.9484, 0.9315, 0.9300, 0.9298,\n",
              "                      0.9279, 0.9632, 0.9461, 0.9299, 0.9279, 0.9354, 0.9201, 0.9327, 0.9413,\n",
              "                      0.9286, 0.9525, 0.9492, 0.9250])),\n",
              "             ('conv3.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv3.3.weight',\n",
              "              tensor([[[[-0.0209,  0.0021,  0.0353],\n",
              "                        [ 0.0339, -0.0215, -0.0226],\n",
              "                        [-0.0189,  0.0344, -0.0138]],\n",
              "              \n",
              "                       [[-0.0346,  0.0308, -0.0305],\n",
              "                        [ 0.0360,  0.0110,  0.0190],\n",
              "                        [-0.0314, -0.0035, -0.0251]],\n",
              "              \n",
              "                       [[ 0.0297,  0.0194,  0.0025],\n",
              "                        [ 0.0072, -0.0111, -0.0112],\n",
              "                        [ 0.0079,  0.0330, -0.0349]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0208,  0.0054, -0.0287],\n",
              "                        [-0.0285,  0.0058,  0.0026],\n",
              "                        [-0.0311, -0.0306,  0.0108]],\n",
              "              \n",
              "                       [[ 0.0051, -0.0189, -0.0316],\n",
              "                        [ 0.0107, -0.0203, -0.0328],\n",
              "                        [ 0.0311,  0.0304, -0.0124]],\n",
              "              \n",
              "                       [[-0.0184,  0.0031, -0.0040],\n",
              "                        [ 0.0345,  0.0213, -0.0087],\n",
              "                        [ 0.0185,  0.0091,  0.0021]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0160,  0.0121, -0.0345],\n",
              "                        [-0.0080,  0.0229,  0.0150],\n",
              "                        [ 0.0301,  0.0059,  0.0183]],\n",
              "              \n",
              "                       [[-0.0216,  0.0085, -0.0063],\n",
              "                        [ 0.0135,  0.0200,  0.0080],\n",
              "                        [ 0.0235, -0.0023,  0.0345]],\n",
              "              \n",
              "                       [[ 0.0149,  0.0053,  0.0028],\n",
              "                        [ 0.0335, -0.0264,  0.0091],\n",
              "                        [-0.0186, -0.0228, -0.0039]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0153, -0.0350,  0.0144],\n",
              "                        [ 0.0148, -0.0143,  0.0046],\n",
              "                        [ 0.0128, -0.0034, -0.0193]],\n",
              "              \n",
              "                       [[-0.0213, -0.0180, -0.0216],\n",
              "                        [-0.0199,  0.0311,  0.0011],\n",
              "                        [-0.0335,  0.0128, -0.0201]],\n",
              "              \n",
              "                       [[-0.0192, -0.0083,  0.0135],\n",
              "                        [-0.0163, -0.0267, -0.0125],\n",
              "                        [ 0.0322, -0.0281,  0.0103]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0237,  0.0238,  0.0047],\n",
              "                        [ 0.0119,  0.0011,  0.0105],\n",
              "                        [ 0.0297, -0.0242, -0.0139]],\n",
              "              \n",
              "                       [[-0.0146, -0.0247, -0.0265],\n",
              "                        [-0.0203,  0.0171,  0.0291],\n",
              "                        [ 0.0209,  0.0341,  0.0005]],\n",
              "              \n",
              "                       [[-0.0064, -0.0292,  0.0065],\n",
              "                        [-0.0223, -0.0091, -0.0245],\n",
              "                        [-0.0182, -0.0222, -0.0199]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0042, -0.0326,  0.0144],\n",
              "                        [ 0.0336, -0.0333, -0.0206],\n",
              "                        [-0.0017, -0.0176, -0.0061]],\n",
              "              \n",
              "                       [[ 0.0344,  0.0207, -0.0002],\n",
              "                        [ 0.0273,  0.0044,  0.0082],\n",
              "                        [ 0.0126, -0.0335, -0.0174]],\n",
              "              \n",
              "                       [[-0.0252,  0.0046, -0.0037],\n",
              "                        [-0.0199, -0.0337,  0.0247],\n",
              "                        [ 0.0026,  0.0318,  0.0316]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0257,  0.0190,  0.0276],\n",
              "                        [ 0.0345,  0.0163,  0.0020],\n",
              "                        [-0.0348,  0.0170, -0.0257]],\n",
              "              \n",
              "                       [[ 0.0115,  0.0148, -0.0120],\n",
              "                        [ 0.0314, -0.0201, -0.0283],\n",
              "                        [-0.0310, -0.0251, -0.0321]],\n",
              "              \n",
              "                       [[ 0.0032, -0.0361,  0.0281],\n",
              "                        [ 0.0165,  0.0351, -0.0100],\n",
              "                        [ 0.0330, -0.0268,  0.0206]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0243,  0.0051,  0.0175],\n",
              "                        [-0.0285, -0.0116,  0.0254],\n",
              "                        [-0.0352,  0.0257, -0.0036]],\n",
              "              \n",
              "                       [[-0.0150,  0.0047, -0.0251],\n",
              "                        [ 0.0126, -0.0214,  0.0333],\n",
              "                        [-0.0146, -0.0279,  0.0240]],\n",
              "              \n",
              "                       [[-0.0133,  0.0309,  0.0008],\n",
              "                        [ 0.0119,  0.0337,  0.0133],\n",
              "                        [-0.0300, -0.0035, -0.0185]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0046,  0.0069,  0.0103],\n",
              "                        [ 0.0119,  0.0323,  0.0072],\n",
              "                        [-0.0216,  0.0140, -0.0095]],\n",
              "              \n",
              "                       [[ 0.0074,  0.0244, -0.0128],\n",
              "                        [-0.0041, -0.0333,  0.0186],\n",
              "                        [-0.0234,  0.0306,  0.0018]],\n",
              "              \n",
              "                       [[ 0.0154,  0.0223,  0.0320],\n",
              "                        [ 0.0150, -0.0136, -0.0307],\n",
              "                        [-0.0339,  0.0127, -0.0354]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0207, -0.0089, -0.0204],\n",
              "                        [ 0.0276,  0.0284, -0.0273],\n",
              "                        [ 0.0274,  0.0188,  0.0006]],\n",
              "              \n",
              "                       [[-0.0125,  0.0219,  0.0358],\n",
              "                        [ 0.0359, -0.0208,  0.0310],\n",
              "                        [ 0.0187, -0.0285,  0.0198]],\n",
              "              \n",
              "                       [[-0.0358,  0.0152,  0.0348],\n",
              "                        [ 0.0025, -0.0096,  0.0062],\n",
              "                        [ 0.0174, -0.0250, -0.0124]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0166, -0.0260, -0.0293],\n",
              "                        [-0.0253, -0.0250,  0.0045],\n",
              "                        [-0.0075,  0.0061, -0.0237]],\n",
              "              \n",
              "                       [[ 0.0264, -0.0100, -0.0051],\n",
              "                        [-0.0278, -0.0157, -0.0246],\n",
              "                        [ 0.0284,  0.0323, -0.0305]],\n",
              "              \n",
              "                       [[ 0.0293,  0.0129,  0.0272],\n",
              "                        [-0.0127, -0.0010, -0.0267],\n",
              "                        [-0.0072, -0.0336,  0.0210]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0029, -0.0165,  0.0143],\n",
              "                        [ 0.0057, -0.0331,  0.0343],\n",
              "                        [ 0.0148, -0.0351, -0.0097]],\n",
              "              \n",
              "                       [[-0.0002, -0.0203, -0.0228],\n",
              "                        [-0.0128, -0.0254,  0.0045],\n",
              "                        [-0.0356,  0.0274,  0.0318]],\n",
              "              \n",
              "                       [[ 0.0216,  0.0279,  0.0177],\n",
              "                        [ 0.0150, -0.0306,  0.0211],\n",
              "                        [ 0.0119, -0.0348,  0.0333]]]])),\n",
              "             ('conv3.3.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv3.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1.])),\n",
              "             ('conv3.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv3.4.running_mean',\n",
              "              tensor([-0.0007, -0.0121,  0.0048, -0.0053,  0.0017, -0.0147,  0.0287, -0.0172,\n",
              "                       0.0028,  0.0251,  0.0253, -0.0149,  0.0392,  0.0630,  0.0054, -0.0184,\n",
              "                       0.0054,  0.0160, -0.0413,  0.0163, -0.0188,  0.0051,  0.0129, -0.0084,\n",
              "                      -0.0016, -0.0158, -0.0126,  0.0217, -0.0139, -0.0264, -0.0348, -0.0790,\n",
              "                       0.0180,  0.0015, -0.0199, -0.0252,  0.0087,  0.0387,  0.0918, -0.0306,\n",
              "                      -0.0191, -0.0044,  0.0089, -0.0135,  0.0111, -0.0334, -0.0652, -0.0305,\n",
              "                      -0.0230,  0.0134, -0.0045, -0.0644, -0.0517, -0.0066, -0.0416,  0.0189,\n",
              "                      -0.0137,  0.0037, -0.0252, -0.0204, -0.0047, -0.0292,  0.0114,  0.0116,\n",
              "                      -0.0126, -0.0572, -0.0244, -0.0277,  0.0956, -0.0432,  0.0313, -0.0448,\n",
              "                      -0.0469,  0.0405,  0.0059,  0.0311,  0.0511,  0.0428,  0.0362, -0.0330,\n",
              "                      -0.0042,  0.0181, -0.0128,  0.0025, -0.0328, -0.0071,  0.0295,  0.0050,\n",
              "                      -0.0243,  0.0316, -0.0590, -0.0097,  0.0009, -0.0183,  0.0160,  0.0223,\n",
              "                      -0.0149, -0.0317,  0.0310,  0.0080, -0.0703, -0.0169, -0.0182, -0.0158,\n",
              "                      -0.0216, -0.0228, -0.0139,  0.0008, -0.0200,  0.0472,  0.0113, -0.0243,\n",
              "                      -0.0166, -0.0067, -0.0458,  0.0265, -0.0213,  0.0124, -0.0056, -0.0008,\n",
              "                       0.0282, -0.0268,  0.0139, -0.0767, -0.0410,  0.0594, -0.0029, -0.0180,\n",
              "                      -0.0098,  0.0464,  0.0240,  0.0081, -0.0043,  0.0316,  0.0048,  0.0280,\n",
              "                       0.0462,  0.0411, -0.0012, -0.0170, -0.0460,  0.0101,  0.0589,  0.0459,\n",
              "                       0.0653, -0.0490,  0.0510,  0.0091,  0.0508,  0.0290, -0.0091, -0.0013,\n",
              "                       0.0420,  0.0304, -0.0129,  0.0055,  0.0434, -0.0054,  0.0613,  0.0172,\n",
              "                      -0.0036, -0.0704, -0.0945,  0.0092, -0.0130, -0.0034,  0.0179, -0.0351,\n",
              "                       0.0373,  0.0267, -0.0175,  0.0359,  0.0219,  0.0083, -0.0357, -0.0689,\n",
              "                      -0.0579,  0.0282,  0.0194,  0.0012,  0.0893,  0.0241,  0.0045,  0.0166,\n",
              "                       0.0126,  0.0153, -0.0535,  0.0170,  0.0162, -0.0146, -0.0835, -0.0666,\n",
              "                      -0.0104,  0.0045, -0.0581, -0.0188, -0.0368, -0.0051,  0.0247, -0.0364,\n",
              "                       0.0428, -0.0021,  0.0828,  0.0091, -0.0041, -0.0515, -0.0323,  0.0079,\n",
              "                       0.0023,  0.0356,  0.0290,  0.0741, -0.0194,  0.0285, -0.0220, -0.0086,\n",
              "                       0.0237, -0.0137, -0.0094,  0.0375,  0.0137, -0.0050, -0.0923, -0.0178,\n",
              "                      -0.0937,  0.0045,  0.0078, -0.0148,  0.0537,  0.0165, -0.0730,  0.0053,\n",
              "                       0.0683,  0.0422, -0.0229,  0.0565,  0.0272,  0.0338, -0.0226,  0.0201,\n",
              "                       0.0259,  0.0182, -0.0163, -0.0126,  0.0151, -0.0362, -0.0003, -0.0194,\n",
              "                       0.0810,  0.0175,  0.0439, -0.0625, -0.0293, -0.0132,  0.0671, -0.0279])),\n",
              "             ('conv3.4.running_var',\n",
              "              tensor([0.9265, 0.9466, 0.9278, 0.9303, 0.9293, 0.9260, 0.9283, 0.9260, 0.9279,\n",
              "                      0.9261, 0.9397, 0.9343, 0.9441, 0.9273, 0.9269, 0.9306, 0.9264, 0.9295,\n",
              "                      0.9329, 0.9266, 0.9308, 0.9405, 0.9440, 0.9335, 0.9328, 0.9339, 0.9261,\n",
              "                      0.9313, 0.9305, 0.9278, 0.9257, 0.9290, 0.9325, 0.9253, 0.9338, 0.9354,\n",
              "                      0.9327, 0.9268, 0.9519, 0.9292, 0.9284, 0.9279, 0.9291, 0.9308, 0.9249,\n",
              "                      0.9263, 0.9286, 0.9308, 0.9375, 0.9254, 0.9311, 0.9390, 0.9348, 0.9355,\n",
              "                      0.9331, 0.9250, 0.9274, 0.9282, 0.9306, 0.9357, 0.9287, 0.9301, 0.9252,\n",
              "                      0.9317, 0.9296, 0.9297, 0.9235, 0.9296, 0.9378, 0.9240, 0.9275, 0.9385,\n",
              "                      0.9321, 0.9283, 0.9297, 0.9303, 0.9292, 0.9223, 0.9336, 0.9251, 0.9250,\n",
              "                      0.9304, 0.9300, 0.9348, 0.9329, 0.9298, 0.9318, 0.9298, 0.9280, 0.9280,\n",
              "                      0.9447, 0.9317, 0.9300, 0.9286, 0.9284, 0.9285, 0.9260, 0.9254, 0.9267,\n",
              "                      0.9302, 0.9360, 0.9325, 0.9309, 0.9270, 0.9329, 0.9383, 0.9256, 0.9319,\n",
              "                      0.9379, 0.9289, 0.9287, 0.9377, 0.9327, 0.9204, 0.9364, 0.9278, 0.9250,\n",
              "                      0.9331, 0.9283, 0.9330, 0.9216, 0.9273, 0.9297, 0.9334, 0.9317, 0.9436,\n",
              "                      0.9325, 0.9278, 0.9250, 0.9305, 0.9319, 0.9241, 0.9369, 0.9301, 0.9214,\n",
              "                      0.9324, 0.9358, 0.9283, 0.9270, 0.9296, 0.9386, 0.9326, 0.9267, 0.9372,\n",
              "                      0.9376, 0.9257, 0.9424, 0.9236, 0.9394, 0.9299, 0.9240, 0.9261, 0.9354,\n",
              "                      0.9282, 0.9303, 0.9349, 0.9242, 0.9351, 0.9465, 0.9284, 0.9287, 0.9303,\n",
              "                      0.9324, 0.9309, 0.9260, 0.9243, 0.9301, 0.9593, 0.9325, 0.9349, 0.9257,\n",
              "                      0.9353, 0.9302, 0.9300, 0.9317, 0.9303, 0.9324, 0.9302, 0.9295, 0.9285,\n",
              "                      0.9331, 0.9336, 0.9270, 0.9266, 0.9325, 0.9393, 0.9313, 0.9320, 0.9274,\n",
              "                      0.9374, 0.9277, 0.9297, 0.9390, 0.9259, 0.9357, 0.9301, 0.9369, 0.9242,\n",
              "                      0.9229, 0.9256, 0.9310, 0.9275, 0.9407, 0.9346, 0.9295, 0.9228, 0.9384,\n",
              "                      0.9242, 0.9304, 0.9346, 0.9238, 0.9407, 0.9295, 0.9331, 0.9267, 0.9207,\n",
              "                      0.9281, 0.9523, 0.9305, 0.9354, 0.9332, 0.9265, 0.9486, 0.9290, 0.9572,\n",
              "                      0.9293, 0.9278, 0.9335, 0.9353, 0.9304, 0.9287, 0.9254, 0.9336, 0.9355,\n",
              "                      0.9268, 0.9336, 0.9332, 0.9293, 0.9309, 0.9268, 0.9296, 0.9300, 0.9271,\n",
              "                      0.9290, 0.9381, 0.9256, 0.9259, 0.9320, 0.9383, 0.9345, 0.9323, 0.9648,\n",
              "                      0.9334, 0.9337, 0.9426, 0.9368])),\n",
              "             ('conv3.4.num_batches_tracked', tensor(1)),\n",
              "             ('conv4.0.weight',\n",
              "              tensor([[[[-2.7796e-02,  2.4026e-02, -2.8851e-02],\n",
              "                        [ 2.1701e-02,  4.9408e-04, -7.9013e-03],\n",
              "                        [ 1.4469e-02, -1.4730e-02, -1.5453e-02]],\n",
              "              \n",
              "                       [[ 1.5835e-02,  1.5441e-02, -2.0643e-02],\n",
              "                        [ 1.7721e-02, -1.9991e-02, -2.0564e-02],\n",
              "                        [ 2.3572e-02,  1.1438e-02,  2.6855e-02]],\n",
              "              \n",
              "                       [[-7.8443e-03,  2.1154e-02, -2.0182e-02],\n",
              "                        [-2.6365e-02,  6.3634e-03, -1.6405e-02],\n",
              "                        [-2.3936e-02,  3.4895e-03, -1.0899e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 6.7144e-03, -2.1784e-02, -2.3641e-02],\n",
              "                        [ 5.6171e-03, -3.9025e-03, -1.4373e-02],\n",
              "                        [-2.5176e-02, -2.7970e-02, -1.7605e-02]],\n",
              "              \n",
              "                       [[ 2.2176e-02, -2.0789e-02,  2.8761e-02],\n",
              "                        [ 1.8684e-02,  1.6136e-03, -1.5721e-02],\n",
              "                        [ 2.8982e-02, -1.2944e-03,  1.0761e-02]],\n",
              "              \n",
              "                       [[ 2.0986e-02,  1.7350e-02, -2.2380e-02],\n",
              "                        [ 6.3184e-03,  2.3548e-02, -2.2527e-02],\n",
              "                        [-2.6047e-02, -1.2994e-02,  9.9829e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 3.9733e-03, -2.3888e-02,  2.5679e-02],\n",
              "                        [-1.9375e-02, -1.8158e-02,  1.1355e-03],\n",
              "                        [-1.3597e-02,  4.7196e-03,  2.0916e-03]],\n",
              "              \n",
              "                       [[ 2.5901e-02,  1.9901e-03, -2.2630e-02],\n",
              "                        [-2.1815e-03,  1.6884e-02,  3.3413e-03],\n",
              "                        [-1.8890e-02,  2.4789e-03, -1.1571e-02]],\n",
              "              \n",
              "                       [[ 1.1484e-02, -2.0147e-02, -2.0892e-02],\n",
              "                        [ 3.4825e-03,  5.0934e-03,  2.2297e-02],\n",
              "                        [ 2.9502e-03, -1.7234e-02,  2.5533e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 8.1410e-03,  1.8418e-02, -2.5425e-02],\n",
              "                        [-2.6691e-02, -2.1449e-02,  1.5463e-03],\n",
              "                        [-5.0451e-03, -3.9578e-03,  1.9582e-02]],\n",
              "              \n",
              "                       [[-1.5270e-02,  4.8144e-03,  2.2135e-02],\n",
              "                        [-1.6253e-02,  4.4444e-04,  4.8340e-03],\n",
              "                        [ 1.6416e-02,  1.9390e-02,  2.1911e-02]],\n",
              "              \n",
              "                       [[-2.3070e-02, -7.2843e-03,  2.0068e-02],\n",
              "                        [-2.7756e-02,  7.3090e-03,  1.0182e-03],\n",
              "                        [ 2.1164e-02,  1.9902e-02,  4.5375e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[-7.8187e-03,  2.9006e-03, -1.4701e-02],\n",
              "                        [ 3.7088e-03, -8.2738e-04, -1.3265e-02],\n",
              "                        [ 1.7100e-02,  2.7967e-02, -2.0702e-02]],\n",
              "              \n",
              "                       [[-9.8305e-03,  1.8415e-02, -2.4537e-02],\n",
              "                        [-2.7830e-02, -7.7552e-03,  2.5046e-02],\n",
              "                        [-1.5616e-03,  1.0728e-02,  2.7209e-02]],\n",
              "              \n",
              "                       [[ 2.8780e-03, -2.4052e-02, -2.0176e-04],\n",
              "                        [ 1.1875e-02,  1.5972e-02, -9.3813e-03],\n",
              "                        [-2.2453e-02,  6.5761e-03,  2.3055e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-2.2329e-02, -2.0098e-02, -2.6985e-02],\n",
              "                        [ 1.2239e-02,  1.4843e-02,  1.9691e-02],\n",
              "                        [ 2.3544e-02,  1.4761e-02,  6.3663e-03]],\n",
              "              \n",
              "                       [[-8.4336e-03,  3.4443e-03,  1.8438e-02],\n",
              "                        [-2.8420e-02, -1.6729e-02, -1.2646e-02],\n",
              "                        [-1.9693e-02,  7.3261e-03, -3.1566e-03]],\n",
              "              \n",
              "                       [[-2.4645e-02,  1.8321e-02, -4.0397e-04],\n",
              "                        [ 1.5827e-02, -2.7231e-03,  1.6918e-02],\n",
              "                        [-4.4517e-03, -2.6751e-02,  1.1543e-02]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[-1.8405e-02, -1.8552e-02,  1.8426e-02],\n",
              "                        [-1.3316e-02,  2.8217e-02,  1.0513e-02],\n",
              "                        [-2.5079e-02, -1.7281e-02, -2.4874e-02]],\n",
              "              \n",
              "                       [[ 1.5393e-02,  1.5141e-02,  8.1510e-03],\n",
              "                        [-1.0526e-02, -6.3728e-03,  2.2631e-02],\n",
              "                        [-4.3041e-03,  1.6829e-02, -1.9059e-03]],\n",
              "              \n",
              "                       [[-1.7253e-04, -2.2169e-02, -6.4992e-03],\n",
              "                        [ 1.0485e-02, -6.5061e-03, -2.3382e-02],\n",
              "                        [ 1.9623e-02, -1.4793e-02, -1.6704e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.4066e-02,  2.4665e-02, -5.0497e-03],\n",
              "                        [ 2.0875e-02, -1.0805e-02, -2.3073e-02],\n",
              "                        [ 1.2661e-02,  4.0351e-03,  5.6650e-03]],\n",
              "              \n",
              "                       [[ 1.2731e-02, -2.2650e-02,  2.6930e-02],\n",
              "                        [-4.1111e-03, -1.9961e-02,  1.1840e-02],\n",
              "                        [-1.7793e-02,  1.6329e-02,  1.9701e-02]],\n",
              "              \n",
              "                       [[-1.7434e-02, -2.9420e-02, -1.8649e-02],\n",
              "                        [-1.8168e-02, -1.2947e-02, -1.6547e-02],\n",
              "                        [-9.9308e-03, -2.5214e-02, -2.1630e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 8.5052e-03, -1.7950e-03,  2.3441e-02],\n",
              "                        [ 2.5760e-02,  1.1606e-02, -2.7950e-02],\n",
              "                        [ 1.0198e-02,  1.8615e-02,  2.3244e-02]],\n",
              "              \n",
              "                       [[ 8.2701e-03, -1.5376e-02,  2.7322e-02],\n",
              "                        [-1.1399e-02, -2.7218e-03, -4.6930e-03],\n",
              "                        [ 2.6007e-02, -2.7376e-02, -3.1284e-03]],\n",
              "              \n",
              "                       [[-1.5890e-02,  2.0889e-02, -2.7867e-02],\n",
              "                        [ 4.3739e-03, -1.7800e-02,  9.8991e-03],\n",
              "                        [-1.5661e-02,  2.2950e-02,  2.1446e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-5.2321e-03,  1.5060e-04, -1.0188e-03],\n",
              "                        [-1.3277e-02,  4.4785e-05, -1.1725e-02],\n",
              "                        [-8.3341e-03, -4.0259e-03, -1.3080e-02]],\n",
              "              \n",
              "                       [[-1.8229e-02, -2.8540e-02,  2.2378e-02],\n",
              "                        [-1.5212e-02, -7.1600e-03, -1.1922e-02],\n",
              "                        [ 2.6081e-02, -1.3219e-02,  9.0255e-03]],\n",
              "              \n",
              "                       [[ 8.6661e-03, -2.5765e-02,  1.4840e-02],\n",
              "                        [-4.8043e-03, -2.8560e-02,  1.6038e-02],\n",
              "                        [-1.3894e-02, -2.0780e-02, -3.8516e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[-4.4747e-03,  3.5741e-03, -6.6015e-03],\n",
              "                        [-5.4328e-03,  1.6386e-03, -2.9370e-02],\n",
              "                        [-7.3637e-03, -6.2213e-04, -2.3295e-02]],\n",
              "              \n",
              "                       [[-5.6848e-03, -1.8434e-02, -2.3035e-02],\n",
              "                        [ 1.0670e-02, -1.5747e-02, -9.2061e-03],\n",
              "                        [-4.0514e-03, -2.8437e-02, -1.2153e-03]],\n",
              "              \n",
              "                       [[ 2.5137e-02,  1.2477e-02,  3.0466e-03],\n",
              "                        [-9.6344e-03, -2.9440e-02,  1.6218e-02],\n",
              "                        [ 2.4613e-02,  2.8550e-02, -2.7208e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.9688e-02,  2.7667e-02,  1.9568e-02],\n",
              "                        [ 2.4548e-02, -2.4779e-02,  3.1348e-03],\n",
              "                        [ 1.4525e-02,  2.3288e-02,  2.1040e-02]],\n",
              "              \n",
              "                       [[ 4.3424e-03, -2.1026e-02, -2.0865e-02],\n",
              "                        [ 1.1008e-02, -8.1889e-03, -2.1260e-02],\n",
              "                        [-2.9022e-02, -1.0430e-02,  9.4738e-03]],\n",
              "              \n",
              "                       [[-2.7689e-02,  6.7296e-03, -2.9341e-02],\n",
              "                        [ 1.7948e-02, -3.5770e-03, -1.0169e-02],\n",
              "                        [-1.6843e-02,  2.0853e-02, -1.5197e-02]]]])),\n",
              "             ('conv4.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv4.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv4.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv4.1.running_mean',\n",
              "              tensor([ 3.4584e-02,  2.3119e-02, -2.2534e-02, -1.1778e-02, -2.4848e-02,\n",
              "                      -4.7823e-02, -5.3002e-03,  7.3095e-02, -1.2356e-02,  7.9969e-02,\n",
              "                      -3.8963e-02, -5.5857e-02, -7.6550e-03,  3.5720e-02, -8.2133e-02,\n",
              "                      -7.3851e-02,  6.1116e-02, -2.5437e-02, -1.8158e-02, -2.4241e-02,\n",
              "                      -2.8625e-02, -1.4398e-02, -5.6487e-02, -6.3058e-02,  8.3148e-03,\n",
              "                      -2.6756e-02, -5.2130e-03, -6.3305e-03, -3.0417e-02, -3.1111e-03,\n",
              "                      -3.9508e-02,  1.3816e-01,  7.0559e-02, -3.8070e-02,  3.5430e-02,\n",
              "                       1.7910e-03, -2.2609e-02, -6.0398e-02,  2.2845e-02,  1.0481e-02,\n",
              "                      -2.1818e-02, -1.4488e-01, -3.5450e-02, -2.3223e-02, -1.9464e-02,\n",
              "                       6.4539e-02, -4.5917e-02, -4.5986e-02,  1.2306e-02,  6.5496e-03,\n",
              "                       4.5997e-02,  7.2640e-02,  7.2260e-02, -1.0134e-02, -1.1074e-02,\n",
              "                      -3.8448e-04, -2.9507e-02,  1.8113e-02, -6.2490e-02,  1.6001e-02,\n",
              "                       2.3022e-02,  4.6969e-02,  5.7258e-02,  1.8742e-02,  7.6051e-02,\n",
              "                       4.3549e-03, -9.5577e-03, -3.4162e-02,  1.8953e-02,  7.4133e-02,\n",
              "                       7.0931e-02,  2.9795e-02, -4.4242e-02, -5.5179e-02,  4.4770e-02,\n",
              "                      -8.0687e-04,  5.3065e-02, -6.2434e-03, -7.9971e-02, -1.7427e-02,\n",
              "                      -8.1926e-02,  8.5714e-02,  2.9192e-02,  1.3198e-04,  1.2726e-02,\n",
              "                       2.7124e-02, -1.1868e-01, -3.4410e-02,  9.0027e-02, -5.8555e-02,\n",
              "                      -7.7140e-03,  4.1965e-02,  1.8264e-02, -1.2048e-03,  1.5798e-02,\n",
              "                      -5.6083e-02,  1.6078e-03,  1.6010e-02, -5.6398e-02,  4.9475e-02,\n",
              "                      -4.1702e-02,  6.4930e-02,  1.4788e-02,  2.9091e-02, -9.5338e-03,\n",
              "                      -1.0862e-01, -6.8206e-02,  4.7571e-02,  2.0201e-02,  2.6911e-02,\n",
              "                      -2.6535e-02, -3.7579e-02, -2.6257e-02,  8.9725e-02, -7.8516e-02,\n",
              "                      -1.8628e-02, -6.6130e-02,  6.0762e-02, -7.3106e-02, -5.2071e-03,\n",
              "                      -5.6351e-02,  3.4736e-02, -1.2321e-01, -9.2188e-05, -4.1508e-02,\n",
              "                       4.5236e-02,  8.2103e-02, -5.7678e-02, -1.9189e-02, -9.2382e-02,\n",
              "                      -2.9862e-02,  2.8939e-02, -3.9306e-02,  7.7123e-03, -4.6539e-02,\n",
              "                       6.6586e-02, -3.4169e-02, -7.5563e-02,  1.5756e-03, -1.0328e-03,\n",
              "                      -3.5735e-02,  4.5458e-02, -2.7259e-02, -2.7233e-02,  8.9517e-02,\n",
              "                      -5.4590e-02,  2.9849e-02,  4.7647e-02, -7.3186e-02,  4.3764e-02,\n",
              "                       5.9393e-02, -1.3595e-03, -9.7098e-03, -5.1161e-02, -2.9166e-02,\n",
              "                       3.2667e-02, -2.0372e-02, -1.4902e-02, -6.9819e-02,  7.7717e-02,\n",
              "                      -2.8213e-02,  5.3438e-03,  3.8415e-02, -9.0636e-03, -2.5210e-02,\n",
              "                       2.9964e-02,  3.6985e-02, -9.6621e-02, -1.2668e-01, -3.7984e-02,\n",
              "                       5.0593e-02,  3.5540e-02, -1.9793e-02,  6.1875e-02, -3.0487e-02,\n",
              "                      -5.5370e-02, -5.7984e-02,  3.9973e-02,  7.7445e-02,  6.4789e-02,\n",
              "                      -5.1839e-02,  6.0551e-02,  7.8096e-02, -5.2331e-03, -4.6770e-02,\n",
              "                       2.6159e-02,  1.0681e-02,  6.4363e-02,  5.8203e-02,  5.6703e-02,\n",
              "                      -1.4162e-02, -3.2992e-03, -2.0499e-02, -5.9802e-03, -6.0485e-02,\n",
              "                       3.9027e-02, -6.0626e-02,  5.4127e-02, -6.9941e-02, -2.7508e-02,\n",
              "                      -2.6016e-02, -3.9611e-02, -2.5126e-02, -4.1015e-03, -1.6085e-02,\n",
              "                      -6.1584e-02,  8.6560e-02,  2.6844e-02,  3.3399e-02,  4.3995e-02,\n",
              "                      -3.3526e-02, -8.6000e-02, -3.7071e-02,  6.9233e-02,  9.2352e-03,\n",
              "                      -6.5728e-02, -4.0221e-02,  4.2680e-02, -7.3149e-03, -6.5301e-02,\n",
              "                       3.2226e-02, -1.3093e-02,  1.0417e-01, -7.1986e-04, -3.7268e-02,\n",
              "                       1.7337e-02, -4.6701e-02,  7.0086e-02,  1.9966e-02,  2.3820e-02,\n",
              "                      -2.4083e-03,  2.0904e-02, -5.9323e-02, -2.5376e-02, -1.0133e-02,\n",
              "                      -1.7849e-02,  1.5418e-01,  2.5690e-02,  7.2501e-02,  8.9526e-02,\n",
              "                       5.3800e-02,  1.2988e-02,  8.0873e-02,  2.5258e-02,  7.5550e-03,\n",
              "                      -1.0512e-01,  4.0830e-02,  1.6595e-02,  4.0433e-02,  1.0289e-03,\n",
              "                       9.6541e-03,  4.6432e-02,  7.5635e-03,  2.5046e-02,  1.2567e-02,\n",
              "                      -2.5635e-02, -1.8961e-02, -4.0196e-02,  4.1820e-03, -3.6513e-03,\n",
              "                       6.1196e-02,  5.0040e-02, -2.6387e-02, -7.8408e-03, -1.0682e-01,\n",
              "                       1.1796e-02, -4.6930e-02, -9.9428e-02, -3.1262e-02, -2.5021e-02,\n",
              "                      -2.7118e-02, -5.1092e-02, -7.0328e-02,  5.6237e-02, -2.9674e-02,\n",
              "                      -2.7717e-03, -1.1400e-03,  7.3268e-02,  1.8392e-02,  4.9765e-02,\n",
              "                       1.0714e-01, -3.8363e-02,  5.5709e-02, -3.0908e-02, -2.5843e-03,\n",
              "                       7.2615e-02, -1.5356e-02,  5.6617e-02, -2.7788e-03, -3.6677e-02,\n",
              "                      -1.2852e-01, -1.1094e-04, -2.9584e-02, -3.8294e-02, -2.1636e-01,\n",
              "                       7.2620e-02, -9.1523e-02, -8.8150e-03, -1.0568e-01, -2.1623e-03,\n",
              "                      -1.4545e-02,  4.4622e-02,  9.4207e-02,  2.2719e-02,  3.1164e-02,\n",
              "                       7.2590e-02, -8.8416e-02,  3.2630e-02, -7.3359e-04,  5.4624e-02,\n",
              "                       9.3715e-02,  6.0796e-02,  4.9398e-03, -3.9083e-02, -4.1053e-02,\n",
              "                      -1.0810e-02,  2.2999e-02, -1.0661e-01, -3.7450e-02, -8.2992e-03,\n",
              "                       9.2777e-02, -7.7314e-02, -3.8978e-02, -7.2934e-02, -2.5694e-02,\n",
              "                      -5.5032e-02, -3.6113e-02,  2.9350e-02, -3.0452e-02, -8.3399e-02,\n",
              "                       1.6998e-02, -4.1216e-02,  6.7879e-02, -2.4140e-02, -4.3917e-02,\n",
              "                       6.3614e-04,  2.7340e-02, -1.4168e-02,  1.2013e-01,  4.8802e-02,\n",
              "                      -2.8786e-02,  4.0711e-02, -2.0159e-02, -4.1702e-02,  3.3552e-02,\n",
              "                      -1.4302e-02,  3.0077e-02, -1.4078e-02, -1.3712e-02,  1.9141e-02,\n",
              "                      -2.9025e-02, -6.4172e-02,  9.2178e-02,  4.1114e-02, -5.0997e-02,\n",
              "                       2.7447e-02, -3.2537e-02, -6.1846e-02,  4.1928e-03, -1.3639e-02,\n",
              "                       1.0693e-01, -8.9831e-03, -2.3661e-02,  9.5327e-02,  8.4358e-02,\n",
              "                       4.8547e-02,  3.0499e-02,  7.5142e-03,  2.9405e-03,  4.7889e-02,\n",
              "                      -4.3806e-02,  1.1821e-02, -1.0320e-02,  4.0263e-03, -4.9619e-02,\n",
              "                      -7.5179e-02, -5.7541e-02, -5.9175e-03, -1.9945e-02, -7.6223e-02,\n",
              "                      -2.8048e-02, -1.3755e-02, -4.9613e-03,  5.9930e-03, -1.8729e-03,\n",
              "                      -2.5537e-02,  1.8450e-02, -4.3274e-03, -6.1593e-02, -2.0473e-02,\n",
              "                       2.1718e-02, -3.7525e-02, -1.4866e-02, -1.0935e-01,  1.1791e-02,\n",
              "                       1.4005e-02,  3.3091e-02,  2.3348e-02,  9.9288e-03,  5.4102e-02,\n",
              "                      -1.4386e-02, -1.5221e-02, -2.4160e-02,  3.7137e-02,  1.0689e-01,\n",
              "                       8.8849e-03, -7.1606e-02,  2.3262e-02,  2.3427e-02,  3.2837e-02,\n",
              "                       5.1652e-03,  8.3245e-02,  8.1302e-03, -2.8199e-02, -3.2768e-03,\n",
              "                      -1.8688e-02, -6.0295e-02, -1.1739e-02,  6.6027e-03,  9.1433e-02,\n",
              "                       1.1523e-02,  2.0884e-03, -1.2525e-02,  4.5311e-02, -4.7882e-02,\n",
              "                       2.4122e-02, -4.9391e-02,  2.4203e-02, -3.6926e-03, -1.3361e-02,\n",
              "                      -3.2236e-02,  3.0028e-02, -3.5397e-02, -2.8037e-02,  3.9258e-02,\n",
              "                       1.9321e-03,  8.2527e-03,  4.9805e-02, -3.6791e-02, -2.1085e-02,\n",
              "                       7.6634e-02,  5.5167e-03, -2.8604e-02,  3.1547e-02, -6.1427e-02,\n",
              "                      -1.8118e-02, -6.6474e-03, -6.9112e-02, -5.0562e-02, -3.4964e-02,\n",
              "                      -5.8289e-03, -3.5186e-02,  1.5215e-01, -2.2189e-02, -9.0821e-03,\n",
              "                       1.3694e-01, -6.0382e-02,  4.3372e-02,  3.4330e-02,  6.0557e-02,\n",
              "                       8.3273e-02,  1.6698e-02, -2.4859e-02,  1.2090e-01,  4.6948e-02,\n",
              "                       2.5528e-02, -3.9082e-02, -5.1274e-02, -3.2793e-02,  5.5170e-02,\n",
              "                      -1.1159e-01, -7.0859e-02,  4.6150e-02,  4.8468e-02, -2.5560e-02,\n",
              "                      -2.3281e-02,  1.6687e-02,  6.7210e-02,  2.6380e-02, -9.5016e-03,\n",
              "                       9.2599e-02, -5.5027e-02, -2.0355e-02,  2.6379e-02,  2.8935e-02,\n",
              "                      -3.5409e-02, -4.1709e-02,  9.4226e-02,  7.6106e-03, -1.4774e-03,\n",
              "                      -2.0137e-02, -3.0194e-02,  9.4737e-03, -4.5693e-02, -8.8944e-02,\n",
              "                      -5.2606e-02, -1.9629e-02,  7.9254e-03,  1.0200e-01,  2.5442e-02,\n",
              "                      -2.3187e-02, -5.3052e-02,  4.4208e-02, -3.9722e-02,  3.3566e-02,\n",
              "                      -4.7076e-02, -7.9470e-02,  3.9437e-02, -4.7689e-02,  3.9561e-02,\n",
              "                      -2.6759e-03, -2.8057e-03])),\n",
              "             ('conv4.1.running_var',\n",
              "              tensor([0.9377, 0.9283, 0.9220, 0.9230, 0.9447, 0.9435, 0.9328, 0.9653, 0.9532,\n",
              "                      0.9283, 0.9346, 0.9299, 0.9254, 0.9451, 0.9395, 0.9458, 0.9516, 0.9327,\n",
              "                      0.9318, 0.9259, 0.9459, 0.9440, 0.9361, 0.9535, 0.9239, 0.9409, 0.9344,\n",
              "                      0.9246, 0.9258, 0.9267, 0.9475, 0.9668, 0.9565, 0.9302, 0.9259, 0.9253,\n",
              "                      0.9260, 0.9564, 0.9346, 0.9469, 0.9346, 0.9499, 0.9343, 0.9399, 0.9431,\n",
              "                      0.9391, 0.9332, 0.9261, 0.9208, 0.9261, 0.9554, 0.9531, 0.9407, 0.9214,\n",
              "                      0.9419, 0.9320, 0.9513, 0.9275, 0.9505, 0.9278, 0.9246, 0.9464, 0.9258,\n",
              "                      0.9330, 0.9516, 0.9223, 0.9253, 0.9520, 0.9297, 0.9331, 0.9514, 0.9390,\n",
              "                      0.9442, 0.9373, 0.9260, 0.9299, 0.9421, 0.9341, 0.9365, 0.9341, 0.9467,\n",
              "                      0.9280, 0.9436, 0.9473, 0.9432, 0.9304, 0.9593, 0.9304, 0.9445, 0.9313,\n",
              "                      0.9392, 0.9326, 0.9496, 0.9370, 0.9390, 0.9405, 0.9277, 0.9379, 0.9628,\n",
              "                      0.9369, 0.9354, 0.9406, 0.9642, 0.9238, 0.9385, 0.9557, 0.9358, 0.9474,\n",
              "                      0.9316, 0.9420, 0.9279, 0.9451, 0.9250, 0.9530, 0.9362, 0.9261, 0.9641,\n",
              "                      0.9396, 0.9386, 0.9242, 0.9400, 0.9324, 0.9436, 0.9309, 0.9329, 0.9308,\n",
              "                      0.9400, 0.9424, 0.9320, 0.9405, 0.9249, 0.9249, 0.9317, 0.9373, 0.9560,\n",
              "                      0.9362, 0.9410, 0.9592, 0.9293, 0.9304, 0.9308, 0.9382, 0.9300, 0.9299,\n",
              "                      0.9295, 0.9497, 0.9296, 0.9254, 0.9392, 0.9322, 0.9509, 0.9222, 0.9325,\n",
              "                      0.9492, 0.9241, 0.9545, 0.9300, 0.9348, 0.9329, 0.9330, 0.9362, 0.9403,\n",
              "                      0.9328, 0.9368, 0.9230, 0.9343, 0.9480, 0.9387, 0.9616, 0.9255, 0.9356,\n",
              "                      0.9330, 0.9398, 0.9473, 0.9220, 0.9373, 0.9344, 0.9240, 0.9291, 0.9339,\n",
              "                      0.9357, 0.9399, 0.9640, 0.9361, 0.9266, 0.9376, 0.9453, 0.9317, 0.9389,\n",
              "                      0.9504, 0.9463, 0.9229, 0.9382, 0.9395, 0.9456, 0.9351, 0.9332, 0.9371,\n",
              "                      0.9433, 0.9369, 0.9290, 0.9367, 0.9452, 0.9288, 0.9329, 0.9348, 0.9326,\n",
              "                      0.9391, 0.9279, 0.9358, 0.9329, 0.9351, 0.9360, 0.9418, 0.9256, 0.9580,\n",
              "                      0.9261, 0.9312, 0.9307, 0.9396, 0.9419, 0.9329, 0.9770, 0.9386, 0.9221,\n",
              "                      0.9291, 0.9517, 0.9313, 0.9263, 0.9233, 0.9301, 0.9214, 0.9321, 0.9333,\n",
              "                      0.9283, 0.9212, 0.9525, 0.9296, 0.9366, 0.9544, 0.9400, 0.9274, 0.9334,\n",
              "                      0.9288, 0.9240, 0.9413, 0.9293, 0.9382, 0.9236, 0.9443, 0.9452, 0.9266,\n",
              "                      0.9449, 0.9269, 0.9555, 0.9491, 0.9283, 0.9295, 0.9280, 0.9513, 0.9368,\n",
              "                      0.9336, 0.9294, 0.9358, 0.9409, 0.9287, 0.9427, 0.9388, 0.9439, 0.9335,\n",
              "                      0.9256, 0.9393, 0.9417, 0.9489, 0.9547, 0.9312, 0.9335, 0.9395, 0.9330,\n",
              "                      0.9585, 0.9453, 0.9307, 0.9275, 0.9237, 0.9257, 0.9388, 0.9400, 0.9363,\n",
              "                      0.9357, 0.9396, 1.0009, 0.9348, 0.9316, 0.9748, 1.0171, 0.9479, 0.9620,\n",
              "                      0.9369, 0.9655, 0.9258, 0.9450, 0.9260, 0.9476, 0.9351, 0.9313, 0.9390,\n",
              "                      0.9377, 0.9306, 0.9316, 0.9326, 0.9414, 0.9240, 0.9291, 0.9342, 0.9275,\n",
              "                      0.9436, 0.9562, 0.9384, 0.9463, 0.9373, 0.9423, 0.9405, 0.9298, 0.9319,\n",
              "                      0.9281, 0.9541, 0.9459, 0.9264, 0.9242, 0.9529, 0.9228, 0.9461, 0.9472,\n",
              "                      0.9495, 0.9258, 0.9302, 0.9364, 0.9262, 0.9436, 0.9400, 0.9234, 0.9465,\n",
              "                      0.9395, 0.9382, 0.9342, 0.9221, 0.9222, 0.9361, 0.9434, 0.9508, 0.9316,\n",
              "                      0.9413, 0.9434, 0.9477, 0.9335, 0.9295, 0.9566, 0.9391, 0.9455, 0.9279,\n",
              "                      0.9392, 0.9362, 0.9224, 0.9407, 0.9624, 0.9372, 0.9343, 0.9284, 0.9325,\n",
              "                      0.9353, 0.9314, 0.9799, 0.9718, 0.9313, 0.9333, 0.9343, 0.9298, 0.9558,\n",
              "                      0.9437, 0.9396, 0.9308, 0.9568, 0.9350, 0.9379, 0.9380, 0.9316, 0.9334,\n",
              "                      0.9494, 0.9457, 0.9308, 0.9333, 0.9384, 0.9347, 0.9482, 0.9254, 0.9367,\n",
              "                      0.9265, 0.9372, 0.9295, 0.9436, 0.9375, 0.9223, 0.9343, 0.9395, 0.9632,\n",
              "                      0.9294, 0.9420, 0.9280, 0.9263, 0.9239, 0.9408, 0.9444, 0.9339, 0.9292,\n",
              "                      0.9352, 0.9290, 0.9621, 0.9407, 0.9398, 0.9524, 0.9357, 0.9252, 0.9218,\n",
              "                      0.9681, 0.9299, 0.9403, 0.9428, 0.9244, 0.9330, 0.9287, 0.9260, 0.9323,\n",
              "                      0.9273, 0.9240, 0.9388, 0.9209, 0.9270, 0.9433, 0.9285, 0.9308, 0.9262,\n",
              "                      0.9332, 0.9431, 0.9320, 0.9252, 0.9305, 0.9348, 0.9408, 0.9311, 0.9320,\n",
              "                      0.9441, 0.9650, 0.9916, 0.9294, 0.9288, 0.9644, 0.9358, 0.9469, 0.9376,\n",
              "                      0.9445, 0.9490, 0.9429, 0.9295, 0.9264, 0.9333, 0.9324, 0.9300, 0.9432,\n",
              "                      0.9335, 0.9379, 0.9391, 0.9382, 0.9318, 0.9309, 0.9258, 0.9395, 0.9497,\n",
              "                      0.9489, 0.9274, 0.9361, 0.9445, 0.9559, 0.9369, 0.9349, 0.9299, 0.9254,\n",
              "                      0.9378, 0.9451, 0.9298, 0.9283, 0.9347, 0.9286, 0.9439, 0.9436, 0.9374,\n",
              "                      0.9235, 0.9576, 0.9394, 0.9556, 0.9786, 0.9262, 0.9406, 0.9434, 0.9328,\n",
              "                      0.9319, 0.9428, 0.9846, 0.9342, 0.9402, 0.9366, 0.9249, 0.9489])),\n",
              "             ('conv4.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv4.3.weight',\n",
              "              tensor([[[[-0.0240, -0.0020,  0.0009],\n",
              "                        [-0.0016,  0.0019,  0.0206],\n",
              "                        [ 0.0160, -0.0022, -0.0130]],\n",
              "              \n",
              "                       [[ 0.0196,  0.0019,  0.0073],\n",
              "                        [ 0.0186, -0.0196, -0.0123],\n",
              "                        [-0.0103, -0.0152,  0.0080]],\n",
              "              \n",
              "                       [[ 0.0002, -0.0059,  0.0031],\n",
              "                        [-0.0226, -0.0087,  0.0089],\n",
              "                        [-0.0190,  0.0172,  0.0194]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0186,  0.0098, -0.0254],\n",
              "                        [ 0.0155, -0.0083,  0.0105],\n",
              "                        [ 0.0195, -0.0112, -0.0154]],\n",
              "              \n",
              "                       [[-0.0200,  0.0151,  0.0165],\n",
              "                        [ 0.0024,  0.0129,  0.0169],\n",
              "                        [-0.0164, -0.0085, -0.0073]],\n",
              "              \n",
              "                       [[ 0.0176, -0.0074, -0.0020],\n",
              "                        [-0.0199,  0.0111, -0.0148],\n",
              "                        [ 0.0086,  0.0110,  0.0146]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0069,  0.0084,  0.0075],\n",
              "                        [-0.0136, -0.0208,  0.0160],\n",
              "                        [-0.0204, -0.0233,  0.0162]],\n",
              "              \n",
              "                       [[ 0.0091, -0.0154,  0.0024],\n",
              "                        [-0.0080, -0.0051, -0.0203],\n",
              "                        [ 0.0228, -0.0164,  0.0056]],\n",
              "              \n",
              "                       [[-0.0035, -0.0113,  0.0006],\n",
              "                        [-0.0237, -0.0135, -0.0094],\n",
              "                        [-0.0212, -0.0020,  0.0079]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0251, -0.0070,  0.0108],\n",
              "                        [ 0.0255,  0.0250,  0.0067],\n",
              "                        [-0.0181, -0.0163,  0.0185]],\n",
              "              \n",
              "                       [[-0.0016,  0.0071,  0.0205],\n",
              "                        [ 0.0109, -0.0216,  0.0196],\n",
              "                        [ 0.0131,  0.0112,  0.0053]],\n",
              "              \n",
              "                       [[-0.0158, -0.0036,  0.0094],\n",
              "                        [-0.0073,  0.0186, -0.0065],\n",
              "                        [-0.0075, -0.0226,  0.0122]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0207, -0.0180,  0.0220],\n",
              "                        [-0.0134,  0.0107, -0.0230],\n",
              "                        [ 0.0184, -0.0155, -0.0100]],\n",
              "              \n",
              "                       [[-0.0181,  0.0209, -0.0178],\n",
              "                        [ 0.0141, -0.0247, -0.0132],\n",
              "                        [ 0.0219, -0.0149,  0.0178]],\n",
              "              \n",
              "                       [[ 0.0144,  0.0191, -0.0208],\n",
              "                        [ 0.0154, -0.0247,  0.0224],\n",
              "                        [-0.0041,  0.0050, -0.0118]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0182,  0.0233, -0.0067],\n",
              "                        [-0.0144, -0.0015,  0.0132],\n",
              "                        [ 0.0030, -0.0081, -0.0007]],\n",
              "              \n",
              "                       [[ 0.0157, -0.0076, -0.0216],\n",
              "                        [-0.0188, -0.0059, -0.0049],\n",
              "                        [-0.0004,  0.0202, -0.0242]],\n",
              "              \n",
              "                       [[-0.0201, -0.0026, -0.0112],\n",
              "                        [ 0.0132, -0.0216,  0.0188],\n",
              "                        [ 0.0177,  0.0155, -0.0166]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0093,  0.0243, -0.0037],\n",
              "                        [ 0.0103, -0.0242, -0.0244],\n",
              "                        [ 0.0112, -0.0157,  0.0254]],\n",
              "              \n",
              "                       [[ 0.0141,  0.0128,  0.0025],\n",
              "                        [ 0.0230, -0.0188, -0.0241],\n",
              "                        [-0.0022,  0.0007, -0.0087]],\n",
              "              \n",
              "                       [[ 0.0221, -0.0095,  0.0218],\n",
              "                        [ 0.0188,  0.0219,  0.0026],\n",
              "                        [-0.0084,  0.0094, -0.0184]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0209, -0.0095, -0.0254],\n",
              "                        [-0.0036,  0.0138, -0.0139],\n",
              "                        [ 0.0104,  0.0022,  0.0013]],\n",
              "              \n",
              "                       [[-0.0211,  0.0094,  0.0086],\n",
              "                        [-0.0098,  0.0027, -0.0144],\n",
              "                        [-0.0169, -0.0105,  0.0089]],\n",
              "              \n",
              "                       [[ 0.0083,  0.0242, -0.0196],\n",
              "                        [ 0.0189,  0.0050,  0.0167],\n",
              "                        [ 0.0073, -0.0068,  0.0055]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0054,  0.0167,  0.0089],\n",
              "                        [-0.0150,  0.0229, -0.0029],\n",
              "                        [ 0.0045, -0.0178, -0.0005]],\n",
              "              \n",
              "                       [[ 0.0221,  0.0142, -0.0119],\n",
              "                        [ 0.0125, -0.0128,  0.0158],\n",
              "                        [ 0.0137, -0.0122,  0.0022]],\n",
              "              \n",
              "                       [[ 0.0226,  0.0013,  0.0244],\n",
              "                        [ 0.0044,  0.0102,  0.0129],\n",
              "                        [-0.0075, -0.0046,  0.0076]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0064,  0.0158,  0.0075],\n",
              "                        [-0.0068,  0.0147, -0.0082],\n",
              "                        [-0.0219,  0.0232, -0.0128]],\n",
              "              \n",
              "                       [[ 0.0162, -0.0094, -0.0097],\n",
              "                        [ 0.0057,  0.0029,  0.0109],\n",
              "                        [ 0.0238, -0.0030,  0.0245]],\n",
              "              \n",
              "                       [[ 0.0152, -0.0188, -0.0076],\n",
              "                        [ 0.0023,  0.0094, -0.0161],\n",
              "                        [ 0.0002,  0.0116,  0.0207]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0168,  0.0215, -0.0213],\n",
              "                        [-0.0209, -0.0151, -0.0239],\n",
              "                        [-0.0150, -0.0104, -0.0166]],\n",
              "              \n",
              "                       [[-0.0018,  0.0129,  0.0039],\n",
              "                        [ 0.0065,  0.0075,  0.0185],\n",
              "                        [-0.0101, -0.0179, -0.0140]],\n",
              "              \n",
              "                       [[-0.0051,  0.0076,  0.0123],\n",
              "                        [-0.0183,  0.0013, -0.0243],\n",
              "                        [-0.0123, -0.0023,  0.0199]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0058,  0.0033,  0.0178],\n",
              "                        [ 0.0164, -0.0212, -0.0021],\n",
              "                        [-0.0041, -0.0140, -0.0255]],\n",
              "              \n",
              "                       [[ 0.0162, -0.0250, -0.0142],\n",
              "                        [-0.0013,  0.0183, -0.0047],\n",
              "                        [-0.0013, -0.0129, -0.0139]],\n",
              "              \n",
              "                       [[ 0.0217,  0.0117, -0.0097],\n",
              "                        [-0.0040,  0.0115,  0.0218],\n",
              "                        [-0.0169, -0.0022,  0.0058]]]])),\n",
              "             ('conv4.3.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv4.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv4.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv4.4.running_mean',\n",
              "              tensor([-3.8604e-02,  6.9143e-03, -2.6344e-02,  1.7318e-02,  2.3792e-02,\n",
              "                       1.0552e-02,  7.8734e-03, -4.3727e-02, -1.5096e-02,  1.5231e-02,\n",
              "                       1.5369e-02,  2.6147e-02,  3.9430e-02,  7.5809e-03, -5.2640e-02,\n",
              "                      -6.2492e-03, -1.5882e-02,  2.2917e-02, -1.5335e-02, -2.4981e-02,\n",
              "                      -3.9794e-02,  2.9959e-02, -2.3259e-02,  3.1520e-02,  3.4071e-02,\n",
              "                      -5.3607e-02, -1.4011e-02, -1.7462e-02,  8.3937e-03,  3.9004e-02,\n",
              "                       4.5293e-02, -3.0952e-02,  1.4257e-02, -5.1087e-02, -1.8683e-02,\n",
              "                      -8.8819e-03, -4.7900e-03,  3.2344e-02,  4.3479e-02,  3.9216e-02,\n",
              "                      -4.4297e-02,  2.5772e-02, -5.2588e-02,  4.5180e-02, -2.0730e-02,\n",
              "                      -7.7209e-03,  1.2898e-02,  9.8501e-04, -2.2349e-02,  1.0005e-02,\n",
              "                      -2.0006e-02, -2.2291e-03,  2.0213e-02,  2.2472e-02,  1.2768e-02,\n",
              "                       8.7016e-03,  2.6967e-02, -9.2654e-03, -3.1211e-02,  3.6463e-02,\n",
              "                      -1.4427e-02,  4.7758e-02, -1.2388e-02, -3.6637e-02,  2.8036e-02,\n",
              "                       4.7261e-04,  4.3187e-03,  2.5639e-03, -2.3765e-02,  1.1938e-02,\n",
              "                       1.1721e-02,  1.0520e-02,  4.6823e-02,  1.2962e-03,  2.7161e-02,\n",
              "                       1.5929e-02, -5.3663e-03, -3.2298e-02, -2.6667e-02,  2.3900e-03,\n",
              "                      -2.4536e-02, -1.0056e-02, -4.9647e-03, -9.9574e-03, -1.3312e-02,\n",
              "                       3.9436e-03,  2.0278e-04,  9.8008e-03,  5.5752e-03,  1.3032e-02,\n",
              "                      -2.7558e-03, -4.3875e-02, -1.1458e-02, -3.2037e-02,  2.5735e-02,\n",
              "                      -5.2222e-03, -4.2250e-02, -3.0439e-02, -1.3289e-02, -1.0048e-02,\n",
              "                       6.0858e-02, -4.9197e-03,  2.3622e-02, -3.4048e-03, -4.2560e-02,\n",
              "                       3.5396e-02,  5.6735e-02, -1.0818e-02, -2.1199e-02,  1.4954e-02,\n",
              "                       3.2882e-02,  2.3677e-02, -7.8593e-03, -1.0314e-02,  5.4304e-02,\n",
              "                       8.0752e-02, -3.3727e-02, -7.0025e-02, -2.7908e-02, -1.2786e-02,\n",
              "                      -5.1517e-02, -2.1720e-02,  7.4875e-04,  4.7550e-04,  3.8741e-02,\n",
              "                       2.5396e-02,  5.8336e-03,  1.0462e-02,  6.7971e-03, -6.0341e-02,\n",
              "                       1.2361e-02, -8.9538e-03,  4.5498e-02,  1.6747e-02, -2.4729e-02,\n",
              "                      -2.0812e-02,  1.5986e-02,  3.9665e-02,  4.8131e-02, -9.3919e-03,\n",
              "                      -4.5115e-02,  3.5056e-02,  2.9743e-02,  1.7375e-02, -1.7139e-02,\n",
              "                       4.6107e-02, -4.7262e-02, -2.6208e-02, -5.5159e-03,  6.8687e-03,\n",
              "                      -1.9948e-02,  2.7834e-02, -1.3696e-02, -5.3664e-03,  2.8535e-02,\n",
              "                       6.2175e-03, -3.5301e-02, -2.1990e-02,  2.3362e-02,  1.4808e-02,\n",
              "                      -1.7054e-02,  4.4967e-02,  1.9023e-02, -2.2428e-02, -2.2444e-02,\n",
              "                       1.2784e-02,  6.9945e-05, -1.6680e-03,  2.9178e-02,  8.3326e-03,\n",
              "                       4.2445e-02, -7.8574e-03,  3.8281e-02, -3.1732e-03, -1.2124e-02,\n",
              "                       1.7565e-02, -1.3541e-02,  9.7430e-03, -2.7983e-02,  3.0310e-02,\n",
              "                       1.0149e-02,  2.1342e-02, -8.1136e-03,  1.5977e-02,  2.6356e-02,\n",
              "                       9.5739e-02, -3.2750e-02,  7.5886e-04, -6.6445e-02, -2.0307e-02,\n",
              "                      -4.8132e-02,  2.2946e-02, -2.3831e-02,  3.0523e-02, -7.6533e-03,\n",
              "                      -3.1012e-02, -1.7145e-02,  1.8199e-02,  1.0413e-02,  6.1191e-03,\n",
              "                      -6.1374e-03, -4.6118e-02, -9.8150e-03, -2.9162e-02,  2.7481e-02,\n",
              "                       3.1189e-02, -2.2722e-02, -2.4265e-02, -1.0045e-02, -4.2642e-02,\n",
              "                       6.5367e-03, -4.5150e-02, -2.1868e-03, -3.0798e-02, -1.6098e-02,\n",
              "                      -2.9237e-03,  1.2463e-02,  1.7513e-02, -4.5512e-04, -2.6227e-02,\n",
              "                      -1.0712e-02,  4.2857e-02,  3.8901e-02, -4.6172e-02,  6.3665e-03,\n",
              "                      -7.2929e-04, -6.4933e-03, -4.1487e-02, -3.4375e-02, -2.7482e-02,\n",
              "                      -1.9232e-02, -1.4740e-02, -1.8010e-02,  3.2391e-02, -4.6157e-03,\n",
              "                      -2.8646e-02,  1.8041e-02,  5.6097e-02,  7.4295e-03, -3.0214e-02,\n",
              "                      -2.4865e-02,  2.9582e-02, -1.6389e-02, -1.1835e-02,  1.1966e-02,\n",
              "                      -1.3368e-02,  3.0978e-02,  9.5685e-03, -2.8374e-02, -1.0360e-02,\n",
              "                       2.4559e-02, -6.7049e-03, -7.8146e-03, -1.0113e-02,  1.2428e-03,\n",
              "                       4.2408e-02,  6.7654e-02,  4.7724e-02, -9.7678e-03,  7.7431e-02,\n",
              "                       6.2117e-02, -7.4090e-03,  3.3539e-02, -1.3265e-02,  1.9508e-02,\n",
              "                      -9.3119e-03,  3.5883e-02,  4.2623e-02,  1.4367e-02, -1.8012e-02,\n",
              "                       1.2733e-02,  3.4341e-02, -9.0065e-03, -2.9184e-02,  1.6449e-02,\n",
              "                      -1.7912e-02, -7.6695e-02, -5.1354e-02,  2.4696e-02,  5.8465e-02,\n",
              "                       3.1205e-02,  1.5139e-02,  1.9514e-02, -2.9534e-02, -1.5281e-02,\n",
              "                      -9.4115e-03,  3.0185e-03,  9.1273e-03, -6.9486e-03, -5.8231e-03,\n",
              "                      -2.9615e-02, -3.3894e-02,  2.1572e-02, -8.7983e-03, -3.0173e-03,\n",
              "                       4.0019e-02,  4.2734e-02, -5.4015e-03, -1.9447e-02, -1.2577e-02,\n",
              "                      -5.4264e-03,  2.9753e-02,  1.6383e-02,  3.3350e-02, -1.8565e-02,\n",
              "                       4.2166e-03, -1.9764e-02,  1.1098e-02,  5.5020e-02, -1.6206e-02,\n",
              "                      -3.0475e-02,  5.3959e-02,  3.0276e-02, -1.8220e-02,  3.5549e-03,\n",
              "                       1.9586e-02,  1.4893e-02,  2.3985e-02,  1.8023e-02,  7.1450e-03,\n",
              "                      -4.4908e-02, -1.4458e-02, -2.1406e-02, -3.9217e-02, -3.5393e-03,\n",
              "                      -4.0333e-02,  3.5304e-03,  1.9754e-02,  2.8848e-02,  2.0479e-02,\n",
              "                       2.1265e-02, -1.4717e-02,  2.0680e-03,  2.0644e-02, -1.0875e-02,\n",
              "                       1.1829e-02, -8.8308e-03,  5.5963e-03, -1.2706e-02, -1.9066e-02,\n",
              "                      -3.3249e-02,  1.4255e-03,  1.3194e-02,  3.2108e-02, -4.3002e-02,\n",
              "                       2.8568e-02, -4.2354e-02, -2.5734e-02,  9.2301e-03,  3.9417e-03,\n",
              "                      -4.0513e-02,  1.1547e-02, -3.6131e-03,  5.7518e-02,  1.1680e-02,\n",
              "                       7.2384e-03, -6.0800e-02, -6.7743e-03,  1.0540e-02, -2.4172e-02,\n",
              "                       5.8671e-03,  2.0620e-02, -6.5495e-03,  2.7257e-02, -6.2753e-03,\n",
              "                       3.8756e-02,  1.9389e-02, -2.3355e-02,  2.5558e-02,  4.2575e-02,\n",
              "                      -1.3537e-02, -5.2731e-02, -1.5226e-03, -2.1662e-02,  7.0113e-03,\n",
              "                      -1.6301e-03, -1.0655e-02, -2.6723e-03,  6.5119e-03, -3.5868e-02,\n",
              "                      -6.3604e-02, -5.2169e-03, -8.7593e-03,  1.8870e-02,  2.6433e-02,\n",
              "                      -1.8749e-02, -1.5418e-02, -2.3804e-02,  1.5569e-02, -2.4141e-03,\n",
              "                      -1.8543e-02, -5.6425e-03,  4.3254e-02, -1.6775e-02,  4.1472e-02,\n",
              "                       1.2507e-02, -3.9064e-02,  4.8963e-04, -5.0264e-02, -1.6638e-02,\n",
              "                       2.3625e-02, -8.6950e-03,  4.3102e-02,  1.7304e-02,  4.3124e-02,\n",
              "                      -3.5295e-02, -4.0648e-04,  5.0481e-02,  2.6384e-02,  4.3402e-02,\n",
              "                      -1.7876e-02, -9.7669e-03, -6.6501e-03,  1.6930e-02, -1.7990e-02,\n",
              "                       5.2854e-02, -1.8004e-02,  2.6897e-02,  2.0184e-02,  3.1864e-02,\n",
              "                      -4.3169e-02,  5.3726e-02,  1.8027e-02, -5.5824e-02,  1.7084e-02,\n",
              "                       1.9136e-02,  1.4617e-02,  3.4892e-02, -3.9516e-02, -3.8094e-02,\n",
              "                       2.0281e-03,  5.5439e-02, -2.1259e-02,  7.3944e-03, -1.2581e-02,\n",
              "                      -6.6829e-02,  1.2841e-02,  7.3499e-03, -2.3091e-02, -5.9450e-02,\n",
              "                       3.9450e-02, -3.5537e-02,  1.8233e-02, -1.3262e-02, -1.0825e-02,\n",
              "                      -5.1044e-03,  4.0838e-02,  2.1822e-02, -2.1354e-03,  1.5584e-02,\n",
              "                       2.0973e-02, -2.8212e-02,  2.7106e-03, -3.6848e-03,  5.2875e-03,\n",
              "                      -5.7969e-02,  7.1367e-03,  1.8947e-02,  1.7157e-03,  2.8194e-02,\n",
              "                       3.3533e-02, -3.9995e-02,  5.0047e-02,  4.6004e-02, -1.2493e-02,\n",
              "                      -3.6760e-03,  3.5530e-02,  1.8281e-02,  3.1554e-02,  3.3036e-02,\n",
              "                       1.6257e-02, -8.5256e-02, -1.1355e-02, -1.1549e-02,  1.4683e-02,\n",
              "                       8.2121e-03,  1.8819e-02,  1.5022e-02,  3.9029e-03, -7.7723e-03,\n",
              "                       2.3017e-02,  3.8454e-02,  1.0595e-02,  7.2154e-03, -1.7510e-02,\n",
              "                       2.0093e-02,  2.0396e-02, -2.2477e-02,  1.8489e-02,  3.5436e-02,\n",
              "                       1.3342e-02,  3.6116e-02, -5.3615e-03, -3.8658e-02, -1.2717e-02,\n",
              "                      -1.3131e-02, -1.9214e-02, -3.1452e-02, -9.4374e-03,  1.1856e-02,\n",
              "                       2.1040e-02,  2.3165e-02,  5.8038e-03, -4.9505e-02, -4.5630e-02,\n",
              "                      -1.8545e-02, -4.0091e-03,  6.6842e-03, -3.4026e-02,  2.6621e-02,\n",
              "                      -1.6085e-02,  4.5646e-02])),\n",
              "             ('conv4.4.running_var',\n",
              "              tensor([0.9245, 0.9308, 0.9242, 0.9214, 0.9268, 0.9180, 0.9270, 0.9231, 0.9268,\n",
              "                      0.9266, 0.9188, 0.9319, 0.9165, 0.9230, 0.9286, 0.9320, 0.9242, 0.9304,\n",
              "                      0.9273, 0.9206, 0.9257, 0.9298, 0.9185, 0.9255, 0.9310, 0.9287, 0.9251,\n",
              "                      0.9223, 0.9246, 0.9323, 0.9301, 0.9279, 0.9259, 0.9237, 0.9210, 0.9230,\n",
              "                      0.9333, 0.9372, 0.9350, 0.9297, 0.9300, 0.9252, 0.9283, 0.9382, 0.9427,\n",
              "                      0.9255, 0.9254, 0.9273, 0.9254, 0.9217, 0.9282, 0.9337, 0.9217, 0.9267,\n",
              "                      0.9235, 0.9244, 0.9276, 0.9217, 0.9400, 0.9266, 0.9254, 0.9266, 0.9254,\n",
              "                      0.9301, 0.9220, 0.9210, 0.9245, 0.9311, 0.9433, 0.9255, 0.9266, 0.9282,\n",
              "                      0.9225, 0.9190, 0.9401, 0.9234, 0.9252, 0.9326, 0.9305, 0.9269, 0.9320,\n",
              "                      0.9371, 0.9356, 0.9300, 0.9282, 0.9321, 0.9266, 0.9267, 0.9257, 0.9250,\n",
              "                      0.9206, 0.9186, 0.9326, 0.9231, 0.9214, 0.9384, 0.9367, 0.9385, 0.9229,\n",
              "                      0.9305, 0.9328, 0.9204, 0.9251, 0.9187, 0.9259, 0.9248, 0.9358, 0.9193,\n",
              "                      0.9286, 0.9321, 0.9295, 0.9345, 0.9317, 0.9303, 0.9343, 0.9389, 0.9248,\n",
              "                      0.9367, 0.9217, 0.9212, 0.9305, 0.9335, 0.9240, 0.9292, 0.9264, 0.9197,\n",
              "                      0.9265, 0.9301, 0.9290, 0.9388, 0.9221, 0.9283, 0.9315, 0.9314, 0.9201,\n",
              "                      0.9246, 0.9438, 0.9226, 0.9197, 0.9193, 0.9296, 0.9313, 0.9223, 0.9219,\n",
              "                      0.9219, 0.9246, 0.9268, 0.9258, 0.9209, 0.9274, 0.9213, 0.9216, 0.9379,\n",
              "                      0.9274, 0.9266, 0.9233, 0.9271, 0.9231, 0.9233, 0.9234, 0.9317, 0.9399,\n",
              "                      0.9258, 0.9253, 0.9386, 0.9287, 0.9284, 0.9228, 0.9162, 0.9281, 0.9206,\n",
              "                      0.9338, 0.9428, 0.9237, 0.9234, 0.9274, 0.9224, 0.9241, 0.9288, 0.9211,\n",
              "                      0.9295, 0.9238, 0.9223, 0.9288, 0.9315, 0.9417, 0.9375, 0.9352, 0.9265,\n",
              "                      0.9247, 0.9416, 0.9243, 0.9221, 0.9242, 0.9209, 0.9265, 0.9224, 0.9351,\n",
              "                      0.9229, 0.9246, 0.9341, 0.9263, 0.9323, 0.9221, 0.9289, 0.9235, 0.9266,\n",
              "                      0.9291, 0.9284, 0.9352, 0.9236, 0.9346, 0.9225, 0.9286, 0.9214, 0.9236,\n",
              "                      0.9163, 0.9261, 0.9264, 0.9215, 0.9277, 0.9263, 0.9299, 0.9211, 0.9249,\n",
              "                      0.9254, 0.9274, 0.9241, 0.9232, 0.9291, 0.9263, 0.9221, 0.9226, 0.9310,\n",
              "                      0.9240, 0.9268, 0.9295, 0.9303, 0.9201, 0.9291, 0.9194, 0.9219, 0.9216,\n",
              "                      0.9298, 0.9305, 0.9237, 0.9323, 0.9297, 0.9244, 0.9262, 0.9278, 0.9349,\n",
              "                      0.9239, 0.9270, 0.9234, 0.9251, 0.9306, 0.9259, 0.9287, 0.9514, 0.9436,\n",
              "                      0.9251, 0.9333, 0.9225, 0.9283, 0.9228, 0.9329, 0.9234, 0.9267, 0.9247,\n",
              "                      0.9280, 0.9280, 0.9281, 0.9223, 0.9283, 0.9258, 0.9524, 0.9238, 0.9224,\n",
              "                      0.9199, 0.9302, 0.9209, 0.9311, 0.9292, 0.9209, 0.9223, 0.9335, 0.9275,\n",
              "                      0.9242, 0.9219, 0.9246, 0.9185, 0.9270, 0.9308, 0.9269, 0.9291, 0.9200,\n",
              "                      0.9310, 0.9291, 0.9221, 0.9273, 0.9351, 0.9448, 0.9376, 0.9218, 0.9259,\n",
              "                      0.9289, 0.9262, 0.9332, 0.9318, 0.9278, 0.9247, 0.9354, 0.9228, 0.9204,\n",
              "                      0.9296, 0.9329, 0.9256, 0.9244, 0.9273, 0.9301, 0.9376, 0.9218, 0.9319,\n",
              "                      0.9248, 0.9227, 0.9225, 0.9232, 0.9273, 0.9321, 0.9255, 0.9239, 0.9270,\n",
              "                      0.9219, 0.9267, 0.9274, 0.9275, 0.9242, 0.9239, 0.9259, 0.9310, 0.9282,\n",
              "                      0.9236, 0.9317, 0.9287, 0.9229, 0.9334, 0.9324, 0.9280, 0.9271, 0.9197,\n",
              "                      0.9245, 0.9223, 0.9477, 0.9311, 0.9226, 0.9275, 0.9253, 0.9250, 0.9253,\n",
              "                      0.9227, 0.9235, 0.9371, 0.9252, 0.9221, 0.9304, 0.9346, 0.9235, 0.9220,\n",
              "                      0.9340, 0.9228, 0.9381, 0.9242, 0.9227, 0.9241, 0.9279, 0.9211, 0.9269,\n",
              "                      0.9344, 0.9300, 0.9357, 0.9336, 0.9194, 0.9262, 0.9417, 0.9220, 0.9210,\n",
              "                      0.9234, 0.9197, 0.9211, 0.9235, 0.9193, 0.9229, 0.9280, 0.9252, 0.9313,\n",
              "                      0.9233, 0.9224, 0.9274, 0.9212, 0.9250, 0.9304, 0.9290, 0.9210, 0.9390,\n",
              "                      0.9244, 0.9240, 0.9265, 0.9246, 0.9272, 0.9297, 0.9263, 0.9285, 0.9287,\n",
              "                      0.9272, 0.9314, 0.9370, 0.9292, 0.9249, 0.9188, 0.9190, 0.9270, 0.9297,\n",
              "                      0.9323, 0.9327, 0.9227, 0.9251, 0.9181, 0.9322, 0.9259, 0.9255, 0.9406,\n",
              "                      0.9328, 0.9344, 0.9277, 0.9308, 0.9253, 0.9254, 0.9234, 0.9240, 0.9350,\n",
              "                      0.9260, 0.9209, 0.9470, 0.9244, 0.9278, 0.9284, 0.9312, 0.9359, 0.9274,\n",
              "                      0.9193, 0.9373, 0.9208, 0.9235, 0.9278, 0.9287, 0.9259, 0.9246, 0.9231,\n",
              "                      0.9233, 0.9243, 0.9221, 0.9275, 0.9268, 0.9328, 0.9226, 0.9283, 0.9246,\n",
              "                      0.9259, 0.9244, 0.9399, 0.9292, 0.9402, 0.9314, 0.9295, 0.9223, 0.9252,\n",
              "                      0.9330, 0.9255, 0.9254, 0.9237, 0.9322, 0.9304, 0.9254, 0.9228, 0.9264,\n",
              "                      0.9352, 0.9252, 0.9245, 0.9243, 0.9340, 0.9285, 0.9393, 0.9234, 0.9242,\n",
              "                      0.9206, 0.9235, 0.9240, 0.9207, 0.9230, 0.9234, 0.9227, 0.9208, 0.9249,\n",
              "                      0.9292, 0.9250, 0.9272, 0.9250, 0.9325, 0.9220, 0.9206, 0.9269])),\n",
              "             ('conv4.4.num_batches_tracked', tensor(1)),\n",
              "             ('conv5.0.weight',\n",
              "              tensor([[[[-1.0826e-02,  4.3011e-03, -2.8661e-03],\n",
              "                        [ 1.9902e-02, -2.2610e-02, -1.3206e-02],\n",
              "                        [-1.3436e-04, -8.9054e-03, -7.2499e-03]],\n",
              "              \n",
              "                       [[-2.2905e-02, -1.7473e-02, -5.2707e-03],\n",
              "                        [-9.4953e-03,  2.2737e-02,  1.1243e-02],\n",
              "                        [-2.1310e-02, -9.1725e-03,  1.5776e-03]],\n",
              "              \n",
              "                       [[ 1.4277e-02,  7.5198e-03,  3.0226e-03],\n",
              "                        [-3.7428e-03, -3.7832e-03,  1.2675e-02],\n",
              "                        [-1.8891e-02, -7.9197e-03, -2.5236e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-7.7593e-03,  1.6089e-02, -8.7447e-03],\n",
              "                        [-1.0590e-02,  2.2251e-02, -1.2066e-02],\n",
              "                        [ 2.1795e-02,  2.4942e-06, -2.1081e-02]],\n",
              "              \n",
              "                       [[-2.0343e-02, -2.3699e-02, -2.2781e-03],\n",
              "                        [-2.3957e-02,  2.4544e-02,  1.7826e-02],\n",
              "                        [ 1.1191e-02, -1.7713e-02,  6.5658e-03]],\n",
              "              \n",
              "                       [[-4.8543e-03, -2.1112e-02, -1.7118e-02],\n",
              "                        [-1.4069e-02, -1.3788e-02,  1.2069e-02],\n",
              "                        [-1.0678e-03,  1.0746e-02, -1.4385e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 1.7361e-02, -1.8507e-02, -4.0796e-03],\n",
              "                        [-3.2945e-03,  8.9378e-03, -3.9118e-03],\n",
              "                        [ 8.0488e-03,  9.5055e-03,  9.9630e-03]],\n",
              "              \n",
              "                       [[ 1.2765e-02,  2.1077e-02, -1.6196e-02],\n",
              "                        [ 5.6782e-05, -2.4133e-02, -6.7903e-03],\n",
              "                        [-9.6172e-03, -6.9346e-03,  1.3736e-02]],\n",
              "              \n",
              "                       [[ 9.3165e-03, -1.3523e-02, -9.8473e-03],\n",
              "                        [ 1.4141e-02, -2.3918e-02, -7.4244e-03],\n",
              "                        [-4.7756e-03, -2.2341e-02,  7.0359e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-1.5001e-02, -1.9520e-02, -1.2698e-02],\n",
              "                        [ 1.3893e-02, -1.0932e-02,  9.9771e-03],\n",
              "                        [ 3.4200e-03,  8.3890e-03,  1.5201e-02]],\n",
              "              \n",
              "                       [[ 2.2499e-02,  1.1376e-02, -8.7986e-03],\n",
              "                        [ 1.9675e-02, -7.7927e-03, -2.5287e-02],\n",
              "                        [ 2.5015e-02,  1.3926e-02,  6.2731e-03]],\n",
              "              \n",
              "                       [[-2.6845e-03,  2.6082e-04, -4.4220e-03],\n",
              "                        [-2.2662e-02,  2.0478e-02, -4.7033e-03],\n",
              "                        [-1.3481e-03, -1.2701e-02,  1.4392e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[-2.5288e-03,  2.0067e-02, -3.0541e-03],\n",
              "                        [ 9.1638e-03, -2.1185e-02,  1.1838e-02],\n",
              "                        [-4.0525e-03,  1.4048e-02,  6.5253e-03]],\n",
              "              \n",
              "                       [[-2.4145e-02,  1.3790e-02, -5.4927e-03],\n",
              "                        [-1.9616e-02,  1.5575e-02, -1.3161e-02],\n",
              "                        [-2.0385e-02, -1.7177e-03, -1.2937e-02]],\n",
              "              \n",
              "                       [[ 1.6042e-02,  1.0668e-02,  1.0024e-02],\n",
              "                        [ 2.4543e-02,  1.4178e-02, -1.8863e-02],\n",
              "                        [-8.3171e-03,  1.2193e-02,  1.5153e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 1.0485e-02, -2.8902e-03, -1.5263e-02],\n",
              "                        [ 1.0713e-02, -9.2926e-03, -1.3061e-02],\n",
              "                        [ 1.3267e-02,  1.1742e-03,  7.0154e-03]],\n",
              "              \n",
              "                       [[-1.7141e-02,  2.5305e-02,  1.2902e-02],\n",
              "                        [-4.4386e-03, -1.5487e-02,  6.1590e-03],\n",
              "                        [-5.3391e-03, -8.7943e-03,  2.5188e-02]],\n",
              "              \n",
              "                       [[-5.7819e-03,  3.5956e-03,  1.4856e-02],\n",
              "                        [-1.5432e-02, -2.3606e-02, -2.0545e-02],\n",
              "                        [ 1.1799e-03, -9.4175e-03, -3.3495e-03]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 1.4272e-03,  1.9361e-02, -1.7215e-02],\n",
              "                        [-1.5576e-02, -1.7388e-02,  2.2129e-02],\n",
              "                        [ 1.9527e-02,  2.4273e-02,  2.5354e-03]],\n",
              "              \n",
              "                       [[ 6.4573e-04,  1.3757e-02, -9.7448e-04],\n",
              "                        [ 2.5434e-02,  1.1868e-02, -2.0757e-02],\n",
              "                        [-7.7922e-03,  2.1980e-02, -6.6644e-03]],\n",
              "              \n",
              "                       [[-1.5859e-02,  2.4183e-02,  1.2125e-02],\n",
              "                        [ 2.0944e-02,  4.6216e-03, -7.6294e-03],\n",
              "                        [-7.2680e-03,  1.2095e-02,  1.1826e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-9.7670e-04,  2.5392e-02, -1.6905e-02],\n",
              "                        [-2.2322e-02,  1.1657e-02,  1.6916e-02],\n",
              "                        [ 1.8907e-02, -8.2314e-04, -1.1209e-02]],\n",
              "              \n",
              "                       [[-2.3011e-02, -5.9445e-03,  1.5847e-02],\n",
              "                        [-1.1559e-02,  2.4544e-03,  1.7934e-02],\n",
              "                        [ 6.3277e-03, -9.9283e-03,  2.0989e-02]],\n",
              "              \n",
              "                       [[-1.0603e-02,  1.5257e-03, -2.4473e-02],\n",
              "                        [ 1.7052e-02,  7.1533e-03, -2.3110e-02],\n",
              "                        [-9.6761e-03, -3.3763e-03,  3.7787e-03]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 1.3121e-02, -2.5135e-02, -2.3283e-03],\n",
              "                        [-2.5434e-02,  2.3181e-02,  1.5128e-02],\n",
              "                        [ 9.1178e-03, -1.2449e-02,  1.6061e-02]],\n",
              "              \n",
              "                       [[-4.3950e-03, -1.4093e-02,  1.3433e-02],\n",
              "                        [ 1.5660e-02, -1.9996e-03,  1.5383e-02],\n",
              "                        [ 2.2557e-02, -2.0545e-02,  2.0888e-02]],\n",
              "              \n",
              "                       [[-2.6145e-03,  6.6709e-03, -1.1784e-02],\n",
              "                        [-3.0971e-03,  1.9965e-02,  2.0616e-02],\n",
              "                        [-8.8787e-03, -1.8188e-03, -1.3707e-02]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 3.2792e-03,  8.2400e-03, -2.1194e-02],\n",
              "                        [-6.7306e-03, -2.1148e-02, -1.2879e-02],\n",
              "                        [ 5.9422e-04, -1.3086e-03,  6.2993e-03]],\n",
              "              \n",
              "                       [[ 2.1828e-02,  7.8495e-03, -9.8728e-04],\n",
              "                        [ 5.5680e-03, -8.0693e-03,  7.5552e-03],\n",
              "                        [ 9.3197e-03, -4.3410e-03,  1.3349e-02]],\n",
              "              \n",
              "                       [[-1.5047e-02,  4.0466e-03,  1.4780e-02],\n",
              "                        [-3.8739e-03, -1.7741e-02,  2.8521e-03],\n",
              "                        [ 1.9710e-02,  1.8513e-02,  2.0480e-02]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 1.2346e-02, -3.9835e-03,  1.7885e-03],\n",
              "                        [-1.4674e-02,  2.0861e-04, -2.5280e-02],\n",
              "                        [-2.3489e-03, -1.4051e-02, -2.0933e-02]],\n",
              "              \n",
              "                       [[ 1.1142e-02, -6.2363e-03,  1.6736e-02],\n",
              "                        [-1.6009e-02, -2.6142e-04,  2.2060e-02],\n",
              "                        [-2.3995e-02,  4.9407e-03,  8.4572e-03]],\n",
              "              \n",
              "                       [[ 5.8124e-03,  1.3500e-02,  1.6816e-02],\n",
              "                        [-2.1424e-02, -8.9746e-03, -8.7134e-03],\n",
              "                        [ 2.1322e-02, -2.1844e-02, -6.4749e-03]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 8.3718e-03,  1.6856e-02, -1.4246e-02],\n",
              "                        [-2.9532e-03, -2.1181e-02, -2.4247e-02],\n",
              "                        [ 1.4844e-03, -2.1129e-02,  2.1726e-02]],\n",
              "              \n",
              "                       [[ 9.4864e-03, -2.0941e-02, -2.1706e-02],\n",
              "                        [ 1.9050e-02, -7.7990e-03,  3.2030e-03],\n",
              "                        [-2.3174e-02,  2.6368e-04,  1.9447e-02]],\n",
              "              \n",
              "                       [[-1.4135e-02, -3.5601e-03, -6.1072e-03],\n",
              "                        [ 1.6127e-03,  2.3990e-02,  1.8563e-02],\n",
              "                        [-1.1865e-03, -1.0388e-02, -5.0648e-03]]]])),\n",
              "             ('conv5.0.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv5.1.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv5.1.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv5.1.running_mean',\n",
              "              tensor([ 7.2295e-02,  3.4473e-02,  1.1453e-02, -6.5546e-02, -7.7446e-04,\n",
              "                      -7.9191e-03,  2.6453e-03,  2.2707e-02, -4.8553e-03,  4.9636e-02,\n",
              "                       2.5070e-03, -1.5563e-01,  6.3272e-02,  4.2300e-02, -1.9548e-02,\n",
              "                       3.3926e-02, -1.7155e-02,  5.5027e-02,  4.6735e-02,  8.0921e-02,\n",
              "                       1.6021e-02, -4.2785e-03, -1.9923e-02,  1.6699e-03,  3.2847e-02,\n",
              "                      -3.0415e-02, -1.2274e-02,  2.5471e-02, -4.4946e-03, -8.7415e-02,\n",
              "                      -3.7502e-02,  1.7483e-02,  4.5413e-02, -7.9720e-02, -1.0759e-02,\n",
              "                       4.6050e-02, -9.7134e-02,  1.8448e-02,  2.0971e-03,  5.2289e-02,\n",
              "                       5.0750e-03,  1.7443e-02,  1.0546e-01, -7.1348e-02,  2.2424e-02,\n",
              "                       4.9194e-02,  1.1161e-01,  1.4682e-03, -3.6203e-02,  5.6067e-02,\n",
              "                      -4.6075e-02, -2.6847e-02,  1.7063e-02, -5.0332e-02,  7.2362e-02,\n",
              "                      -5.5661e-02,  7.4904e-04,  4.2353e-03, -1.2102e-02, -3.4186e-03,\n",
              "                       3.8755e-02, -1.6270e-02,  3.5091e-03, -3.1093e-02, -8.2633e-02,\n",
              "                      -1.0675e-01,  4.5185e-03, -2.2067e-02, -2.2028e-02,  6.9607e-03,\n",
              "                       3.7559e-02,  1.4112e-02, -4.9107e-02, -3.7997e-02,  2.3899e-03,\n",
              "                      -7.3116e-03, -4.2147e-02, -9.6372e-03, -1.0138e-01,  8.3358e-02,\n",
              "                       2.4160e-02, -1.4033e-02,  4.9094e-02, -3.1403e-02, -4.2145e-02,\n",
              "                       3.9887e-02, -2.6773e-02,  9.1091e-02, -1.2444e-02,  1.9434e-02,\n",
              "                       4.8616e-04,  4.9474e-02,  2.2908e-02,  9.9076e-03,  4.8551e-02,\n",
              "                       1.3698e-01,  7.8812e-02,  2.6266e-02, -6.6487e-02,  4.3001e-02,\n",
              "                       9.9432e-03,  4.6040e-03, -7.5347e-02, -2.5369e-02, -1.0790e-02,\n",
              "                      -6.0037e-02, -9.5666e-02,  6.1720e-03,  6.3877e-03,  1.4795e-02,\n",
              "                       4.5150e-02, -6.3874e-03, -2.8019e-02,  2.4852e-02, -5.4854e-02,\n",
              "                      -2.5051e-02,  3.3256e-03, -8.1402e-03, -8.2288e-02, -7.8620e-02,\n",
              "                      -5.5755e-02, -5.7250e-02,  2.1994e-02, -2.4711e-02, -8.3808e-02,\n",
              "                      -3.4544e-02, -4.0722e-02, -1.2901e-02,  8.2013e-02, -5.0130e-02,\n",
              "                       2.8114e-02, -1.3803e-02,  1.9779e-02,  4.4224e-02,  3.8740e-02,\n",
              "                       1.5206e-03,  4.3800e-02, -3.3444e-02, -2.0508e-02,  4.1362e-02,\n",
              "                       1.6243e-02, -1.2046e-02, -2.2873e-02, -7.0096e-02, -1.6120e-02,\n",
              "                       8.0616e-02, -3.2151e-02,  7.6196e-02, -1.5328e-03,  1.5180e-02,\n",
              "                      -1.0174e-01,  7.1023e-02, -6.2742e-02, -1.1783e-02, -5.0738e-02,\n",
              "                       1.4222e-02,  8.7595e-03, -7.4683e-03, -6.6705e-02,  5.2780e-02,\n",
              "                      -5.6629e-02,  7.0740e-02,  4.4833e-02, -3.2950e-02,  5.0730e-03,\n",
              "                      -6.5129e-03,  3.4453e-02,  6.8251e-02, -2.0310e-02,  5.5678e-02,\n",
              "                       4.9920e-02,  8.0009e-03,  2.6679e-02,  3.0098e-02,  3.5465e-02,\n",
              "                       2.8966e-02,  1.5662e-03, -2.5290e-02,  2.5593e-02, -3.1540e-02,\n",
              "                       4.8080e-02,  2.0464e-02, -8.1019e-03, -1.0192e-01, -4.5777e-02,\n",
              "                      -5.7418e-02, -5.6367e-02, -9.2667e-02,  4.1506e-02, -2.5589e-02,\n",
              "                       5.1156e-02,  2.2953e-02,  2.1986e-03,  6.7943e-03, -5.0905e-02,\n",
              "                       2.8868e-02, -3.4036e-03,  1.1289e-02,  1.9226e-02,  4.2165e-02,\n",
              "                       1.1872e-01,  9.1409e-03, -2.2086e-02, -1.0722e-02,  4.3483e-02,\n",
              "                       4.1746e-02,  1.1999e-01,  5.6161e-02, -7.7375e-02,  7.1035e-03,\n",
              "                      -9.2508e-02,  9.6143e-03,  1.0657e-02, -8.2331e-02,  6.9682e-02,\n",
              "                       7.5534e-02,  1.0007e-01,  3.3093e-02,  8.5591e-03, -4.0995e-02,\n",
              "                       1.0795e-01, -1.9835e-04, -2.7045e-02, -2.3572e-02, -7.5611e-02,\n",
              "                       2.5533e-02, -4.9952e-03,  4.0136e-02, -1.0776e-01,  8.1552e-03,\n",
              "                      -3.0291e-02,  8.0783e-02,  3.4677e-03,  1.4032e-02, -4.0244e-03,\n",
              "                       3.3254e-02,  3.6326e-02,  2.2523e-02,  7.2021e-02, -2.9165e-02,\n",
              "                       3.7263e-02, -6.9172e-02,  5.4285e-03,  4.0867e-02, -4.3770e-02,\n",
              "                      -2.3695e-02,  8.0708e-03,  6.3045e-02,  2.8658e-02, -5.7880e-02,\n",
              "                      -3.7118e-02,  4.8868e-02,  4.9888e-02,  3.4951e-02, -1.4099e-02,\n",
              "                      -5.4054e-02,  2.0531e-02,  2.6274e-02,  5.5422e-02,  3.3101e-02,\n",
              "                       5.3333e-02,  5.0948e-02,  4.5072e-02,  2.3384e-03,  6.7140e-02,\n",
              "                       1.1973e-02, -9.9885e-03, -3.5614e-02,  6.7015e-02,  7.0112e-02,\n",
              "                      -2.8701e-02,  7.7450e-02,  4.1088e-02,  2.2093e-02,  2.0023e-03,\n",
              "                       2.7006e-02, -1.1739e-02, -5.3643e-02,  1.7987e-02, -3.5664e-02,\n",
              "                      -5.5485e-02, -3.2545e-02, -6.1495e-02,  7.4333e-03, -3.6993e-02,\n",
              "                      -5.0562e-02,  2.3988e-02,  7.9666e-02, -5.1251e-02,  3.4065e-02,\n",
              "                      -6.7304e-02,  1.2239e-03,  5.6671e-02,  2.7548e-02, -6.0259e-02,\n",
              "                       3.5154e-02,  3.8442e-02, -1.2198e-02,  8.8379e-02,  6.8536e-04,\n",
              "                       8.6751e-02,  2.7147e-02,  6.1862e-02, -1.5393e-02,  4.1285e-02,\n",
              "                       1.2539e-02,  6.4159e-02,  8.1538e-03,  1.2091e-02,  3.8552e-02,\n",
              "                       2.8573e-02,  3.2216e-03, -9.7184e-02, -3.3319e-03, -9.1292e-02,\n",
              "                      -2.0005e-02,  1.0434e-02,  9.2735e-02, -6.3390e-02, -1.9898e-02,\n",
              "                       2.5802e-02,  5.5487e-02, -1.1267e-02,  2.2545e-02, -4.1206e-03,\n",
              "                      -5.2510e-02,  3.1911e-02,  6.0522e-02,  2.6692e-02, -4.4144e-02,\n",
              "                       1.2291e-04, -1.8656e-02, -1.4740e-02,  3.4433e-02,  2.1358e-02,\n",
              "                       9.3070e-02,  1.4322e-02, -2.8088e-02,  1.7867e-02, -5.7796e-02,\n",
              "                       5.4769e-02,  8.4275e-02, -1.5022e-02, -4.3172e-03,  6.2047e-02,\n",
              "                       1.2404e-02,  5.2259e-02,  4.3449e-04,  2.9790e-02, -6.2742e-02,\n",
              "                      -1.6871e-02,  3.1893e-02,  3.1110e-03, -3.1069e-02,  2.5673e-02,\n",
              "                      -1.3939e-02,  3.7829e-02, -1.3226e-04, -9.7542e-03,  6.4862e-02,\n",
              "                      -1.5642e-02,  1.6077e-02, -2.8026e-02,  5.9114e-02, -9.9678e-02,\n",
              "                       2.7863e-02,  9.8896e-02, -2.7851e-02,  4.1899e-03,  7.3845e-02,\n",
              "                       4.0524e-02, -5.7010e-02, -3.6340e-02,  1.0772e-02, -3.2997e-02,\n",
              "                      -7.6205e-02,  3.6164e-02,  1.2615e-02, -6.8493e-02,  1.6410e-03,\n",
              "                       7.6729e-03,  2.1501e-02,  5.0125e-02, -1.3705e-02, -6.7698e-02,\n",
              "                      -2.3878e-02, -4.3799e-02,  9.5904e-02,  1.7907e-03, -1.1797e-01,\n",
              "                       8.2960e-03, -3.6230e-02,  7.8765e-03,  6.0072e-02,  4.6670e-02,\n",
              "                      -4.4883e-02,  5.2399e-02,  3.1531e-02,  6.4318e-02,  3.1632e-02,\n",
              "                       5.7465e-02,  7.3657e-03,  6.3175e-03,  7.1216e-03, -4.4533e-02,\n",
              "                      -2.5110e-02, -2.0539e-02,  7.0967e-02, -7.1534e-02,  1.6763e-02,\n",
              "                      -4.7502e-02, -3.8494e-02, -2.5815e-02, -7.4428e-03, -4.8318e-02,\n",
              "                      -3.0271e-02,  1.2975e-02, -7.2157e-04,  9.3109e-03,  1.7141e-02,\n",
              "                       1.1890e-02,  1.4425e-03,  7.0907e-02,  2.2749e-02,  5.5212e-02,\n",
              "                      -3.6039e-02,  5.8936e-02, -3.1935e-02, -4.4403e-02, -2.4020e-03,\n",
              "                      -4.3473e-02, -6.1404e-02,  4.7569e-02,  2.0777e-03, -4.1034e-02,\n",
              "                       1.4741e-02,  2.2627e-02,  7.8346e-02, -6.3957e-02, -2.3471e-02,\n",
              "                       2.7012e-02, -3.4851e-02, -2.2881e-02, -4.8160e-02,  5.7303e-03,\n",
              "                       2.5680e-02,  2.5040e-02, -2.9431e-02,  2.8786e-02, -1.7706e-03,\n",
              "                      -2.7414e-02,  1.5815e-03,  3.1480e-02,  1.7516e-02,  3.9752e-02,\n",
              "                      -2.4209e-02,  4.8261e-02,  2.2773e-02, -3.3296e-02,  3.2972e-02,\n",
              "                       8.1589e-03,  1.6574e-02,  3.4506e-02,  4.1201e-02,  3.9530e-02,\n",
              "                       3.9926e-03, -4.8358e-02,  3.8268e-02, -4.2409e-02,  8.0647e-03,\n",
              "                       8.4765e-02, -1.8755e-02,  4.3292e-02,  4.9565e-02, -1.3557e-03,\n",
              "                      -2.0168e-02, -1.9507e-02, -4.1724e-02,  3.7718e-02,  5.4530e-02,\n",
              "                      -5.7222e-02, -8.4996e-02,  5.5094e-03,  3.9479e-02,  5.4217e-03,\n",
              "                       2.4854e-02, -6.6979e-02,  5.0579e-02, -3.7410e-02, -6.9152e-03,\n",
              "                       4.0831e-02,  1.9559e-02,  1.2211e-02,  2.8423e-02,  1.8482e-02,\n",
              "                       2.7295e-02, -3.3401e-02, -2.9465e-02,  6.1742e-02, -1.7505e-02,\n",
              "                      -5.1281e-02, -1.1333e-01, -8.6161e-02, -3.7585e-02,  2.7166e-02,\n",
              "                       1.2877e-02, -2.8665e-02,  2.9423e-02, -4.1357e-02, -4.5100e-02,\n",
              "                       2.0216e-02, -1.8447e-02])),\n",
              "             ('conv5.1.running_var',\n",
              "              tensor([0.9551, 0.9335, 0.9494, 0.9285, 0.9516, 0.9561, 0.9354, 0.9394, 0.9532,\n",
              "                      0.9358, 0.9674, 0.9414, 0.9382, 0.9452, 0.9232, 0.9268, 0.9319, 0.9370,\n",
              "                      0.9410, 0.9488, 0.9453, 0.9506, 0.9349, 0.9543, 0.9649, 0.9299, 0.9344,\n",
              "                      0.9388, 0.9556, 0.9458, 0.9333, 0.9254, 0.9273, 0.9539, 0.9258, 0.9287,\n",
              "                      0.9423, 0.9210, 0.9783, 0.9687, 0.9339, 0.9255, 0.9300, 0.9428, 0.9386,\n",
              "                      0.9301, 0.9371, 0.9790, 0.9541, 0.9323, 0.9526, 0.9365, 0.9181, 0.9356,\n",
              "                      0.9359, 0.9244, 0.9817, 1.0760, 0.9333, 0.9294, 0.9284, 0.9320, 0.9436,\n",
              "                      0.9331, 0.9253, 0.9617, 0.9527, 0.9235, 0.9552, 0.9500, 0.9634, 0.9392,\n",
              "                      0.9575, 0.9281, 0.9163, 0.9387, 0.9585, 0.9326, 0.9891, 0.9403, 0.9322,\n",
              "                      0.9603, 0.9415, 0.9423, 0.9661, 0.9282, 0.9293, 0.9285, 0.9926, 0.9983,\n",
              "                      0.9313, 0.9401, 0.9317, 0.9336, 0.9337, 0.9459, 0.9360, 0.9377, 0.9371,\n",
              "                      1.0159, 0.9502, 0.9268, 0.9345, 0.9624, 0.9217, 0.9334, 0.9265, 0.9567,\n",
              "                      0.9268, 0.9509, 0.9336, 0.9287, 0.9284, 0.9310, 0.9559, 0.9291, 0.9534,\n",
              "                      0.9590, 0.9398, 0.9448, 0.9406, 0.9275, 0.9778, 0.9335, 0.9402, 0.9454,\n",
              "                      0.9520, 0.9886, 1.0129, 0.9465, 1.0025, 0.9258, 0.9556, 0.9247, 0.9504,\n",
              "                      0.9339, 0.9357, 0.9303, 0.9309, 0.9384, 0.9214, 0.9413, 0.9264, 0.9180,\n",
              "                      0.9183, 0.9332, 0.9368, 0.9646, 0.9509, 0.9255, 0.9313, 0.9352, 0.9265,\n",
              "                      0.9770, 0.9755, 0.9206, 0.9425, 0.9338, 0.9258, 0.9258, 0.9164, 0.9527,\n",
              "                      0.9352, 0.9416, 0.9272, 0.9302, 0.9666, 0.9315, 0.9185, 0.9511, 0.9987,\n",
              "                      0.9809, 0.9485, 0.9195, 0.9686, 0.9265, 0.9261, 0.9311, 0.9655, 0.9447,\n",
              "                      0.9234, 0.9184, 0.9185, 0.9541, 0.9345, 0.9842, 0.9633, 0.9471, 0.9253,\n",
              "                      0.9220, 0.9306, 0.9289, 0.9703, 0.9445, 0.9401, 0.9561, 0.9608, 0.9699,\n",
              "                      0.9178, 0.9313, 0.9294, 0.9639, 0.9388, 0.9274, 0.9497, 0.9316, 0.9230,\n",
              "                      0.9308, 0.9972, 0.9483, 0.9342, 0.9691, 0.9266, 0.9308, 0.9295, 0.9352,\n",
              "                      0.9546, 0.9407, 0.9697, 0.9328, 0.9394, 0.9361, 0.9433, 0.9535, 0.9346,\n",
              "                      0.9514, 0.9408, 0.9338, 0.9526, 0.9243, 0.9279, 0.9552, 0.9304, 0.9260,\n",
              "                      0.9421, 0.9353, 0.9201, 0.9338, 0.9293, 0.9263, 0.9299, 0.9799, 0.9273,\n",
              "                      0.9676, 0.9330, 0.9483, 0.9763, 0.9446, 0.9441, 0.9280, 0.9230, 0.9252,\n",
              "                      0.9822, 0.9311, 0.9297, 0.9167, 0.9550, 0.9741, 0.9478, 0.9228, 0.9397,\n",
              "                      0.9800, 0.9149, 0.9388, 0.9515, 0.9383, 0.9835, 0.9407, 0.9272, 0.9337,\n",
              "                      0.9263, 0.9434, 0.9290, 0.9419, 0.9739, 0.9478, 0.9246, 0.9807, 0.9181,\n",
              "                      0.9957, 0.9446, 0.9220, 0.9490, 0.9287, 0.9535, 0.9473, 0.9394, 0.9182,\n",
              "                      0.9607, 0.9939, 0.9291, 0.9868, 0.9416, 0.9459, 0.9624, 0.9287, 0.9948,\n",
              "                      0.9296, 0.9348, 0.9799, 1.0241, 0.9186, 0.9346, 0.9276, 0.9434, 0.9483,\n",
              "                      0.9801, 0.9325, 0.9359, 0.9349, 0.9418, 0.9266, 0.9423, 0.9473, 0.9504,\n",
              "                      0.9234, 0.9640, 0.9462, 0.9226, 0.9553, 0.9480, 0.9402, 0.9391, 0.9663,\n",
              "                      0.9638, 0.9646, 0.9190, 0.9705, 0.9178, 0.9442, 0.9307, 0.9355, 0.9261,\n",
              "                      0.9245, 0.9945, 0.9285, 0.9162, 0.9383, 0.9357, 0.9238, 0.9251, 1.0064,\n",
              "                      0.9402, 0.9280, 0.9339, 0.9413, 0.9378, 0.9470, 0.9248, 0.9621, 0.9276,\n",
              "                      0.9616, 0.9263, 0.9492, 1.0196, 0.9232, 0.9474, 0.9210, 0.9670, 0.9299,\n",
              "                      0.9319, 0.9240, 0.9282, 0.9338, 0.9625, 0.9549, 0.9554, 0.9553, 0.9267,\n",
              "                      0.9418, 0.9301, 0.9212, 0.9470, 0.9369, 0.9365, 0.9412, 0.9487, 0.9295,\n",
              "                      0.9268, 0.9384, 0.9254, 0.9241, 0.9218, 0.9479, 0.9248, 0.9414, 0.9211,\n",
              "                      0.9286, 0.9333, 0.9445, 0.9331, 0.9251, 0.9462, 0.9274, 0.9291, 0.9255,\n",
              "                      0.9607, 0.9340, 0.9300, 0.9647, 0.9535, 0.9503, 0.9193, 0.9370, 0.9222,\n",
              "                      0.9846, 0.9614, 0.9252, 0.9711, 0.9638, 0.9300, 0.9536, 0.9302, 0.9193,\n",
              "                      0.9416, 0.9458, 0.9511, 0.9226, 0.9429, 0.9694, 0.9389, 0.9448, 0.9214,\n",
              "                      0.9223, 0.9350, 0.9346, 0.9288, 0.9284, 0.9789, 0.9354, 0.9236, 0.9470,\n",
              "                      0.9199, 0.9959, 0.9175, 0.9725, 0.9446, 0.9356, 0.9323, 0.9233, 0.9457,\n",
              "                      0.9322, 0.9311, 0.9355, 0.9305, 0.9276, 0.9392, 0.9483, 0.9233, 0.9245,\n",
              "                      0.9403, 0.9471, 0.9436, 0.9612, 0.9261, 0.9313, 0.9205, 0.9230, 0.9219,\n",
              "                      0.9537, 0.9448, 0.9695, 0.9646, 0.9224, 0.9367, 0.9250, 0.9423, 0.9271,\n",
              "                      0.9289, 0.9304, 0.9272, 0.9271, 0.9266, 0.9210, 0.9400, 0.9260, 0.9253,\n",
              "                      0.9331, 0.9404, 0.9291, 0.9490, 0.9342, 0.9304, 0.9209, 0.9284, 0.9216,\n",
              "                      0.9271, 0.9762, 0.9407, 0.9172, 0.9450, 0.9233, 0.9382, 0.9397, 0.9265,\n",
              "                      0.9321, 0.9444, 0.9389, 0.9324, 0.9458, 0.9721, 0.9386, 0.9317, 0.9549,\n",
              "                      0.9422, 0.9210, 0.9349, 1.0041, 0.9859, 0.9197, 0.9226, 0.9623])),\n",
              "             ('conv5.1.num_batches_tracked', tensor(1)),\n",
              "             ('conv5.3.weight',\n",
              "              tensor([[[[ 0.0255,  0.0165, -0.0111],\n",
              "                        [-0.0232, -0.0100, -0.0125],\n",
              "                        [-0.0217, -0.0165, -0.0238]],\n",
              "              \n",
              "                       [[ 0.0051, -0.0122,  0.0245],\n",
              "                        [-0.0056,  0.0078, -0.0176],\n",
              "                        [-0.0151, -0.0056, -0.0101]],\n",
              "              \n",
              "                       [[-0.0064,  0.0225, -0.0227],\n",
              "                        [-0.0216,  0.0154, -0.0139],\n",
              "                        [ 0.0023, -0.0015,  0.0103]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0097,  0.0188,  0.0020],\n",
              "                        [-0.0190,  0.0233, -0.0099],\n",
              "                        [-0.0170,  0.0047,  0.0192]],\n",
              "              \n",
              "                       [[ 0.0173,  0.0111,  0.0121],\n",
              "                        [-0.0215, -0.0041,  0.0213],\n",
              "                        [-0.0063,  0.0071,  0.0069]],\n",
              "              \n",
              "                       [[ 0.0010,  0.0014, -0.0175],\n",
              "                        [-0.0075, -0.0113, -0.0213],\n",
              "                        [-0.0187,  0.0246, -0.0149]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0071, -0.0168, -0.0175],\n",
              "                        [-0.0117, -0.0074,  0.0106],\n",
              "                        [ 0.0233,  0.0206, -0.0017]],\n",
              "              \n",
              "                       [[ 0.0125,  0.0105,  0.0246],\n",
              "                        [ 0.0056, -0.0091, -0.0189],\n",
              "                        [-0.0178, -0.0030,  0.0240]],\n",
              "              \n",
              "                       [[-0.0160, -0.0202,  0.0210],\n",
              "                        [-0.0127,  0.0168, -0.0110],\n",
              "                        [-0.0241,  0.0007,  0.0052]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0079, -0.0242,  0.0092],\n",
              "                        [-0.0080,  0.0074, -0.0057],\n",
              "                        [ 0.0039, -0.0251, -0.0189]],\n",
              "              \n",
              "                       [[ 0.0168,  0.0188, -0.0159],\n",
              "                        [-0.0183, -0.0191, -0.0241],\n",
              "                        [ 0.0212, -0.0224,  0.0186]],\n",
              "              \n",
              "                       [[-0.0185, -0.0218,  0.0210],\n",
              "                        [ 0.0114,  0.0046, -0.0011],\n",
              "                        [-0.0054,  0.0183, -0.0227]]],\n",
              "              \n",
              "              \n",
              "                      [[[-0.0247, -0.0098, -0.0175],\n",
              "                        [ 0.0231,  0.0165, -0.0049],\n",
              "                        [ 0.0176, -0.0070,  0.0175]],\n",
              "              \n",
              "                       [[-0.0227,  0.0089, -0.0098],\n",
              "                        [ 0.0204,  0.0179,  0.0020],\n",
              "                        [ 0.0076, -0.0086, -0.0225]],\n",
              "              \n",
              "                       [[ 0.0140,  0.0005,  0.0034],\n",
              "                        [ 0.0191, -0.0091, -0.0246],\n",
              "                        [ 0.0100, -0.0053,  0.0202]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0092,  0.0075,  0.0063],\n",
              "                        [-0.0204,  0.0112,  0.0039],\n",
              "                        [ 0.0207, -0.0193, -0.0049]],\n",
              "              \n",
              "                       [[-0.0079,  0.0002, -0.0149],\n",
              "                        [-0.0207, -0.0187, -0.0210],\n",
              "                        [ 0.0113, -0.0155,  0.0250]],\n",
              "              \n",
              "                       [[ 0.0084, -0.0219,  0.0043],\n",
              "                        [-0.0179,  0.0233, -0.0103],\n",
              "                        [-0.0026,  0.0187, -0.0163]]],\n",
              "              \n",
              "              \n",
              "                      ...,\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0237,  0.0066,  0.0060],\n",
              "                        [-0.0221, -0.0188,  0.0079],\n",
              "                        [-0.0161,  0.0110, -0.0073]],\n",
              "              \n",
              "                       [[-0.0120,  0.0236,  0.0024],\n",
              "                        [ 0.0050,  0.0089,  0.0153],\n",
              "                        [ 0.0190,  0.0145, -0.0011]],\n",
              "              \n",
              "                       [[-0.0131,  0.0194,  0.0217],\n",
              "                        [-0.0096,  0.0186, -0.0157],\n",
              "                        [-0.0024,  0.0129, -0.0243]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0002,  0.0225, -0.0107],\n",
              "                        [-0.0175, -0.0004, -0.0169],\n",
              "                        [-0.0180,  0.0207, -0.0212]],\n",
              "              \n",
              "                       [[-0.0158, -0.0116,  0.0084],\n",
              "                        [-0.0077, -0.0145,  0.0172],\n",
              "                        [-0.0078, -0.0131,  0.0192]],\n",
              "              \n",
              "                       [[-0.0015,  0.0254,  0.0197],\n",
              "                        [-0.0068, -0.0023,  0.0022],\n",
              "                        [-0.0005, -0.0141, -0.0243]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0186, -0.0245, -0.0059],\n",
              "                        [ 0.0130,  0.0156, -0.0212],\n",
              "                        [ 0.0094,  0.0102, -0.0010]],\n",
              "              \n",
              "                       [[-0.0077, -0.0031, -0.0063],\n",
              "                        [-0.0018,  0.0093, -0.0052],\n",
              "                        [-0.0244,  0.0230,  0.0003]],\n",
              "              \n",
              "                       [[-0.0176, -0.0163, -0.0134],\n",
              "                        [ 0.0219, -0.0060,  0.0227],\n",
              "                        [-0.0130,  0.0049,  0.0031]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[-0.0087,  0.0251, -0.0101],\n",
              "                        [ 0.0081,  0.0089,  0.0036],\n",
              "                        [-0.0146, -0.0157, -0.0100]],\n",
              "              \n",
              "                       [[ 0.0254, -0.0047,  0.0072],\n",
              "                        [ 0.0087, -0.0204,  0.0004],\n",
              "                        [-0.0155, -0.0082, -0.0114]],\n",
              "              \n",
              "                       [[ 0.0228,  0.0175,  0.0143],\n",
              "                        [ 0.0042,  0.0194, -0.0112],\n",
              "                        [ 0.0123,  0.0179,  0.0113]]],\n",
              "              \n",
              "              \n",
              "                      [[[ 0.0136, -0.0097, -0.0204],\n",
              "                        [ 0.0171,  0.0078,  0.0244],\n",
              "                        [-0.0048, -0.0008,  0.0234]],\n",
              "              \n",
              "                       [[ 0.0154, -0.0246, -0.0044],\n",
              "                        [-0.0184, -0.0242,  0.0153],\n",
              "                        [-0.0103,  0.0238,  0.0157]],\n",
              "              \n",
              "                       [[-0.0119,  0.0218,  0.0030],\n",
              "                        [ 0.0079,  0.0007, -0.0117],\n",
              "                        [-0.0132, -0.0022, -0.0232]],\n",
              "              \n",
              "                       ...,\n",
              "              \n",
              "                       [[ 0.0156,  0.0181, -0.0068],\n",
              "                        [-0.0114,  0.0062,  0.0006],\n",
              "                        [-0.0230, -0.0083,  0.0186]],\n",
              "              \n",
              "                       [[-0.0093, -0.0067, -0.0104],\n",
              "                        [ 0.0220,  0.0131, -0.0239],\n",
              "                        [-0.0180,  0.0055,  0.0197]],\n",
              "              \n",
              "                       [[ 0.0179,  0.0081, -0.0224],\n",
              "                        [-0.0043,  0.0105,  0.0238],\n",
              "                        [-0.0045,  0.0202,  0.0240]]]])),\n",
              "             ('conv5.3.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv5.4.weight',\n",
              "              tensor([1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n",
              "                      1., 1., 1., 1., 1., 1., 1., 1.])),\n",
              "             ('conv5.4.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
              "                      0., 0., 0., 0., 0., 0., 0., 0.])),\n",
              "             ('conv5.4.running_mean',\n",
              "              tensor([ 5.2712e-02,  1.6303e-03,  8.9063e-03,  1.3485e-02,  3.9466e-02,\n",
              "                      -1.5451e-02,  4.1007e-02,  3.0255e-02, -1.6744e-02, -1.9805e-02,\n",
              "                       2.1173e-02,  1.8918e-03, -1.2371e-02, -7.0480e-03,  1.5635e-02,\n",
              "                      -5.2207e-03, -2.2765e-02,  3.8353e-02,  3.4792e-03, -1.6592e-02,\n",
              "                       6.0534e-03, -1.9009e-02,  2.2602e-02, -2.5700e-04,  4.0654e-02,\n",
              "                      -1.4516e-02,  3.9714e-02,  1.2574e-02, -8.8240e-03, -2.0958e-02,\n",
              "                       1.9795e-02,  6.7475e-03, -4.3497e-03, -5.7995e-03,  3.2460e-02,\n",
              "                      -1.6805e-02,  3.4160e-03,  1.4514e-03, -3.9313e-03, -4.6513e-02,\n",
              "                       2.8112e-02, -2.6173e-02, -3.8341e-02,  4.7158e-02,  9.9651e-03,\n",
              "                       7.0388e-03,  4.2722e-03,  2.6084e-02, -2.7835e-02, -4.6412e-02,\n",
              "                      -6.4786e-03, -3.6508e-03,  1.4683e-02, -1.2521e-03, -1.0905e-02,\n",
              "                       8.2487e-03,  3.2418e-02,  2.0974e-02, -3.0821e-02,  1.0684e-02,\n",
              "                       1.7748e-02, -2.0299e-02, -8.7148e-03,  2.1002e-02,  2.3375e-02,\n",
              "                      -3.0646e-02,  9.7926e-03, -1.4134e-02,  3.0883e-02, -2.3610e-02,\n",
              "                      -1.9020e-02, -2.3904e-02,  1.2957e-02,  2.0167e-02, -6.4399e-02,\n",
              "                      -2.4943e-02,  2.1243e-02,  2.7054e-02, -5.7761e-02, -3.4686e-03,\n",
              "                       2.6877e-02, -1.3153e-02,  1.9091e-03,  5.0410e-03,  9.1388e-03,\n",
              "                       9.3305e-03,  1.6944e-02,  1.2878e-03, -4.4789e-02, -4.3855e-02,\n",
              "                      -1.8359e-02, -4.1631e-05,  1.8910e-02, -2.6532e-02,  3.7023e-03,\n",
              "                       2.0628e-02, -2.9123e-02,  2.0975e-02,  9.8536e-03,  2.0421e-02,\n",
              "                      -6.3710e-03, -2.3773e-02,  1.9791e-02, -2.5644e-02,  1.4164e-02,\n",
              "                       4.7063e-03, -9.3293e-03, -1.2447e-02,  4.9227e-02,  4.9763e-02,\n",
              "                       1.5479e-02,  3.4898e-02, -1.7907e-02,  2.1961e-02,  2.6044e-02,\n",
              "                       3.2721e-02,  2.3159e-03,  2.4642e-02,  2.3734e-02, -7.6820e-03,\n",
              "                      -2.2946e-02,  4.7864e-03, -2.9005e-02, -8.8111e-03, -4.0088e-02,\n",
              "                      -2.3124e-02, -2.6500e-03, -6.4502e-02, -1.3295e-02, -2.7297e-02,\n",
              "                       1.6658e-02,  1.7727e-02, -1.7994e-02,  1.9817e-02,  9.9535e-03,\n",
              "                      -2.7087e-04,  2.2786e-03,  7.9729e-03,  2.2107e-02, -2.7304e-02,\n",
              "                       1.4338e-03, -1.6506e-02, -9.1516e-03, -2.1984e-02,  2.3204e-02,\n",
              "                      -2.0972e-02, -3.6667e-02,  2.5047e-02, -2.6566e-02, -1.9566e-03,\n",
              "                       7.7658e-04,  2.9245e-02,  7.5782e-03,  1.7644e-02,  1.8078e-02,\n",
              "                      -8.8285e-04, -1.8279e-02,  1.3372e-03, -4.4131e-04,  2.4735e-03,\n",
              "                      -2.1528e-02, -6.8648e-03,  7.2923e-02, -1.5668e-02, -1.4631e-02,\n",
              "                      -2.3054e-02, -1.5922e-02,  3.9621e-02,  4.4390e-03,  3.2373e-02,\n",
              "                       1.1487e-02, -3.2913e-02, -1.2677e-02,  1.5073e-03,  2.7245e-02,\n",
              "                      -3.9818e-02,  2.9833e-03, -7.6084e-04,  1.0903e-02, -1.4268e-02,\n",
              "                       4.0407e-04, -1.4568e-02, -4.0524e-02, -7.1591e-03, -1.7705e-02,\n",
              "                      -2.9733e-03, -5.5899e-02,  6.7305e-03, -3.8240e-02, -1.0030e-02,\n",
              "                       1.8436e-02, -9.2754e-03, -1.0331e-02, -2.7246e-02, -1.2557e-02,\n",
              "                       3.4366e-02,  2.5594e-02,  2.2160e-03, -2.3696e-02,  1.9463e-02,\n",
              "                       9.2978e-03,  1.8740e-02,  2.0193e-02,  1.1234e-02, -8.8885e-03,\n",
              "                      -7.6304e-03,  2.4164e-02,  3.2900e-02,  6.7882e-04,  4.0717e-02,\n",
              "                      -4.2020e-03, -9.9069e-03, -6.4629e-02,  2.2296e-03, -1.6981e-02,\n",
              "                      -1.0431e-03,  5.9155e-03, -8.7003e-03,  8.6006e-03,  7.2093e-03,\n",
              "                      -2.3581e-02, -5.8386e-03,  3.0134e-02,  5.1546e-02,  3.2423e-03,\n",
              "                      -9.4348e-03,  2.1064e-02, -1.1639e-02,  3.7933e-02, -2.1540e-02,\n",
              "                       9.5175e-03,  3.5080e-03,  2.3893e-02, -1.1283e-02, -1.3186e-02,\n",
              "                       5.2426e-03,  9.3829e-03,  2.8945e-02, -7.5054e-03, -1.3866e-02,\n",
              "                      -1.4108e-02, -5.4130e-03, -1.1938e-02,  4.2522e-03, -5.2646e-04,\n",
              "                      -1.5801e-02,  1.0522e-03,  7.8176e-03, -1.0868e-02, -3.9166e-02,\n",
              "                      -2.2868e-03,  3.6574e-02,  5.7647e-03, -8.6277e-03,  2.9617e-02,\n",
              "                      -1.4973e-02, -5.7285e-02,  3.9696e-03,  1.4286e-02,  2.3461e-03,\n",
              "                      -4.4354e-03, -5.1043e-03,  4.4457e-04,  1.1935e-02, -1.3487e-02,\n",
              "                      -3.2642e-03,  5.8707e-03,  7.8261e-03,  6.2055e-02,  4.1710e-03,\n",
              "                       1.4596e-02, -2.3778e-02,  1.8958e-02,  1.6896e-02,  1.6694e-02,\n",
              "                      -5.9872e-03, -1.3729e-02, -2.3310e-02,  4.1822e-02,  2.5346e-02,\n",
              "                       1.6919e-02,  1.2198e-02, -1.0087e-02, -1.3242e-02,  1.1846e-02,\n",
              "                       2.1650e-02, -3.2154e-02,  1.0290e-02,  1.8750e-03,  1.7188e-02,\n",
              "                      -1.6292e-02, -2.8725e-02, -3.3247e-02,  1.4748e-03,  1.2578e-02,\n",
              "                       8.7999e-03,  1.0832e-02, -2.8067e-02, -1.8938e-02,  1.9368e-03,\n",
              "                       5.6042e-04, -1.5293e-04,  1.2753e-03,  2.5542e-02,  3.9766e-02,\n",
              "                       1.0048e-02,  3.0280e-02,  3.2440e-03, -2.8752e-03, -2.3559e-02,\n",
              "                       1.2594e-02,  4.0065e-03,  2.0264e-02,  1.7712e-02,  1.6875e-03,\n",
              "                       1.2727e-02,  5.3304e-02,  1.1126e-02, -8.6381e-03, -7.9883e-03,\n",
              "                       9.2762e-03,  2.2328e-02,  4.1657e-06, -2.2479e-02, -3.7970e-02,\n",
              "                       1.9713e-02,  2.2671e-02,  1.2881e-02, -1.0407e-02, -1.6736e-02,\n",
              "                      -5.2750e-03, -1.0167e-02,  9.9187e-03,  5.2919e-04,  3.7523e-03,\n",
              "                       3.1119e-02,  2.2420e-02, -1.3367e-02,  1.9545e-02, -2.4421e-02,\n",
              "                      -1.0185e-02, -1.5210e-02, -4.8901e-03, -3.6452e-02,  1.9538e-02,\n",
              "                      -2.3953e-02,  1.9193e-03,  7.7771e-03,  2.6322e-02,  5.0457e-03,\n",
              "                      -3.2748e-03, -1.8570e-02, -3.1736e-02,  1.6424e-02,  3.7663e-03,\n",
              "                       1.4722e-02,  3.1051e-02, -1.1105e-02,  1.0355e-03,  2.3369e-02,\n",
              "                      -2.1581e-02,  9.8230e-04, -2.7724e-02,  2.0063e-02,  2.2259e-03,\n",
              "                       4.5249e-02,  1.7673e-03,  2.3981e-03, -1.6952e-02, -3.1040e-02,\n",
              "                      -3.4604e-02,  7.8051e-03,  1.0336e-02, -4.5367e-03,  5.8110e-03,\n",
              "                      -2.4012e-02,  3.5235e-04,  1.2635e-03, -2.6404e-02, -3.2959e-02,\n",
              "                       2.1392e-02,  2.3274e-02, -1.4665e-02, -6.0980e-03, -1.4439e-02,\n",
              "                       9.8433e-03,  4.7703e-03,  3.6298e-02, -9.9892e-03,  1.3255e-02,\n",
              "                      -1.0688e-02, -4.9822e-03, -4.0394e-02,  9.8367e-03, -2.0571e-02,\n",
              "                       3.7683e-02, -1.2582e-02,  1.9497e-03,  7.1348e-03, -3.7429e-02,\n",
              "                       1.1939e-03, -4.6368e-03,  2.7080e-02, -1.9436e-02,  1.7951e-02,\n",
              "                       2.1646e-02,  2.4200e-02,  2.7013e-02,  2.1207e-02,  2.1246e-03,\n",
              "                      -2.8446e-02, -1.2383e-02,  1.2061e-02, -4.8189e-02,  2.7086e-02,\n",
              "                      -3.1120e-02, -1.3017e-02,  1.7531e-02, -9.9135e-03, -1.7551e-02,\n",
              "                       1.9255e-02,  2.5833e-03, -4.8615e-03,  5.8079e-03,  1.1844e-03,\n",
              "                      -1.6415e-02,  1.7959e-02, -3.9823e-02, -4.1149e-02, -6.3499e-02,\n",
              "                       7.7608e-03,  1.0891e-02, -1.7358e-03, -1.5258e-02,  1.2500e-02,\n",
              "                      -2.1797e-03,  3.6544e-02,  3.2456e-02,  2.7865e-02,  5.1151e-03,\n",
              "                      -1.1765e-02,  5.3903e-03, -4.8401e-04,  6.2544e-03, -8.4297e-03,\n",
              "                       1.7842e-02, -3.9940e-02, -1.4440e-02,  2.7967e-02,  1.2214e-02,\n",
              "                      -1.8061e-02, -1.0073e-02, -8.9356e-03, -6.0237e-03,  4.1244e-03,\n",
              "                       8.6462e-03,  3.2197e-02,  8.0436e-04, -2.2867e-02,  5.4394e-02,\n",
              "                      -3.6773e-02, -4.1918e-02, -2.6589e-02,  2.1273e-02, -4.5627e-02,\n",
              "                      -1.4945e-03,  4.3842e-02, -5.1090e-03,  3.5666e-02,  4.8378e-03,\n",
              "                      -1.1730e-02,  1.1246e-02,  6.8925e-03, -5.4411e-03,  9.3723e-03,\n",
              "                      -1.9264e-02, -4.4543e-03,  6.1905e-04, -3.0693e-02, -9.8453e-03,\n",
              "                      -3.5959e-02, -1.0056e-02, -4.0758e-02,  3.5790e-02, -1.0643e-02,\n",
              "                      -4.8745e-03,  2.3839e-02,  1.5868e-02,  7.2844e-03,  3.2494e-03,\n",
              "                       8.7154e-03,  1.5860e-02, -1.2711e-02,  7.4929e-03, -4.7385e-02,\n",
              "                       9.4390e-03, -1.1887e-02, -1.2024e-02,  4.0981e-02, -1.2808e-02,\n",
              "                       6.1124e-04, -3.2768e-02,  5.0605e-02,  1.0118e-02, -5.2199e-03,\n",
              "                      -4.1552e-03, -9.0044e-03,  1.1171e-02,  1.1894e-03, -4.0891e-02,\n",
              "                       3.7247e-02, -5.6874e-03])),\n",
              "             ('conv5.4.running_var',\n",
              "              tensor([0.9254, 0.9114, 0.9271, 0.9144, 0.9114, 0.9234, 0.9279, 0.9140, 0.9148,\n",
              "                      0.9148, 0.9259, 0.9168, 0.9185, 0.9211, 0.9143, 0.9142, 0.9092, 0.9109,\n",
              "                      0.9222, 0.9176, 0.9399, 0.9142, 0.9095, 0.9166, 0.9164, 0.9122, 0.9215,\n",
              "                      0.9120, 0.9149, 0.9158, 0.9100, 0.9207, 0.9439, 0.9109, 0.9151, 0.9127,\n",
              "                      0.9162, 0.9265, 0.9138, 0.9244, 0.9265, 0.9153, 0.9139, 0.9123, 0.9127,\n",
              "                      0.9093, 0.9149, 0.9129, 0.9162, 0.9136, 0.9159, 0.9190, 0.9233, 0.9089,\n",
              "                      0.9211, 0.9131, 0.9262, 0.9138, 0.9148, 0.9088, 0.9247, 0.9294, 0.9113,\n",
              "                      0.9144, 0.9108, 0.9093, 0.9146, 0.9128, 0.9094, 0.9183, 0.9140, 0.9509,\n",
              "                      0.9273, 0.9106, 0.9344, 0.9134, 0.9126, 0.9105, 0.9190, 0.9133, 0.9109,\n",
              "                      0.9147, 0.9234, 0.9099, 0.9184, 0.9161, 0.9192, 0.9364, 0.9131, 0.9136,\n",
              "                      0.9122, 0.9111, 0.9225, 0.9217, 0.9176, 0.9212, 0.9220, 0.9318, 0.9171,\n",
              "                      0.9345, 0.9166, 0.9159, 0.9133, 0.9187, 0.9164, 0.9151, 0.9203, 0.9151,\n",
              "                      0.9130, 0.9179, 0.9109, 0.9255, 0.9168, 0.9149, 0.9403, 0.9092, 0.9179,\n",
              "                      0.9114, 0.9135, 0.9173, 0.9362, 0.9159, 0.9165, 0.9144, 0.9252, 0.9132,\n",
              "                      0.9227, 0.9268, 0.9174, 0.9139, 0.9207, 0.9140, 0.9396, 0.9177, 0.9163,\n",
              "                      0.9105, 0.9125, 0.9267, 0.9182, 0.9181, 0.9190, 0.9122, 0.9186, 0.9089,\n",
              "                      0.9189, 0.9158, 0.9210, 0.9197, 0.9261, 0.9126, 0.9205, 0.9190, 0.9155,\n",
              "                      0.9148, 0.9148, 0.9203, 0.9232, 0.9110, 0.9175, 0.9251, 0.9135, 0.9115,\n",
              "                      0.9165, 0.9223, 0.9095, 0.9161, 0.9091, 0.9266, 0.9150, 0.9142, 0.9162,\n",
              "                      0.9203, 0.9164, 0.9223, 0.9187, 0.9100, 0.9138, 0.9209, 0.9217, 0.9117,\n",
              "                      0.9107, 0.9236, 0.9282, 0.9106, 0.9224, 0.9167, 0.9324, 0.9337, 0.9254,\n",
              "                      0.9131, 0.9202, 0.9377, 0.9221, 0.9111, 0.9118, 0.9136, 0.9123, 0.9129,\n",
              "                      0.9183, 0.9217, 0.9241, 0.9131, 0.9362, 0.9101, 0.9142, 0.9130, 0.9127,\n",
              "                      0.9201, 0.9277, 0.9136, 0.9148, 0.9112, 0.9188, 0.9140, 0.9121, 0.9175,\n",
              "                      0.9105, 0.9137, 0.9102, 0.9307, 0.9152, 0.9294, 0.9262, 0.9113, 0.9128,\n",
              "                      0.9160, 0.9186, 0.9192, 0.9251, 0.9119, 0.9119, 0.9161, 0.9195, 0.9174,\n",
              "                      0.9121, 0.9117, 0.9164, 0.9182, 0.9113, 0.9169, 0.9316, 0.9348, 0.9115,\n",
              "                      0.9224, 0.9194, 0.9111, 0.9089, 0.9128, 0.9141, 0.9106, 0.9168, 0.9460,\n",
              "                      0.9181, 0.9240, 0.9342, 0.9145, 0.9297, 0.9185, 0.9176, 0.9369, 0.9120,\n",
              "                      0.9215, 0.9143, 0.9130, 0.9220, 0.9102, 0.9135, 0.9199, 0.9202, 0.9184,\n",
              "                      0.9177, 0.9230, 0.9101, 0.9296, 0.9165, 0.9121, 0.9102, 0.9129, 0.9078,\n",
              "                      0.9120, 0.9146, 0.9194, 0.9174, 0.9224, 0.9132, 0.9141, 0.9093, 0.9193,\n",
              "                      0.9157, 0.9268, 0.9265, 0.9110, 0.9172, 0.9317, 0.9143, 0.9117, 0.9123,\n",
              "                      0.9108, 0.9118, 0.9142, 0.9102, 0.9114, 0.9101, 0.9121, 0.9165, 0.9162,\n",
              "                      0.9145, 0.9238, 0.9320, 0.9212, 0.9188, 0.9149, 0.9133, 0.9139, 0.9107,\n",
              "                      0.9163, 0.9274, 0.9190, 0.9116, 0.9365, 0.9159, 0.9124, 0.9179, 0.9244,\n",
              "                      0.9111, 0.9104, 0.9141, 0.9113, 0.9105, 0.9124, 0.9154, 0.9147, 0.9103,\n",
              "                      0.9140, 0.9137, 0.9096, 0.9252, 0.9163, 0.9150, 0.9105, 0.9169, 0.9128,\n",
              "                      0.9119, 0.9159, 0.9239, 0.9168, 0.9152, 0.9223, 0.9169, 0.9155, 0.9166,\n",
              "                      0.9189, 0.9264, 0.9163, 0.9138, 0.9181, 0.9201, 0.9110, 0.9274, 0.9204,\n",
              "                      0.9261, 0.9302, 0.9200, 0.9129, 0.9241, 0.9339, 0.9217, 0.9133, 0.9095,\n",
              "                      0.9193, 0.9110, 0.9105, 0.9122, 0.9165, 0.9252, 0.9161, 0.9149, 0.9119,\n",
              "                      0.9110, 0.9277, 0.9149, 0.9184, 0.9149, 0.9264, 0.9124, 0.9398, 0.9233,\n",
              "                      0.9172, 0.9240, 0.9337, 0.9106, 0.9138, 0.9211, 0.9109, 0.9149, 0.9128,\n",
              "                      0.9108, 0.9247, 0.9115, 0.9198, 0.9084, 0.9197, 0.9095, 0.9272, 0.9114,\n",
              "                      0.9124, 0.9151, 0.9286, 0.9154, 0.9093, 0.9112, 0.9215, 0.9194, 0.9120,\n",
              "                      0.9158, 0.9170, 0.9165, 0.9138, 0.9301, 0.9162, 0.9097, 0.9222, 0.9138,\n",
              "                      0.9220, 0.9130, 0.9216, 0.9164, 0.9169, 0.9182, 0.9407, 0.9090, 0.9131,\n",
              "                      0.9224, 0.9112, 0.9145, 0.9099, 0.9156, 0.9207, 0.9136, 0.9281, 0.9126,\n",
              "                      0.9233, 0.9222, 0.9157, 0.9283, 0.9163, 0.9163, 0.9357, 0.9240, 0.9113,\n",
              "                      0.9210, 0.9143, 0.9118, 0.9108, 0.9152, 0.9226, 0.9120, 0.9222, 0.9157,\n",
              "                      0.9228, 0.9564, 0.9243, 0.9281, 0.9195, 0.9229, 0.9108, 0.9154, 0.9153,\n",
              "                      0.9219, 0.9158, 0.9286, 0.9263, 0.9156, 0.9151, 0.9148, 0.9119, 0.9166,\n",
              "                      0.9140, 0.9234, 0.9124, 0.9146, 0.9133, 0.9172, 0.9210, 0.9169, 0.9171,\n",
              "                      0.9274, 0.9097, 0.9265, 0.9091, 0.9336, 0.9179, 0.9152, 0.9143, 0.9134,\n",
              "                      0.9172, 0.9134, 0.9075, 0.9166, 0.9155, 0.9168, 0.9427, 0.9170, 0.9136,\n",
              "                      0.9186, 0.9185, 0.9360, 0.9128, 0.9181, 0.9305, 0.9101, 0.9171])),\n",
              "             ('conv5.4.num_batches_tracked', tensor(1)),\n",
              "             ('classifier.0.weight',\n",
              "              tensor([[ 0.0159, -0.0324,  0.0320,  ...,  0.0359,  0.0317, -0.0124],\n",
              "                      [ 0.0069, -0.0245,  0.0165,  ..., -0.0212, -0.0255, -0.0049],\n",
              "                      [ 0.0326, -0.0048, -0.0151,  ...,  0.0162,  0.0144, -0.0291],\n",
              "                      ...,\n",
              "                      [-0.0094, -0.0238,  0.0016,  ...,  0.0304, -0.0243,  0.0159],\n",
              "                      [ 0.0072, -0.0316, -0.0336,  ...,  0.0153,  0.0015,  0.0244],\n",
              "                      [-0.0251, -0.0333,  0.0067,  ...,  0.0065, -0.0251,  0.0299]])),\n",
              "             ('classifier.0.bias', tensor([0., 0., 0.,  ..., 0., 0., 0.])),\n",
              "             ('classifier.3.weight',\n",
              "              tensor([[ 0.0243,  0.0222, -0.0127,  ..., -0.0253,  0.0243,  0.0192],\n",
              "                      [-0.0095, -0.0269,  0.0119,  ..., -0.0107, -0.0194,  0.0036],\n",
              "                      [-0.0129, -0.0196, -0.0219,  ...,  0.0166, -0.0004,  0.0014],\n",
              "                      ...,\n",
              "                      [-0.0172,  0.0208,  0.0054,  ..., -0.0231,  0.0008, -0.0071],\n",
              "                      [ 0.0246, -0.0224, -0.0169,  ..., -0.0056,  0.0257,  0.0145],\n",
              "                      [ 0.0138,  0.0028,  0.0168,  ..., -0.0223,  0.0158, -0.0072]])),\n",
              "             ('classifier.3.bias', tensor([0., 0., 0.,  ..., 0., 0., 0.])),\n",
              "             ('classifier.6.weight',\n",
              "              tensor([[-0.0002, -0.0136, -0.0364,  ..., -0.0342,  0.0215, -0.0137],\n",
              "                      [-0.0289,  0.0234, -0.0212,  ..., -0.0166, -0.0294, -0.0307],\n",
              "                      [ 0.0013, -0.0224,  0.0237,  ...,  0.0263, -0.0100, -0.0196],\n",
              "                      ...,\n",
              "                      [ 0.0369, -0.0279,  0.0274,  ..., -0.0253,  0.0151,  0.0259],\n",
              "                      [ 0.0179, -0.0229, -0.0339,  ...,  0.0023,  0.0352,  0.0003],\n",
              "                      [-0.0352, -0.0260,  0.0234,  ...,  0.0343, -0.0079, -0.0125]])),\n",
              "             ('classifier.6.bias',\n",
              "              tensor([0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]))])"
            ]
          },
          "metadata": {},
          "execution_count": 15
        }
      ],
      "source": [
        "model.state_dict()"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "p_o6dQ3VaGiW"
      },
      "source": [
        "# 设置交叉熵损失函数，SGD优化器"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 16,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:40.023837Z",
          "start_time": "2025-06-26T01:43:40.019952Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "ycFLHrlbaGiW",
        "outputId": "128c8698-cb8e-4475-afad-8cdba1c1f7e2"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "损失函数: CrossEntropyLoss()\n"
          ]
        }
      ],
      "source": [
        "\n",
        "# 定义损失函数和优化器\n",
        "loss_fn = nn.CrossEntropyLoss()  # 交叉熵损失函数，适用于多分类问题，里边会做softmax，还有会把0-9标签转换成one-hot编码\n",
        "\n",
        "print(\"损失函数:\", loss_fn)\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 17,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:43:40.035848Z",
          "start_time": "2025-06-26T01:43:40.032419Z"
        },
        "id": "CVErMt43aGiW"
      },
      "outputs": [],
      "source": [
        "model = VGG11_CIFAR10()\n",
        "\n",
        "optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9)  # SGD优化器，学习率为0.001，动量为0.9"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 18,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.732814Z",
          "start_time": "2025-06-26T01:43:40.035848Z"
        },
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 118,
          "referenced_widgets": [
            "14786a5793af4a6a8adab22fafce3819",
            "5a4fcda909bb4a5b9de631c64811c7db",
            "9106c42ddbfb422198f28c7abfd448bc",
            "37949ba29bd54c389b3bd85bf59e70f9",
            "74a2212a3ba142ff853abbc47ee5ab50",
            "8b8f8329abcc42569b05a078f5f4aff3",
            "a9e94152f03d4ad38cd6c487e880a5c2",
            "f1550c7347964f4691f55dcdc3ba4291",
            "0c37256bf2f6455697b1ad8380f534c9",
            "d3620964d5e74ddc9b5540cb8bcd396a",
            "020c86af32014f5f84dff06106b0b6a8"
          ]
        },
        "id": "Topc5Kb8aGiW",
        "outputId": "b2cf2028-e369-4f50-fc77-c5b727aa94c3"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "使用设备: cuda:0\n",
            "训练开始，共训练35200步\n"
          ]
        },
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "  0%|          | 0/35200 [00:00<?, ?it/s]"
            ],
            "application/vnd.jupyter.widget-view+json": {
              "version_major": 2,
              "version_minor": 0,
              "model_id": "14786a5793af4a6a8adab22fafce3819"
            }
          },
          "metadata": {}
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "早停触发! 最佳验证准确率(如果是回归，则是损失): 77.1200\n",
            "早停: 在14000步时，验证准确率没有提升！\n"
          ]
        }
      ],
      "source": [
        "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
        "print(f\"使用设备: {device}\")\n",
        "model = model.to(device) #将模型移动到GPU\n",
        "early_stopping=EarlyStopping(patience=5, delta=0.001)\n",
        "model_saver=ModelSaver(save_dir='model_weights', save_best_only=True)\n",
        "\n",
        "\n",
        "model, history = train_classification_model(model, train_loader, val_loader, loss_fn, optimizer, device, num_epochs=50,\n",
        "early_stopping=early_stopping, model_saver=model_saver, tensorboard_logger=None)\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 19,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.737721Z",
          "start_time": "2025-06-26T01:45:37.732814Z"
        },
        "collapsed": true,
        "id": "zJvmR6bRaGiW",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "756e5328-682d-4cc8-e81d-6560f5358d0d"
      },
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "[{'loss': 0.031177038326859474, 'acc': 98.4375, 'step': 13901},\n",
              " {'loss': 0.005124994553625584, 'acc': 100.0, 'step': 13902},\n",
              " {'loss': 0.02550998330116272, 'acc': 98.4375, 'step': 13903},\n",
              " {'loss': 0.006953707896173, 'acc': 100.0, 'step': 13904},\n",
              " {'loss': 0.027410905808210373, 'acc': 98.4375, 'step': 13905},\n",
              " {'loss': 0.05206426978111267, 'acc': 98.4375, 'step': 13906},\n",
              " {'loss': 0.020412303507328033, 'acc': 98.4375, 'step': 13907},\n",
              " {'loss': 0.054040662944316864, 'acc': 96.875, 'step': 13908},\n",
              " {'loss': 0.03516220301389694, 'acc': 98.4375, 'step': 13909},\n",
              " {'loss': 0.06160327419638634, 'acc': 98.4375, 'step': 13910},\n",
              " {'loss': 0.005969769321382046, 'acc': 100.0, 'step': 13911},\n",
              " {'loss': 0.004497142508625984, 'acc': 100.0, 'step': 13912},\n",
              " {'loss': 0.07827160507440567, 'acc': 98.4375, 'step': 13913},\n",
              " {'loss': 0.053018953651189804, 'acc': 96.875, 'step': 13914},\n",
              " {'loss': 0.0010499388445168734, 'acc': 100.0, 'step': 13915},\n",
              " {'loss': 0.03898509964346886, 'acc': 98.4375, 'step': 13916},\n",
              " {'loss': 0.0884038507938385, 'acc': 98.4375, 'step': 13917},\n",
              " {'loss': 0.030309418216347694, 'acc': 98.4375, 'step': 13918},\n",
              " {'loss': 0.09972849488258362, 'acc': 95.3125, 'step': 13919},\n",
              " {'loss': 0.009719165042042732, 'acc': 100.0, 'step': 13920},\n",
              " {'loss': 0.06447956711053848, 'acc': 96.875, 'step': 13921},\n",
              " {'loss': 0.026230020448565483, 'acc': 98.4375, 'step': 13922},\n",
              " {'loss': 0.042199838906526566, 'acc': 98.4375, 'step': 13923},\n",
              " {'loss': 0.042753539979457855, 'acc': 98.4375, 'step': 13924},\n",
              " {'loss': 0.11738956719636917, 'acc': 96.875, 'step': 13925},\n",
              " {'loss': 0.043620795011520386, 'acc': 98.4375, 'step': 13926},\n",
              " {'loss': 0.0032873605377972126, 'acc': 100.0, 'step': 13927},\n",
              " {'loss': 0.016480786725878716, 'acc': 100.0, 'step': 13928},\n",
              " {'loss': 0.005665620323270559, 'acc': 100.0, 'step': 13929},\n",
              " {'loss': 0.013497316278517246, 'acc': 100.0, 'step': 13930},\n",
              " {'loss': 0.018484128639101982, 'acc': 98.4375, 'step': 13931},\n",
              " {'loss': 0.006519886199384928, 'acc': 100.0, 'step': 13932},\n",
              " {'loss': 0.020177286118268967, 'acc': 98.4375, 'step': 13933},\n",
              " {'loss': 0.05602408945560455, 'acc': 98.4375, 'step': 13934},\n",
              " {'loss': 0.08140362054109573, 'acc': 96.875, 'step': 13935},\n",
              " {'loss': 0.007974807173013687, 'acc': 100.0, 'step': 13936},\n",
              " {'loss': 0.042592622339725494, 'acc': 98.4375, 'step': 13937},\n",
              " {'loss': 0.01177295297384262, 'acc': 100.0, 'step': 13938},\n",
              " {'loss': 0.1371576189994812, 'acc': 95.3125, 'step': 13939},\n",
              " {'loss': 0.04716363176703453, 'acc': 96.875, 'step': 13940},\n",
              " {'loss': 0.0036500473506748676, 'acc': 100.0, 'step': 13941},\n",
              " {'loss': 0.10866731405258179, 'acc': 98.4375, 'step': 13942},\n",
              " {'loss': 0.005373062565922737, 'acc': 100.0, 'step': 13943},\n",
              " {'loss': 0.009574191644787788, 'acc': 100.0, 'step': 13944},\n",
              " {'loss': 0.06539559364318848, 'acc': 98.4375, 'step': 13945},\n",
              " {'loss': 0.03502863645553589, 'acc': 100.0, 'step': 13946},\n",
              " {'loss': 0.0014290957478806376, 'acc': 100.0, 'step': 13947},\n",
              " {'loss': 0.1473468840122223, 'acc': 96.875, 'step': 13948},\n",
              " {'loss': 0.0872851237654686, 'acc': 95.3125, 'step': 13949},\n",
              " {'loss': 0.05698040872812271, 'acc': 96.875, 'step': 13950},\n",
              " {'loss': 0.022659510374069214, 'acc': 100.0, 'step': 13951},\n",
              " {'loss': 0.03762879595160484, 'acc': 98.4375, 'step': 13952},\n",
              " {'loss': 0.005107044242322445, 'acc': 100.0, 'step': 13953},\n",
              " {'loss': 0.03251751884818077, 'acc': 98.4375, 'step': 13954},\n",
              " {'loss': 0.07979651540517807, 'acc': 96.875, 'step': 13955},\n",
              " {'loss': 0.003018229268491268, 'acc': 100.0, 'step': 13956},\n",
              " {'loss': 0.02527882158756256, 'acc': 100.0, 'step': 13957},\n",
              " {'loss': 0.07604281604290009, 'acc': 96.875, 'step': 13958},\n",
              " {'loss': 0.05118727311491966, 'acc': 96.875, 'step': 13959},\n",
              " {'loss': 0.010979247279465199, 'acc': 100.0, 'step': 13960},\n",
              " {'loss': 0.01548344548791647, 'acc': 98.4375, 'step': 13961},\n",
              " {'loss': 0.05547577142715454, 'acc': 96.875, 'step': 13962},\n",
              " {'loss': 0.0022788699716329575, 'acc': 100.0, 'step': 13963},\n",
              " {'loss': 0.011416705325245857, 'acc': 100.0, 'step': 13964},\n",
              " {'loss': 0.03209684416651726, 'acc': 98.4375, 'step': 13965},\n",
              " {'loss': 0.04512711241841316, 'acc': 98.4375, 'step': 13966},\n",
              " {'loss': 0.024566128849983215, 'acc': 98.4375, 'step': 13967},\n",
              " {'loss': 0.015216268599033356, 'acc': 100.0, 'step': 13968},\n",
              " {'loss': 0.014618814922869205, 'acc': 100.0, 'step': 13969},\n",
              " {'loss': 0.02047683298587799, 'acc': 98.4375, 'step': 13970},\n",
              " {'loss': 0.009200780652463436, 'acc': 100.0, 'step': 13971},\n",
              " {'loss': 0.010798841714859009, 'acc': 100.0, 'step': 13972},\n",
              " {'loss': 0.07147733867168427, 'acc': 98.4375, 'step': 13973},\n",
              " {'loss': 0.00704289972782135, 'acc': 100.0, 'step': 13974},\n",
              " {'loss': 0.03423742577433586, 'acc': 98.4375, 'step': 13975},\n",
              " {'loss': 0.02512846700847149, 'acc': 98.4375, 'step': 13976},\n",
              " {'loss': 0.02520725503563881, 'acc': 100.0, 'step': 13977},\n",
              " {'loss': 0.0287299957126379, 'acc': 96.875, 'step': 13978},\n",
              " {'loss': 0.1570120006799698, 'acc': 93.75, 'step': 13979},\n",
              " {'loss': 0.00271056592464447, 'acc': 100.0, 'step': 13980},\n",
              " {'loss': 0.04426468163728714, 'acc': 98.4375, 'step': 13981},\n",
              " {'loss': 0.004423962906002998, 'acc': 100.0, 'step': 13982},\n",
              " {'loss': 0.003460661740973592, 'acc': 100.0, 'step': 13983},\n",
              " {'loss': 0.0016179141821339726, 'acc': 100.0, 'step': 13984},\n",
              " {'loss': 0.004118079319596291, 'acc': 100.0, 'step': 13985},\n",
              " {'loss': 0.019887391477823257, 'acc': 100.0, 'step': 13986},\n",
              " {'loss': 0.049110300838947296, 'acc': 96.875, 'step': 13987},\n",
              " {'loss': 0.0519719198346138, 'acc': 96.875, 'step': 13988},\n",
              " {'loss': 0.012618666514754295, 'acc': 100.0, 'step': 13989},\n",
              " {'loss': 0.04236004129052162, 'acc': 98.4375, 'step': 13990},\n",
              " {'loss': 0.09483177959918976, 'acc': 95.3125, 'step': 13991},\n",
              " {'loss': 0.0561918206512928, 'acc': 98.4375, 'step': 13992},\n",
              " {'loss': 0.01088979747146368, 'acc': 100.0, 'step': 13993},\n",
              " {'loss': 0.0016625558491796255, 'acc': 100.0, 'step': 13994},\n",
              " {'loss': 0.035769205540418625, 'acc': 98.4375, 'step': 13995},\n",
              " {'loss': 0.0178078506141901, 'acc': 98.4375, 'step': 13996},\n",
              " {'loss': 0.10217855870723724, 'acc': 96.875, 'step': 13997},\n",
              " {'loss': 0.008981341496109962, 'acc': 100.0, 'step': 13998},\n",
              " {'loss': 0.026969967409968376, 'acc': 98.4375, 'step': 13999}]"
            ]
          },
          "metadata": {},
          "execution_count": 19
        }
      ],
      "source": [
        "history['train'][-100:]"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 20,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.741226Z",
          "start_time": "2025-06-26T01:45:37.737721Z"
        },
        "collapsed": true,
        "id": "6OmEkKUTaGiW",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "6c7c789e-daf8-4507-dce7-9f460c9f302b"
      },
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "[{'loss': 2.3026680618286135, 'acc': 9.86, 'step': 0},\n",
              " {'loss': 1.274044580078125, 'acc': 54.26, 'step': 500},\n",
              " {'loss': 1.0785988178253174, 'acc': 61.2, 'step': 1000},\n",
              " {'loss': 0.9006427439689636, 'acc': 67.72, 'step': 1500},\n",
              " {'loss': 0.9015659482955932, 'acc': 69.36, 'step': 2000},\n",
              " {'loss': 0.8562602143287659, 'acc': 70.44, 'step': 2500},\n",
              " {'loss': 0.7922080218315124, 'acc': 73.74, 'step': 3000},\n",
              " {'loss': 0.8304703901290894, 'acc': 71.64, 'step': 3500},\n",
              " {'loss': 0.8384222874641418, 'acc': 72.64, 'step': 4000},\n",
              " {'loss': 0.87485795211792, 'acc': 74.4, 'step': 4500},\n",
              " {'loss': 0.8350373974084854, 'acc': 75.02, 'step': 5000},\n",
              " {'loss': 1.0123074461936952, 'acc': 72.64, 'step': 5500},\n",
              " {'loss': 0.9740776277542115, 'acc': 73.84, 'step': 6000},\n",
              " {'loss': 1.0517322936058044, 'acc': 74.1, 'step': 6500},\n",
              " {'loss': 0.9976275441169739, 'acc': 75.1, 'step': 7000},\n",
              " {'loss': 1.162524168777466, 'acc': 73.28, 'step': 7500},\n",
              " {'loss': 1.0459458014965057, 'acc': 75.46, 'step': 8000},\n",
              " {'loss': 1.030747349357605, 'acc': 75.42, 'step': 8500},\n",
              " {'loss': 1.107746133518219, 'acc': 75.16, 'step': 9000},\n",
              " {'loss': 1.0947693258285522, 'acc': 75.98, 'step': 9500},\n",
              " {'loss': 1.148284345511347, 'acc': 75.16, 'step': 10000},\n",
              " {'loss': 1.1685646906852722, 'acc': 75.58, 'step': 10500},\n",
              " {'loss': 1.206307688331604, 'acc': 76.4, 'step': 11000},\n",
              " {'loss': 1.1160072517395019, 'acc': 77.12, 'step': 11500},\n",
              " {'loss': 1.2099683304786681, 'acc': 77.02, 'step': 12000},\n",
              " {'loss': 1.1936882111027838, 'acc': 76.72, 'step': 12500},\n",
              " {'loss': 1.157233641386032, 'acc': 76.6, 'step': 13000},\n",
              " {'loss': 1.184826047229767, 'acc': 76.6, 'step': 13500}]"
            ]
          },
          "metadata": {},
          "execution_count": 20
        }
      ],
      "source": [
        "history['val'][-1000:]"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 21,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.816716Z",
          "start_time": "2025-06-26T01:45:37.744941Z"
        },
        "id": "eFbn-w1LaGiX",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 465
        },
        "outputId": "8653bcdf-55be-4a72-ab4f-ba9f9b275ea0"
      },
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "text/plain": [
              "<Figure size 1000x500 with 2 Axes>"
            ],
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzoAAAHACAYAAABqJx3iAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAqTxJREFUeJzs3Xd4VGXax/HvzGTSO5AChN57r4ogTVAUO4qKDRvoIrvrrvuqu1Z2XQsW7Ao2sKOuIBJBeq/SO4SWAOk9k8x5/zhJIJJAyiST8vtc11yZOXPOmTs5kMw99/Pcj8UwDAMREREREZFaxOruAERERERERFxNiY6IiIiIiNQ6SnRERERERKTWUaIjIiIiIiK1jhIdERERERGpdZToiIiIiIhIraNER0REREREah0lOiIiIiIiUut4uDuA0nA6nZw4cYKAgAAsFou7wxERqTMMwyA1NZWGDRtiteqzsQL6uyQi4j6l/dtUIxKdEydOEBUV5e4wRETqrKNHj9K4cWN3h1Ft6O+SiIj7XexvU41IdAICAgDzmwkMDCzz8Q6Hg4ULFzJixAjsdrurw5MqoutYO+g61iwpKSlERUUV/h4Wk/4uSQFdy9pB17FmKe3fphqR6BQMCwgMDCz3HxRfX18CAwP1j7cG03WsHXQdayYNzypKf5ekgK5l7aDrWDNd7G+TBlyLiIiIiEito0RHRERERERqHSU6IiIiIiJS69SIOToiUj0ZhkFubi55eXmlPsbhcODh4UFWVlaZjpPKYbPZ8PDw0BwcERGpdZToiEi55OTkcPLkSTIyMsp0nGEYREREcPToUb25riZ8fX2JjIzE09PT3aGIiIi4jBIdESkzp9PJoUOHsNlsNGzYEE9Pz1InLU6nk7S0NPz9/bUApZsZhkFOTg6nT5/m0KFDtG7dWtdERERqDSU6IlJmOTk5OJ1OoqKi8PX1LdOxTqeTnJwcvL299aa6GvDx8cFut3PkyJHC6yIiIlIb6F2GiJSbEpXaQddRRERqI/11ExERERGRWkeJjoiIiIiI1DpKdEREyqlZs2ZMnz7dJedasmQJFouFpKQkl5yvNlu2bBljxoyhYcOGWCwWvv/++yLPG4bBU089RWRkJD4+PgwbNox9+/YV2SchIYHx48cTGBhIcHAw99xzD2lpaVX4XYiISGVToiMidcrgwYOZMmWKS861fv167rvvPpecS0ovPT2drl27MmPGjGKff/HFF3n99dd55513WLt2LX5+fowcOZKsrKzCfcaPH8+OHTuIjo7mp59+YtmyZbqWIiK1jLquiYicwzAM8vLy8PC4+K/HBg0aVEFE8kejRo1i1KhRxT5nGAbTp0/niSee4JprrgHgk08+ITw8nO+//55x48axa9cuFixYwPr16+nVqxcAb7zxBqNHj+all16iYcOGVfa9iIhI5an1ic6yvaeZNn8XfrlWRrs7GJFayjAMMh15pdrX6XSSmZOHR06uS7p9+dhtpV7D584772Tp0qUsXbqU1157DYCZM2dy1113MX/+fJ544gm2bdvGwoULiYqKYurUqaxZs4b09HTat2/PtGnTGDZsWOH5mjVrxpQpUworRBaLhffff5958+bxyy+/0KhRI15++WWuvvrqcn1v3377LU899RT79+8nMjKShx9+mD//+c+Fz7/11lu8+uqrHD16lKCgIC699FK++eYbAL755huefvpp9u/fj6+vL927d+eHH37Az8+vXLHUFIcOHSI2NrbIdQoKCqJv376sXr2acePGsXr1aoKDgwuTHIBhw4ZhtVpZu3Yt11577Xnnzc7OJjs7u/BxSkoKAA6HA4fDUeY4C44pz7FSvehaVk8nkjJ5dt5uejcL4a4BTS/6d6KyrmNunpOXovexYn+8S897rtZh/jx4WXPahAe45HyGYbDyQAIfrjzM6dTsix9QTp0aBfLvazuV69jSXqdan+jkOp3sik2lsZ9WYB
epLJmOPDo89YtbXnvnMyPx9Szdr7LXXnuNvXv30qlTJ5555hkAduzYAcDf//53XnrpJVq0aEFISAhHjx5l9OjRPP/883h5efHJJ58wZswY9uzZQ5MmTUp8jaeffpoXX3yR//73v7zxxhuMHz+eI0eOEBoaWqbva+PGjdx0003861//4uabb2bVqlU89NBD1KtXjzvvvJMNGzbwyCOP8OmnnzJgwAASEhJYvnw5ACdPnuSWW27hxRdf5NprryU1NZXly5djGEaZYqiJYmNjAQgPDy+yPTw8vPC52NhYwsLCijzv4eFBaGho4T5/NG3aNJ5++unzti9cuLDMa0mdKzo6utzHSvWia1l9JGXDGztsnMm28Ovu02zavpvRUc5SHevK6+g04LP9VjaeqdyZInvi0pi37STd6hmMinIS7lP+c+1LtvDzUSsHUiv/fXNeZirz58eU69iMjIxS7VfrE50QX08A0vVBi0idFxQUhKenJ76+vkRERACwe/duAJ555hmGDx9euG9oaChdu3YtfPzss88yd+5cfvzxRyZPnlzia9x5553ccsstALzwwgu8/vrrrFu3jiuuuKJMsb7yyisMHTqUJ598EoA2bdqwc+dO/vvf/3LnnXcSExODn58fV111FQEBATRt2pTu3bsDZqKTm5vLddddR9OmTQHo3LlzmV5finr88ceZOnVq4eOUlBSioqIYMWIEgYGBZT6fw+EgOjqa4cOHY7fbXRmqVDFdy+rlVGo24z9cz5nsDEJ87SRmOPjlmJX2bdswaXCLEo9z9XV0Og3+/v0ONp45gYfVwpNXtqNpvfJ/KFISR56TbzedYMGOODbHW9iaYOXqLpFMHtKyTK+38Ugiry0+wOqDCQB4eli5pXdjBrdpQCkHTZRZkLedTo3K/vsTzlbVL6bWJzqhfvmJTq6bAxGpxXzsNnY+M7JU+zqdTlJTUgkIDHDZ0DVXOHcYE0BaWhr/+te/mDdvXmHikJmZSUzMhT996tKlS+F9Pz8/AgMDOXXqVJnj2bVrV+EckwIDBw5k+vTp5OXlMXz4cJo2bUqLFi244ooruOKKK7j22mvx9fWla9euDB06lM6dOzNy5EhGjBjBDTfcQEhISJnjqGkKEti4uDgiIyMLt8fFxdGtW7fCff54TXJzc0lISCg8/o+8vLzw8vI6b7vdbq/Qm6KKHi/Vh66l+51OzeaOmRs4HJ9Bo2AfvrivH/O3nWTaz7uZvmg/XnYPHhzc8oLncMV1dDoNnvhhG3M3n8BmtfD6Ld0Z3Tny4geW0/CODdlxIpnpv+4jemcc3289yf+2xXJ9j0Y8fHlrokJLTni2HE3ilei9LNt7GgC7zcK43k14aEhLIoMqUBqqZKW9RmV6lzFt2jR69+5NQEAAYWFhjB07lj179lzwmFmzZmGxWIrcvL29y/KyFVKQ6OQ4LWTmlG4OgYiUjcViwdfTo9Q3H09bmfa/0K2083Mu5o9zV/7yl78wd+5cXnjhBZYvX86WLVvo3LkzOTk5FzzPH3/5WiwWnM7SDZkoi4CAADZt2sScOXOIjIzkqaeeomvXriQlJWGz2YiOjubnn3+mQ4cOvPHGG7Rt25ZDhw65PI7qpnnz5kRERLBo0aLCbSkpKaxdu5b+/fsD0L9/f5KSkti4cWPhPosXL8bpdNK3b98qj1lEKi4+LZvxH6zhwOl0IoO8mTOxH1Ghvtx/WUv+OrItAP9ZsJsPlh+s1DgMw+DJH7bz5YajWC3w6s3dKjXJKdCxYRDv39GLHycPZEjbBuQ5Db7acIwhLy3hH3O3cSIps8j+248nc8+s9YydsZJle0/jYbVwS58ofvvLYJ4d26laJzllUaaKztKlS5k0aRK9e/cmNzeXf/zjH4wYMYKdO3decIJrYGBgkYTIVW9MSsPfywO7zYIjzyAp00GgX9UlWSJS/Xh6epKXd/EPPVauXMmdd95ZODE9LS2Nw4cPV3J0Z7Vv356VK1eeF1ObNm2w2cwqlo
eHB8OGDWPYsGH885//JDg4mMWLF3PddddhsVgYOHAgAwcO5KmnnqJp06bMnTu3yPCrmiotLY39+/cXPj506BBbtmwhNDSUJk2aMGXKFJ577jlat25N8+bNefLJJ2nYsCFjx44FzJ/tFVdcwcSJE3nnnXdwOBxMnjyZcePGqeOaSA2UlJHDbR+uY29cGmEBXsye2I8m5wzbmjSkFY48J9N/3cdz83Zhs1q4a2Bzl8dhGAZP/28nn6+NwWKBl2/qytVdq/Z3SpfGwcy8qw8bjyQy/de9LN93htlrY/hmwzFu6RPFFZ0imbXqEL/siAPAaoHrejTmkctbF/mZ1RZlSnQWLFhQ5PGsWbMICwtj48aNDBo0qMTjLBZLicMBKpvFYiHE15NTqdkkpOfQpL5bwhCRaqJZs2asXbuWw4cP4+/vX2K1pXXr1nz33XeMGTMGi8XCk08+WSmVmZL8+c9/pnfv3jz77LPcfPPNrF69mjfffJO33noLgJ9++omDBw8yaNAgQkJCmD9/Pk6nk7Zt27J27VoWLVrEiBEjCAsLY+3atZw+fZr27dtXWfyVacOGDQwZMqTwcUHyNmHCBGbNmsVjjz1Geno69913H0lJSVxyySUsWLCgyGiCzz//nMmTJzN06FCsVivXX389r7/+epV/LyK1zfebj/P4d9vIySv978tODQN5ZGhrLm8XVuYPw5MzHNz24Vp2nUyhvr8Xc+7rR/P653/4/qehrcnNM3jzt/08/b+deNis3N6vaZle60IMw+D5ebuYteowAP+5vgvXdm/ssvOXVc+mIXx6T1/WHoznlei9rD2UwMerj/Dx6iMAWCxwTdeGPDK0NS0a+LstzspWoTk6ycnJABftJpSWlkbTpk1xOp306NGDF154gY4dO1bkpcskxNfOqdRsEjPUkUCkrvvLX/7ChAkT6NChA5mZmcycObPY/V555RXuvvtuBgwYQP369fnb3/5W6smPrtCjRw+++uornnrqKZ599lkiIyN55plnuPPOOwEIDg7mu+++41//+hdZWVm0bt2aOXPm0LFjR3bt2sWyZcuYPn06KSkpNG3alJdffrnEtWdqmsGDB1+wg5zFYuGZZ54p7KxXnNDQUGbPnl0Z4YnUWVmOPKb9vKvUyw0U2HosmXs+3kDXqGCmDm/DoNb1S5XwpGQ5uOOjtWw/nkI9P0/mTOxLyxLetFssFv48og0Op5N3lx7kye+35w/XKrmLZmkZhsF/FuzhgxXm8OBp13Xmpl5RFT6vK/RtUY8v7uvH6gPxvBy9l41HErmycyRThrWmtYvaUVdn5U50nE4nU6ZMYeDAgXTqVHIP7LZt2/LRRx/RpUsXkpOTeemllxgwYAA7duygcePiM11Xr1cQ7GN+m2dSM9XnvgbTWgXVh8PhwDAMnE5nmascBW9QC46vaq1atTpvSNgdd9wBUCSeJk2a8OuvvxbZ78EHHyyy38GDB4s8LhgSd+55EhISzttWnEGDBp13/LXXXnvemi4Fzw0YMIDFixefd56Cqs78+fOLfa44TqcTwzBwOByFw+IK6P+biJTW1xuPEZeSTWSQN98+OAAP68WTlexcJ5+vjeHjVYfZejSJCR+to1fTEKYOb8OAViUPw0nLzuXOj9ax9VgyIb52Pp/Y96Jv3C0WC3+/oh25eQYfrjjEP+Zuw8Nq4cYKJiWvRO/lnaUHAHj2mo4uSZ5cyWKxMKBVfQa0qk9GTm6pl2SoDcr9nU6aNInt27ezYsWKC+7Xv3//wgmgYP5xbt++Pe+++y7PPvtssce4er2C7FQrYGX1pm14nvy9zMdL9aK1CtzPw8ODiIgI0tLSLjo5vySpqakujkrKKycnh8zMTJYtW0ZubtEWlaVdq0BE6racXCdv/2bOnXvgspY0DC79ZPa/j2rHvZc2550lB/h0zRE2HEnk1g/W0q9FKFOHt6VP86Ijh9Kzc7lr5jo2xSQR5GPns3v70i6idG2KLRYLT1zZnjynwaxVh3
ns29/xsFm4qlP4xQ8uxmu/7uONxeb3/c8xHbi9f7Nynaeq1KUkB8qZ6EyePJmffvqJZcuWlViVKYndbqd79+5FJpL+kavXK1jt2M6W+BOER7Vg9Ii2ZT5eqgetVVB9ZGVlcfToUfz9/cvcRdEwDFJTUwkICKjSxiTu9uCDD/L5558X+9z48eN5++23qziis7KysvDx8WHQoEHnXc+qHK4nIjXXd5uOcSI5i7AAL27uXfYKSX1/L564qgMTB7Xg7SUHmL02hjUHE7jp3dVc2ro+U4a1oWfTEDJz8rjn4/WsP5xIgLcHn93Tl44Ng8r0WhaLhX+O6YAjz6wm/fmrreDsXLZWxMCM3/bz6q97Afi/0e0rpcGBVEyZEh3DMHj44YeZO3cuS5YsoXnzsl/QvLw8tm3bxujRo0vcx9XrFdTzN/9wJ2fn6Q1yLaC1CtwvLy8Pi8WC1Wot81o4BcOnCo6vK5599ln++te/FvtcYGCgW38WVqsVi8VS7P8t/V8TkYtx5DmZscT8APu+QS3wrsD6ZuGB3vzr6o7cN6gFM37bz1cbjrJ83xmW7zvD4LYNyHY4WXMwgQAvDz69py+dG5ctySlgsVh49ppO5DkNvlh/lL98u51xzS20O52Oh/3ib48XbI/lv7+YHYUfu6ItEweVvBipuE+ZEp1JkyYxe/ZsfvjhBwICAoiNjQXM1cZ9fMwS5R133EGjRo2YNm0aYK423q9fP1q1akVSUhL//e9/OXLkCPfee6+Lv5WShfiaf6gT0zXWXETcIywsjLCwMHeHISLicj9sOcHRhEzq+3syvq9rOpk1DPbh+Ws788BlLXlz8X6+2XSMJXvMRS39PG3MursP3aKCK/QaVquFF67tjCPP4NtNx/j8gI3PX1958QPP8efhbXhocKsKxSGVp0yJTsHQisGDBxfZPnPmzMJOQDExMUU+mUxMTGTixInExsYSEhJCz549WbVqFR06dKhY5GVQsGhoYkb55hKIiIiIyPnynAYz8ufm3HtpC3w8y1/NKU5UqC//uaELDw5uyeuL9/H7sWReuLYzPZuGuOT8VquFF2/ogp+nlW83HMHDbgcuPqzabrNy76XNeeCyli6JQypHmYeuXcySJUuKPH711Vd59dVXyxSUq4X4FiQ6quiIiIiIuMpPv5/g0Jl0QnztLl2X5o+a1ffjlZu6Vcq5bVYLT17Zjp6Wg4wePVJDdmuROjFAvnDomhIdEREREZdwOo3CjmP3XNIcP6+61dFLqr86keicO3StNFUpEREREbmwn7fHsv9UGoHeHtwxoJm7wxE5T51IvYN9zIqOI88gLTuXAG+VJEVERKTidp1MJaWSpgCvO5RAQnr2xXfM1zjEl06NyteFrKzMas4+AO4a2JxAvbeSaqhOJDo+njY8rQY5TguJ6Q4lOiJSbs2aNWPKlClMmTLlovtaLBbmzp3L2LFjKz0uEal6n645wpPfb6e+t43rxzhx5dSO+dtO8tDnm8p0jNUCP0y6pNwtl8sielccu2NT8ffy4G6tHyPVVJ1IdAD8PCAnB+LTs2lSz9fd4YiIiEgNNmddDE9+vx2AM1kWfth6klv6NnPJuZ1Og+n5C1G2bOBX2FTpQmJTsjiWmMnri/fx/h29XBJHSQzD4PVFZjVnwoCmBPnqA2SpnupOomOHxBy1mBYREZGK+XrDUf4xdxsAbcP92ROXxjtLD3FjryZ42Co+/Xnhzlj2xqUR4OXBdw8NJMjn4onE/lNpDH91KdE749h5IoUODQMrHEdJfttzih0nUvD1tHHPJVooU6qvOtGMAMDfw2xCkKBFQ0VczzAgJ730N0dG2fa/0K0MDUbee+89GjZsiNPpLLL9mmuu4e677+bAgQNcc801hIeH4+/vT+/evfn1119d9mPatm0bl19+OT4+PtSrV4/77ruPtLS0wueXLFlCnz598PPzIzg4mIEDB3LkyBEAtm7dypAhQwgICCAwMJCePXuyYcMGl8UmIqUzd/
MxHvv2dwwD7hzQjC8n9sHPw+BIQgb/+/1Ehc9vVkvMTmZ3DWxWqiQHoFWYP1d1aQjAm7/tq3AcF4rvtfz4bu/XtLDhk0h1VKcqOgCJ6aroiLicIwNeaFiqXa1AsCtf+x8nwNOvVLveeOONPPzww/z2228MHToUgISEBBYsWMD8+fNJS0tj9OjRPP/883h5efHJJ58wZswY9uzZQ5MmTSoUZnp6OiNHjqR///6sX7+eU6dOce+99zJ58mRmzZpFbm4uY8eOZeLEicyZM4ecnBzWrVuHxWIuXDd+/Hi6d+/O22+/jc1mY8uWLVrrQaSK/bj1BH/+aiuGAeP7NuGfYzqQm5vLkIZOfoqx8ebi/VzdtRE268UXnCzJol2n2HkyBT9PG3dfUra5L5OHtOJ/W0/w8/ZY9sal0iY8oNxxlGT5vjNsPZqEt93KvZeqmiPVWx2q6JhfEzR0TaTOCgkJYdSoUcyePbtw2zfffEP9+vUZMmQIXbt25f7776dTp060bt2aZ599lpYtW/Ljjz9W+LVnz55NVlYWn3zyCZ06deLyyy/nzTff5NNPPyUuLo6UlBSSk5O56qqraNmyJe3bt2fChAmFCVZMTAzDhg2jXbt2tG7dmhtvvJGuXbtWOC4RKZ35207y6JdbcBowrncUz17TqfCDiEvDDYJ8PDhwOp35206W+zUMw+D1/E5mdwxoRnAp5uacq21EAKM6RWAY8Gb++jaudO7cnFv7NKVBgJfLX0PElepQRccc3qKKjkglsPualZVScDqdpKSmEhgQgNXqgs9a7GVrLjJ+/HgmTpzIW2+9hZeXF59//jnjxo3DarWSlpbGv/71L+bNm8fJkyfJzc0lMzOTmJiYCoe5a9cuunbtip/f2erTwIEDcTqd7Nmzh0GDBnHnnXcycuRIhg8fzrBhw7jpppuIjIwEYOrUqdx77718+umnDBs2jBtvvJGWLVtWOC4RubiFO2J5ZM5m8pwG1/dozAvXdsZ6TtXG2wMm9G/K64sP8Obi/VzZObLI86W1dO9pfj+WjI/dxr1lrOYUmHx5K37eHstPv5/gT8Na07KBf7nOU5zVB+PZcCQRTw8r91+mao5Uf3WmouNXUNFRoiPiehaLOXystDe7b9n2v9DNUrY3E2PGjMEwDObNm8fRo0dZvnw548ePB+Avf/kLc+fO5YUXXmD58uVs2bKFzp07k5NTNb83Zs6cyerVqxkwYABffvklbdq0Yc2aNQD861//YseOHVx55ZUsXryYDh06MHfu3CqJS6QuW7QrjkmzN5HrNBjbrSEv3tCl2CRmQr8mBHh5sCculYU7Y8v8OudWS27r14R6/uWrlnRsGMSw9uE4DZjxm2urOgXxjesdRXigt0vPLVIZ6kyi418wR0dD10TqNG9vb6677jo+//xz5syZQ9u2benRowcAK1eu5M477+Taa6+lc+fOREREcPjwYZe8bvv27dm6dSvp6emF21auXInVaqVt27aF27p3787jjz/OqlWr6NSpU5Fhdm3atOHRRx9l4cKFXHfddcycOdMlsYlI8ZbsOcWDn23CkWdwVZdIXrqxa4nzbwJ97Nw5sBkAbyzej1GGRikAqw7EsykmCS8PKxMHVaxa8sjQVgD8sOUER+LTL7J36aw7lMCagwnYbRYeuEzVZKkZ6kyio4qOiBQYP3488+bN46OPPiqs5gC0bt2a7777ji1btrB161ZuvfXW8zq0VeQ1vb29mTBhAtu3b+e3337j4Ycf5vbbbyc8PJxDhw7x+OOPs3r1ao4cOcLChQvZt28f7du3JzMzk8mTJ7NkyRKOHDnCypUrWb9+Pe3bt3dJbCJyvhX7znDfpxvJyXMyqlMEr97c7aKto+8e2Bw/Txs7TqSwePepMr3ea/nVklv6NCEsoGLVki6NgxnctgF5ToO3fjtQoXMVeCN/7tANPaNoGOzjknOKVLa6k+jYC9pLK9ERqesuv/xyQkND2bNnD7feemvh9ldeeYWQkB
AGDBjAmDFjGDlyZGG1p6J8fX355ZdfSEhIoHfv3txwww0MHTqUN998s/D53bt3c/3119OmTRvuu+8+Jk2axP3334/NZiM+Pp477riDNm3acNNNNzFq1Ciefvppl8QmIkWtPhDPvZ+sJyfXybD24bw2rjv2UqyPE+Lnye39mwHmMK/SVnXWHIxn3aEEPG1Wl1VLHr68NQDfbjrGscSMCp1rc0wiy/edwcNq4aHBquZIzVFnmhEUdF1LynSQ5zQq1PpRRGo2q9XKiRPnN09o1qwZixcvLrJt0qRJRR6XZSjbH9/kdO7c+bzzFwgPDy9xzo2npydz5swp9euKSPltiknk7lnryXI4GdK2ATPGd8fTo/SfC997aXNmrTrE1mPJLNt3hsvaNLjoMQXVkpt6NyYiyDVzX3o2DeGSVvVZsf8Mby85wPPXdi7XeQzDYPqvZnzXdm9EVGjZGsCIuFPdqejkJzqGAcmZWjRUREREinI6Df7x3TYyHXlc2ro+b9/WEy8PW5nOUd/fi9v6NgVKV9XZeCSBlfvj8bC6fu7Lw5ebc3W+3nCMk8mZZT7eMAxemL+LpXtPY7NamDSklUvjE6lsdSbRsVkh0NvMdjR8TUQq6vPPP8ff37/YW8eOHd0dnoiUw6+74tgdm4qfp403bumOt71sSU6B+wa1wNPDysYjiaw+EH/BfV9fZHZGu6FnYxqHuLZa0rdFPfo2DyUnz8m7Sw+W6VjDMPjPgj28v/wQAM+N7USz+qVbnFmkuqgzQ9cAQnw9ScnKVec1Eamwq6++mr59+xb7nN1ur+JoRKSiDMPgjfxFNsuzWOe5wgK9uaV3FB+vPsJri/YxoFX9YvfbcjSpsFry0ODKqZY8MrQ14z9Yy5x1MTw0uCVhpWwL/Ur0Xt5ZajYyePaajtzSp0mlxCdSmepWouNn50iCKjoiUnEBAQEEBAS4OwwRcZEle06z7XjFFus81wODWzJn3VHWHkpg7cF4+raod94+b+bPzRnbrRFN6lXO3JcBLevRs2kIG48k8t6ygzxxVYeLHvPar/sKk75/julQ2GBBpKap/UPXkmKwbJ1DRNJGQnzNT1kTleiIuERZ14mQ6knXUeo6wzAK2ztXZLHOc0UG+XBDr8YAhUnDubYfT+bXXaewWmDSkMrrZGaxWArn6ny+NoYzadkX3H/Gb/t59de9APzf6PbcNbDiSZ+Iu9T+ROfEZjx+ephWp+YTkl+GTtDQNZEKKRialZFRsZalUj0UXEcNuZO6asX+M2w56prFOs/14GUt8bBaWLH/DBuPJBZ57s385GdM14a0aODvstcszmVtGtC1cRCZjjw+yJ9zU5z3lh3gv7/sAeCxK9q69Gch4g61f+iar1kq9spNVUVHxEVsNhvBwcGcOmUuiOfr64vFUrqW7U6nk5ycHLKysrBaa/9nLdWZYRhkZGRw6tQpgoODsdnKN/FapCYzDIPXXbhY57miQn25rkcjvtpwjDcW72PWXX0A2B2bwoIdsVgsMLkKOpmZVZ3W3PvJBj5dfZj7B7UgxK/oHKQPVxzihfm7AZg6vE2lzRkSqUp1INExJwB65qYWVnTileiIVFhERARAYbJTWoZhkJmZiY+PT6mTI6lcwcHBhddTpK5ZczCB9YcTXbpY57kmDWnFt5uOs2TPaX4/lkSXxsGF1ZzRnSJpHV41c/2Gtg+jQ2QgO0+m8NHKQ/x5RNvC5z5ZfZhnf9oJwCOXt+KRoa2rJCaRylb7Ex2//EQnL516PuabKlV0RCrOYrEQGRlJWFgYDkfp16ZyOBwsW7aMQYMGaahUNWC321XJkTqtMhbrPFfTen5c07Uh320+zuuL9vP3UW2Zt+0kAJMvr7qqicVi4ZGhrXjgs03MWnmYey9tQZCPndlrY3jqhx0APDi4JY8Ob1NlMYlUttqf6PiEYGDBgkGYhzkOPSFDC4aKuIrNZivTG2WbzUZubi7e3t5KdETErTYcTmDVgXjsNgsPVuJQrUmXt2
LuluP8uiuOhPRsDANGdAinfWRgpb1mcUZ0iKBteAB74lKZtfIwkUHe/GPuNgAmXtqcx0a2VaVdapXaP0DeagOfEAAaWNMAVXREREQEXs8fQnZ9j8Y0CvaptNdp2cCfq7o0BGBTTBIAD19e9cPDrFZLYRXpnaUH+Nt3vwNw18Bm/GN0eyU5UuvU/kQHChsShJAMKNERERGp67YcTWJZJS/Wea6Hzxmmdnm7MDo3Dqr01yzO6M6RtGzgR6YjD8Mw22k/dVUHJTlSK9WJRMfIT3QCjVQAUrNzycl1ujMkERERcaM3FlX+Yp3nahMewC19ovD1tDHVjfNgbFYLf7uiHRYL3N6vKc9c3UlJjtRatX+ODhR2XvN1JGK1+OM0ICkjh7BA1086FBERkept+/FkFu2u/MU6/+iFazvzzDWdsNvc+znziI4R7HrmCrztakQitVudquhYM+O1aKiIiEgdV9BprSoW6zyXxWJxe5JTQEmO1AXV439bZctPdMiIL1wgK0HzdEREROqcXSdT+GVHXJUt1iki7lOnEh1LZjyh+RWdxHS1mBYREalr3vyt6hfrFBH3qBOJjlGkomOu25GQnu3GiERERKSq7T+Vynw3LNYpIu5RJxKdgmYElox4QguHrqmiIyIiUpe8uXi/2xbrFJGqVycSHcM31LyTfqYw0UlUMwIREZE649CZdH7cegKAR4ZW/WKdIlL16kSiU1DRITOBEJ+CoWtKdEREROqKGb/tx2mYi3V2auSexTpFpGrVkUQnvxmBM5dwL3Nujio6IiIidcPRhAzmbj4OwMOamyNSZ9SNRMfDC4fVXBy0gS0NUEVHRESkrnhryX7ynAaXtq5P9yYh7g5HRKqIh7sDqCo5HgHYc7KoRwoAiUp0REREarVtx5J59de9LN59CoA/aW6OSJ1SpxIdv5zTBJMKeJKgoWsiIiK10q6TKbwavZeFO+MAsFktTLy0Bb2ahbo5MhGpSnUm0cn2MBcFC3AmAw3IcjjJzMnDx9Pm3sBERETEJfbFpTL9133My18rx2qBsd0a8cjQ1jSr7+fm6ESkqtWZRCfHw+yX75WdgKctnJw8J/Hp2TT29HVzZCIiIlIRB0+n8dqiffy49QSGARYLXNk5kinD2tAqzN/d4YmIm9SZRCfbw/xFZ8mIJ8TPTlxKNonpDhprTqKIiEiNFBOfwWuL9jF38zGchrntio4RTBnemnYRWhBUpK6rM4lOQUWHjHhC/byIS8nWPB0REZEaKD07l+fm7eTrDcfIzc9whrUPY8qwNlojR0QK1aFEx5yjQ/oZQv3MRUPVeU1ERKTm+e8ve5iz7igAl7VpwNThbegaFezeoESk2qkziU5BMwIyzhDi7wloLR0REZGaJsuRx3ebjgHw+i3dubprQzdHJCLVVd1YMJRzKzrxhPqZiU6ihq6JiIjUKD9vP0lKVi6NQ3y4qnOku8MRkWqsziQ6Zys68YT4qqIjIiJSE32RP2Tt5l5RWK0WN0cjItVZnUl0CpsRONJp4O0EVNERERGpSQ6eTmPtoQSsFrihV2N3hyMi1VydSXRyrd4YNrOSE+6RBqiiIyIiUpN8ucGs5gxuG0ZkkI+boxGR6q7OJDpYLOATCkB9SyqgREdERKSmyMl18u1GswnBuN5Rbo5GRGqCupPoAPjWByCkMNFxuDMaERERKaXFu+M4k5ZDgwAvhrQLc3c4IlID1KlEx/CrB0CwkQKYc3QMw3BnSCIiIlIKX6w3h63d2LMxdludevsiIuVUt35T+JqJjl9uIgB5ToOUrFx3RiQiIiIXcTwpk6V7TwNwUy8NWxOR0qlTiY7hYyY69qwE/DxtACRqno6IiEi19vWGoxgG9G9Rj2b1/dwdjojUEHUq0Smo6JART0j+oqEJajEtIiJSbeU5Db7ekN+EoI+qOSJSenU20QnNT3RU0REREam+lu87zfGkTIJ97YzsGOHucESkBqlTiY7hZ3ZdI/0MIb75FR0lOiIitUpeXh
5PPvkkzZs3x8fHh5YtW/Lss88WaT5jGAZPPfUUkZGR+Pj4MGzYMPbt2+fGqKUkX+Y3Ibi2eyO87TY3RyMiNUmZEp1p06bRu3dvAgICCAsLY+zYsezZs+eix3399de0a9cOb29vOnfuzPz588sdcIXkr6NDxpmzFR0NXRMRqVX+85//8Pbbb/Pmm2+ya9cu/vOf//Diiy/yxhtvFO7z4osv8vrrr/POO++wdu1a/Pz8GDlyJFlZWW6MXP7odGo20TvjALhZa+eISBmVKdFZunQpkyZNYs2aNURHR+NwOBgxYgTp6eklHrNq1SpuueUW7rnnHjZv3szYsWMZO3Ys27dvr3DwZWXkr6NDRvw5FR2tpSMiUpusWrWKa665hiuvvJJmzZpxww03MGLECNatWweY1Zzp06fzxBNPcM0119ClSxc++eQTTpw4wffff+/e4KWI7zYdI9dp0C0qmHYRge4OR0RqGI+y7LxgwYIij2fNmkVYWBgbN25k0KBBxR7z2muvccUVV/DXv/4VgGeffZbo6GjefPNN3nnnnXKGXU4Fc3QyE6nnawEgIT27amMQEZFKNWDAAN577z327t1LmzZt2Lp1KytWrOCVV14B4NChQ8TGxjJs2LDCY4KCgujbty+rV69m3Lhx550zOzub7Oyzfy9SUsz12BwOBw5H2T8wKzimPMfWFYZh8MW6GABu6tmw2v6sdC1rB13HmqW016lMic4fJScnAxAaGlriPqtXr2bq1KlFto0cOfKCn5pV2h8UD388sGDBIMxqVqHi07L1j7qG0C+h2kHXsWapidfp73//OykpKbRr1w6bzUZeXh7PP/8848ePByA2NhaA8PDwIseFh4cXPvdH06ZN4+mnnz5v+8KFC/H19S13rNHR0eU+trbbnwKH4j3wshp4nPid+XG/uzukC9K1rB10HWuGjIyMUu1X7kTH6XQyZcoUBg4cSKdOnUrcLzY2tkx/TKAS/6AsWswVNj+88tI4s2ct0IyDx0+5b86QlIt+CdUOuo41Q2n/mFQnX331FZ9//jmzZ8+mY8eObNmyhSlTptCwYUMmTJhQrnM+/vjjRT60S0lJISoqihEjRhAYWPYhVQ6Hg+joaIYPH47dbi9XTLXdX7/ZBpxkbI/GXDumo7vDKZGuZe2g61izFBRBLqbcic6kSZPYvn07K1asKO8pSlSZf1A8YyIhfh+XdIriP4cBTz9Gj77EdcFLpdEvodpB17FmKe0fk+rkr3/9K3//+98Lh6B17tyZI0eOMG3aNCZMmEBEhNmiOC4ujsjIyMLj4uLi6NatW7Hn9PLywsvL67ztdru9Qv+OK3p8bZWc4eDnHWYTglv6NqsRPyNdy9pB17FmKO01KleiM3nyZH766SeWLVtG48aNL7hvREQEcXFxRbbFxcUV/qEpTmX+QbH4NYD4fdS3pgOBJGY69A+6htEvodpB17FmqInXKCMjA6u1aK8dm82G0+kEoHnz5kRERLBo0aLCxCYlJYW1a9fy4IMPVnW4Uowfth4nO9dJu4gAujYOcnc4IlJDlanrmmEYTJ48mblz57J48WKaN29+0WP69+/PokWLimyLjo6mf//+ZYvUVfzMhgQBeUkAJGc6yM1zuicWERFxuTFjxvD8888zb948Dh8+zNy5c3nllVe49tprAbBYLEyZMoXnnnuOH3/8kW3btnHHHXfQsGFDxo4d697gBcMwmLPOXDtnXO8oLBaLmyMSkZqqTBWdSZMmMXv2bH744QcCAgIK59kEBQXh4+MDwB133EGjRo2YNm0aAH/605+47LLLePnll7nyyiv54osv2LBhA++9956Lv5VSyu+85uNIAppgGGayU8///AqSiIjUPG+88QZPPvkkDz30EKdOnaJhw4bcf//9PPXUU4X7PPbYY6Snp3PfffeRlJTEJZdcwoIFC/D29nZj5AKw7Xgyu06m4OlhZWz3Ru4OR0RqsDIlOm+//TYAgwcPLrJ95syZ3HnnnQDExM
QUGTIwYMAAZs+ezRNPPME//vEPWrduzffff3/BBgaVKn8tHVtWAkE+dpIzHSRm5CjRERGpJQICApg+fTrTp08vcR+LxcIzzzzDM888U3WBSal8sd6s5ozuFEFw/pp3IiLlUaZExzCMi+6zZMmS87bdeOON3HjjjWV5qcrjl79oaPoZQv08Sc50aNFQERGRaiAjJ5cft5wA4ObeTdwcjYjUdGWao1Mr5Fd0yDhDiK85yVaLhoqIiLjfT7+fJC07l2b1fOnXouQ1+kRESqNCC4bWSL75vzjT4wn1M4erqaIjIiLifl/mD1u7uXcTNSGQuseZB3HbIXYbWKzg4QUePvlfvcHubX718C76nN0XrHWvdlEadS/R8Ttb0QkNMys6iRk5bgxIRERE9salsvFIIh5WC9f3VBMCqQNyMuD4RohZbd6Oroec1LKfx+oB/uHmLSDiD18jISAc/CPArwHY6tZb/7r13cI5Q9fizxm6pkRHRETEnQqqOUPbhxEWoO53Ugulx8PRNXBkFcSsgZNbwJlbdB+vQGjYDWyekJsNjkzza27WObf87c78EUnOXEg5bt4uyGJ2H/b0M6tAdp9zvvoUv81ahlTB7gNh7SG809kRVG5WBxMds700zlwiPM0EJ1GJjoiIiNtsjknki3UxAIxTEwKpLTIS4NAyOLgEjqyEM3vP3ycgEpr0h6YDoEk/COsAVlvpzu/MMxOfzERIjYO0WEg9ec79c76mnwLDCRlnzFtlC2gIEZ3MpKfga71Wpf/eXKTuJTp2b/D0h5w0wj3SAEjQ0DURERG3+P1YEnd8tI70nDz6t6jHoDYN3B2S1Bbp8eDlb85jqQq52Wal5uASOPgbnNgC/KFjcYN2ZmLTpL+Z2AQ3gfLOR7PazOqMpx8ENb7wvs48SD8D6afNapAjo4Sv595PB6ez9PFkJZtzjJKOQOoJ87Zv4dnnPbzPVnwiOkNkV/NnUInqXqIDZlUnJ40GVnMcpCo6IiIiVW/78WRu/3AdqVm59G4WwgcTemGzqgmBlFNWMhxeAQd+M5ON+H3mhP2m/aHFEGgx2HyT7aqJ+06n+ca+ILE5shpyM4vu06Cd+drNB5lv6t01pMtqM+fqBIRX/mtlpUDcDvNnE7cdYrfDqZ1mAnVis3kDM9G5f1mlhlI3Ex2/+pB0hBBLKuCnio6IiEgV2x2bwu0friU500GPJsHMvKsPfl51821JreJ0wpk9kJcDFpv5Brvga+F9j6LbPf3LN0k+zwHH1puJxoHfzIn9Rl7RfXIz4cBi8wbmXO0Wl5nJR8shF6+EABgGpMVBwiFIPGR+PbPXTKr+OAzMP8JMqApugZFl/75qOu9AM7ls2v/sNmee+XM7N/lp0LbSQ6mbv1HyGxIEGcmAH4lqLy0iIlJl9sWlMv79tSRmOOgaFcysu/vgrySn5ks6CnPvN+ejlJVPiNkVzK+B+YG0b/2z98/96sw1570c+M18nZy0oucJbWkmMC0GQ7NLIOXk2YrL4ZVmYrL9W/MG5ryRFkOwNL0U/6yTWA7+Bikx+UnN4bPJjSOj+LjtfubrFLxmg3blH4pWm1ltUL+Vees4tspetm7+VslvSBCQmww0JC07l+zcPLw8qnaClIiISF2z/1Qat7y/lvj0HDo1CuSTu/sQ6G13d1hlYxhmh6sTW7DGbqdBSh4w2t1RXZwzD46uhZ0/wolN0GoYDHjEnL9cUdu/hf89CtnJYPMy32s5c80KizP/Vng/9/zKS2aieStuwv6F+NYrWkEJ/kMzC58QCO8A/R+C3JyzFaCD+RWg+P0Qvx+P9e8zFGBXCa9jsUJgYwhtbt5CmkNUH2jUCzw8yxazVJm6mej4mYmOV04CNquFPKdBYrqDiCAlOiIiIpXl0Jl0bn1/DWfSsmkfGchn9/QlyKcGJDkpJ81WwAXzC05sNid1AzZgAJD3axqMeLb6rVOSl2tWPnb+ALt/ModgFTi6FrZ8Dlf8G9qOKt/5s1Lg58dg6x
zzcaOecN37UK/lxY91Os2kJzvF/HkWTJYv+Jpx5vzteTnmXJcWg83hZ2WZc+PhCc0GmrfL/w8yk8zhZweXYBxYTF7iUWz1mmEJbXk2mSn4GtxECU0NVM3+N1aR/KFrlswEQnw9OZOWTUJ6DhFB6tsvIiJSGWLiM7j1/TWcSs2mbXgAn9/bl2DfavjGMTcbDi4tmtSkxZ6/n8UGYR1wBkRg3R+Nbe1bELsVbphZNRO+LyTPAYeW5ic38yAj/uxzXkFmUhPZBVa9aQ7PmjMOWo+EK6aVLkEpcHQ9fHeveQ6LFS79C1z2GNhKmbxarWD1BI/6Zxd0r0o+wdD+Kmh/FbkOB/Pnz2f06NHY7TUg+ZZSqZuJTsF/pvQzhPrZOZOWTaIaEoiIiFSKowkZ3PL+Gk4mZ9EqzJ/PJ/Yl1K+aJTmGAbv+BwufMNvjnstiNedeNOxu3iK7mWuD2H3IczjYMPtpeh+fieXISnj3Urjx46ITsSub02kO+zq23kxu9swzO5AV8AmBdldBh2ug+WVnKxM9JsCy/8LqGbDvF3M414BH4NKpZsvikuTlwvKXYel/zCFoQU3guveq9nsWKYW6megULBqacYaQ/E+TEtRiWkRExOVOJGVy6wdrOJ6USYv6fsy+ty/1/atoXZPSit0GCx6Hw8vNx/7h5rCogsQmotMF3/ifDO5N7shbsX97N5zeBbOuNIex9XuoYhPTk49D8tGLD+nKiDcXgzyXXwNoPwbaX21Oli+uyuLlD8Ofhu63mcPPDiyG5S/B1i9g5PNmYvTH+BMPw3f3mcPeADrfCFe+DN5B5f8+RSpJHU10Cio68YQ2MBMdVXRERERcKzY5i1veX8PRhEya1vNl9sR+hAUWM0zc6YQlL5hv7Ic+CYENqybAtNPw23Ow6RMzUfDwNisal0y5cEWjOPVaw72/wv/+BNu/gV/+YVZYrn4DvAJKf56cDLMqs+kTiFlVthiCoqDtaDNBadKv9KvQ128Nt31nDnNb8Dgkx8DXE8zqz6gXIaydud/vX8G8P5tzajwD4KpXoMtNZYtRpArVzUTHr6CiE0+Inyo6IiIirnYqNYtb31/DkfgMokJ9mDOxX/FzYQ3DTArWvm0+3jMPRr8MnW+ovDa9uTmw7l1Y+qL5ph2g47Uw/Jnzu3aVhZc/XP+B2Y3rl3/AjrkQtxNu/vTia4ac2GImN9u+PhuTxWrGU9B22bfeOS2YG5jvZ859rrRzY4pjsZjzVVoNhRXTYcWr5jyfdwZC3wfMJgbbvjb3jeprDlULaVb+1xOpAnUz0Smo6DjSCfMyS72JSnRERERc4nRqNre+v5aDZ9JpFOzD7Hv70TDYp/idl/33bJJTv43ZXvi7e2H3/+DKV89+OOkKhgF7fzGTkIQD5rbIrmbXsaYDXPMaFgv0vd+cx/P1BHPxzPcvNys7na4rum9Wspk8bPoETm49uz2kGXS/HbqNr/oFJ+0+MORx6DoOfvk/M/Fc/ab5nMUGg/8Ol0ytft3lRIpRN/+VegWA1Q5OBxGe6QAkZGjRUBERkYpKSM/htg/Wsv9UGpFB3syZ2I+oUN/id177Hvz2vHl/1IvQ6x5Y8Yo5yX3nD3BktZkgtL2i4oGd2g2/PG7OQwHwC4OhT0G3W0s/xKssmvSF+5fDN3eZc3++ucscyjbsaXP9lk0fw47vITfT3N/mac6p6TEBml1a+pbJlSW0OdwyG/ZFmwmP4YSxb5nVKpEaom4mOhaL2Xkt9SThtlRAFR0REZGKSsrIYfwHa9kTl0pYgBezJ/ajSb0Skpzfv4Kf/2reH/y4WQUBsz1x6+Ew9wE4vRvm3GxWN0a+AN6BZQsoL9ec5/L7V7BlttkhzOZpNgm49M9lP19Z+TeA27+Hxc/Cyumw5i3Y/NnZoWkADdpDzwnQ5WbwDa3ceMqj9XDzZhiVN5RQpJLUzUQHzOFrqSepZ0kDvIlXoiMiIlJuyRkObvtwLbtOplDf34s59/Wjef0SJvTvWW
AmMgB97ofL/lb0+Ybd4b6lZoKwegZs/tScLzL2bbOD2IUUriHzo7lA5rlryLS7yuyGFtqi/N9oWdk8zM5mjXvD9w+aSY7dzxzG1mMCNO5VMxKImhCjyB/U3UQnf8xviJECeKuiIyIiUk4pWQ7u+Ggt24+nUM/PkzkT+9KygX/xOx9eac5dMfLMKsYV/y7+TbTd22xx3HY0fP8AJMXArKug/yS4/Enz+QK52XDgt/w1ZOZDVtLZ53xCod2V0PUWaDbQpd93mbS/ylyk89gGs0JSlk5sIlIudTfRyV9LJ8CZBISRkJGDYRhY9ImFiIhIqaVl53LnR+vYeiyZEF87n0/sS+vwEt7En9wKc8ZBbha0GQXXzLj4XJRmA+HBVWYDgU2fmBPj90XD1a9D2inY9aNZIcpJPXuMX5iZWHS4BppeUn0mzgc3qVhXNxEpk2ryP98N8juv+eUlAZCT6yQjJw8/r7r7IxERESmLjJxc7p65nk0xSQT52Pns3r60iyhh3suZ/fDpdebQraYD4caZpW+H7BVgNiVodxX8+LDZyeyjkUX3CWhoTuYv6xoyIlJr1d139X5mouORlYiXh5XsXCcJ6TlKdEREREohMyePu2etZ93hBAK8Pfjsnr50bBhU/M7Jx+DTsZBxxmznfMscs41xWbUZCQ+tgXlTzTVqgppAh6vN5KZRL/d3KhORaqXuvqvPH7pmyYgn1M+Tk8lZJGbklNwCU0RERADIcuQx8ZMNrDmYgL+XB5/c3YfOjUtIctLj4dNrIfko1GsF478F7xL2LQ3fULhxFlzzlpksaci5iJSg7n70kZ/okH6GEF9PwOz9LyIiIiXLcuRx/6cbWbH/DH6eNj6+uzfdm4SUsHMKfH69uQhoYCOz1bJ/A9cE4umrJEdELqjuJjr5Q9fIOEOon5noJGYo0RERESlJTq6Thz7fxNK9p/Gx25h5Vx96Ni1h7ZfEI/DFrXBis/nh4u3fQ3BUlcYrInVbHR66VpDoxBNSv6Ci43BjQCIiItXbk99vZ/HuU3jbrXx0Z2/6NP9DkpN+xpw7s+1rOLrW3OYZALd9Cw3aVH3AIlKn1d1Ep6Cik5lIA1+zsJWQnu3GgERERKqvlCwH320+BsA7t/Wkf8v8IeDZqbB7vpncHFhsro8DgAWaXwpD/2kuACoiUsXqbqLjEwJYAINIeyagio6IiEhJFu2Kw5Fn0DrMn8Etg88mN3t+htzMszs27A6db4SO10FgpNviFRGpu4mO1WYmO5kJRHiYi4wlqhmBiIhIsX7eFktHyyGm+WyAl+6ArKSzT4a2hC43QacboH4rt8UoInKuupvogDl8LTOB+rZ0wE6CmhGIiIicJ/3kHsbu/z9Ge62BuPyN/hHQ6XrofINZxVEHNBGpZup2ouNbH9hLKClAPVV0REREzpUaC0v/g8/GTxhtzcWJBUun67H0uAOaXWKOjhARqabqeKJjdosJMpKBemovLSIiApCVDCtfgzVvgyMDK7A4rxuHu/2Fu68f4+7oRERKpW4nOvmd1/zzkgFIzHDgdBpYrSq/i4hIHeTIgvUfwPKXIDMRAGejXkyIuZLljrb8r+8lbg5QRKT06naik7+Wjo8jCYA8p0FqVi5BvnY3BiUiIlLFnHmw9Qv47QVIMVtIU78tDH2Khbk9WH5gM41DfOjUKNC9cYqIlEHdTnTyKzq2zHj8vTxIy84lISNHiY6IiNQdexdC9FNwepf5OLARDH4cut4CNg9+/mIzAKM6RWBRwwERqUHqdqLjm7/YWcYZQv08zUQnPZvm9f3cG5eIiEhVOLQMZt9o3vcOhkv/DH0mgt0HgCxHHot2nQJgVGetiSMiNYsSHYD0eEL8PIlJyNCioSIiUncs+6/5tcM1MOZ18Aku8vSKfWdIy84lMsibbo2DzztcRKQ6s7o7ALfKH7pGRjyh+cPV1GJaRETqhGMbzYqO1QNGPH9ekgPw8/ZYAEZ2jFCjHhGpcep2ouN7NtEJyU90tG
ioiIjUCStfNb92vgmCo857OifXSfROM9EZrWFrIlID1fFEJ3/omtNBpJeZ4KiiIyIitd7pvbDrJ/P+JVOK3WXVgTOkZOXSIMCLnk1Dqi42EREXqduJjt0bPP0BiLSnA5CgREdERGq7la8BBrS7Chq0LXaXBYXD1sKxadiaiNRAdTvRgcKqTrhHGgCJGromIiK1WfIx+P1L8/7AKcXukpvn5Jcd+cPWOmnYmojUTEp08hsShFpSAVV0RESkllv9Fjgd0OxSiOpd7C5rDyWQmOEg1M+TPs1DqzhAERHXUKKTX9EJIQWAxAy1lxYRkVoqIwE2zjLvX/Joibv9vP0kACM6hONh01sFEamZ9Nsrv/NaoDMZUEVHRERqsXXvgSMdIrpAy8uL3SXPabBgexygRUJFpGZTouNnVnT8cpMASM504MhzujEgERGRSpCTDmvfMe9f8ihYim8wsOFwAmfSsgnysTOgZb0qDFBExLWU6ORXdLxyEgt/5ydp+JqIiNQ2Gz+GzEQIbQEdrilxt4JFQoe1D8euYWsiUoPpN1j+HB1rRjzBPuaioeq8JiIitUpuDqx+07w/8E9gtRW7m9NpFLaVHt05oqqiExGpFEp08ruukXGGED9PQPN0RESkltn2NaQcB/8I6HpLibttPppEbEoW/l4eXNK6fhUGKCLiekp08oeukR5PqK+Z6CQq0RERkdrC6YSV0837/R8CD68Sd12Q321taPswvDyKr/qIiNQUSnTymxGQEX+2oqOhayIiUlvsmQdn9oJ3EPS8q8TdDMNg/jZz2NooLRIqIrWAEp38OTo40gnzNgBVdEREpJYwDFjxqnm/90TwDixx123HkzmelImvp43BbRtUUYAiIpVHiY5XIFjNJgSNPNMBSEhX1zUREakFDi+H4xvBwxv6PnDBXQu6rQ1pG4a3XcPWRKTmU6JjsRQ2JIj0SAPUdU1ERGqJ5a+YX7vfDv4lV2kMw+Dnbeb8nFHqtiYitYQSHShsSFDfZiY68Rq6JiIiNd2JzXDwN7DYYMDDF9x118lUDsdn4OVhZUjbsCoKUESkcinRAfANBSDUkgJojo6IiNQCK6abXzvfACFNL7hrQbe1y9o0wM/Lo5IDExGpGmVOdJYtW8aYMWNo2LAhFouF77///oL7L1myBIvFct4tNja2vDG7Xv7QtSBnMqB1dEREpIaLPwA7fzDvD5xy0d3nFy4Sqm5rIlJ7lDnRSU9Pp2vXrsyYMaNMx+3Zs4eTJ08W3sLCqlFpPH/oWkCemehojo6IiNRoK6cDBrQZBeEdLrjrvrhU9p9Kw9Nm5fL21ehvs4hIBZW5Pj1q1ChGjRpV5hcKCwsjODi4zMdVifyKjk9uEgAZOXlkOfLUdUZERGqelBOwZY55/5JHL7p7Qbe1S1rXJ9DbXpmRiYhUqSobiNutWzeys7Pp1KkT//rXvxg4cGCJ+2ZnZ5OdnV34OCXFnDvjcDhwOMre+rngmJKOtXoFYQM8MuPxsFrIdRqcSs4gMsi7zK8lledi11FqBl3HmkXXqQZa8xY4HdBkADTpe9Hd5xd0W+ukbmsiUrtUeqITGRnJO++8Q69evcjOzuaDDz5g8ODBrF27lh49ehR7zLRp03j66afP275w4UJ8fX3LHUt0dHTxMSbG0AdIPL4PX5uTFKeF7xcspmlAuV9KKlFJ11FqFl3HmiEjI8PdIZTL8ePH+dvf/sbPP/9MRkYGrVq1YubMmfTq1Qsw2yn/85//5P333ycpKYmBAwfy9ttv07p1azdHXkHZqbDxY/P+JVMuuvvB02nsjk3Fw2pheIfwyo1NRKSKVXqi07ZtW9q2bVv4eMCAARw4cIBXX32VTz/9tNhjHn/8caZOnVr4OCUlhaioKEaMGEFgYMmrOpfE4XAQHR3N8OHDsdvPL8tbjgTB4TcJ9XLSo3kDluw9Q15YW0YPaVnm15LKc7HrKDWDrmPNUlBRr0kSExMZOHAgQ4YM4eeff6ZBgwbs27ePkJ
CQwn1efPFFXn/9dT7++GOaN2/Ok08+yciRI9m5cyfe3jW4mr/5M8hOgXqtodXwi+7+zcZjgDlsLdjXs7KjExGpUm7pIdmnTx9WrFhR4vNeXl54eXmdt91ut1fojVGJxwea5XpLZgJXDmjIkr1n+GXnKR4d0a7cryWVp6L/DqR60HWsGWriNfrPf/5DVFQUM2fOLNzWvHnzwvuGYTB9+nSeeOIJrrnmGgA++eQTwsPD+f777xk3blyVx+wSzjxY87Z5v9+DYL1wv6HcPCdf5yc6N/eKquzoRESqnFvW0dmyZQuRkdWohaVvPfNrZiIj2tXHw2phd2wqB06nuTcuEREpsx9//JFevXpx4403EhYWRvfu3Xn//fcLnz906BCxsbEMGzascFtQUBB9+/Zl9erV7gjZNfbMh6Qj4BMCXW+56O6/7TnN6dRs6vt7MrS9hq2JSO1T5opOWloa+/fvL3x86NAhtmzZQmhoKE2aNOHxxx/n+PHjfPLJJwBMnz6d5s2b07FjR7Kysvjggw9YvHgxCxcudN13UVG+oYAFMAgilYGt6rN072l+3naSyZfX8PHaIiJ1zMGDB3n77beZOnUq//jHP1i/fj2PPPIInp6eTJgwoXAdt/Dwom/uw8PDS1zjraqb5JSHbdWbWIG87hNwWuxwkXPPXnsYgLHdGmIx8nA48lwWS12iBiu1g65jzVLa61TmRGfDhg0MGTKk8HHBXJoJEyYwa9YsTp48SUxMTOHzOTk5/PnPf+b48eP4+vrSpUsXfv311yLncDurzfwELDMB0s9wZedIlu49zfxtsUp0RERqGKfTSa9evXjhhRcA6N69O9u3b+edd95hwoQJ5TpnVTfJKaugjEMMProGJzZ+TW5G1vz5F9w/KRuW7LEBFsLS9jN//v4L7i8XpwYrtYOuY81Q2kY5ZU50Bg8ejGEYJT4/a9asIo8fe+wxHnvssbK+TNXzq28mOhnxDO/QCttcCztPpnD4TDrN6vu5OzoRESmlyMhIOnQoukhm+/bt+fbbbwGIiDDnZcbFxRUZRh0XF0e3bt2KPWdVN8kpK9sPD5h3Ol3L5dfcdtH9Zyw5iMF+ejcL4a7re1f49esyNVipHXQda5bSNspxSzOCaqlgnk7GGUL8PBnQsh7L951h3raTTBrSyr2xiYhIqQ0cOJA9e/YU2bZ3716aNm0KmI0JIiIiWLRoUWFik5KSwtq1a3nwwQeLPWeVN8kpi5QTsPN7AKwDJmO9yPmcToNvNh0H4JY+TfSmzkXUYKV20HWsGUp7jdzSjKBaKkh00s8AMLqz+Snfz9tPuisiEREph0cffZQ1a9bwwgsvsH//fmbPns17773HpEmTALBYLEyZMoXnnnuOH3/8kW3btnHHHXfQsGFDxo4d697gy2Pd++DMNRcIbdj9oruvOhDPscRMArw9GNWpGjUGEhFxMSU6Bfzqm18z4gEY2TECm9XC9uMpxMTXzAXzRETqot69ezN37lzmzJlDp06dePbZZ5k+fTrjx48v3Oexxx7j4Ycf5r777qN3796kpaWxYMGCmreGTk4GbMxvo93/oVIdMme9OY/22u6N8PG0VVZkIiJup6FrBXyLJjqhfp70axHKyv3xzN9+kgcu0+KhIiI1xVVXXcVVV11V4vMWi4VnnnmGZ555pgqjqgRb50BmIoQ0g7ajL7p7QnoOC3eYneVu7q21c0SkdlNFp8Afhq7B2eFr87dp+JqIiFQzTufZBUL7PmB2EL2I7zYdw5Fn0KVxEB0bBlVygCIi7qVEp0Dh0LWzic7IjhFYLfD7sWSOJmj4moiIVCP7f4X4feAVCN0v3mnNMAy+WH8UUDVHROoGJToFCis68YWb6vt70be5uV1NCUREpFpZM8P82uMO8Aq46O6bYhLZfyoNH7uNq7s2rOTgRETcT4lOgT80IygwurO53sL8bcWvli0iIlLl4nbAwSVgsUKf+0p1yJx1ZjXnqi6RBHirfa6I1H5KdAoUrq
MTD+csiDqyUwQWC2w5msTxpEw3BSciInKONW+ZX9uPgZCmF909JcvBvN/NkQnj+mjYmojUDUp0ChR0XXM6ICu5cHNYgDe9m4UC8LOaEoiIiLulnYLfvzbv95tUqkN+3HKCTEcercP86dEkpBKDExGpPpToFLB7g6e/ef8Pw9euVPc1ERFxo193xrEpJtF8sP5DyMuGRj0hqk+pjv/ynCYEFoulssIUEalWlOic69zha+e4In/42qaYJE4ma/iaiIhUnbiULCZ+uoEb31nNL1sOw/oPzCf6PQSlSFq2H09m2/FkPG1WruvRuHKDFRGpRpTonKuYtXQAwgO96dXULPX/rKYEIiJSheJSsjAMyHMa/PbNDHMZhMBG0OGaUh1fUM0Z0TGcUD/PygxVRKRaUaJzrmLW0ilQsHio2kyLiEhVSsnMzb9nMMH6MwD7m98Ktot3TsvMyeP7LccBGNe7SWWFKCJSLSnROVdBQ4L08xOdKzqZbaY3HEkkLiWrKqMSEZE6LCXLAcCEiMO0tx4lw/Bi3Ia2LN17+qLHzt92ktSsXKJCfRjQsl5lhyoiUq0o0TmXX/FzdAAig3zo2TQEw4AF2zV8TUREqkZKppnoXJ/9AwBrg0ZxJs+X+z7ZwMr9538wd67CJgS9orBa1YRAROoWJTrnKqEZQYFR+VWdeeq+JiIiVSQ1K5eWluN0yVwHWBh425MMax9Gdq6Tez5ez5qDxf/NOnA6jXWHE7Ba4IaeWjtHROoeJTrnusDQNYBR+fN01h9O4FSqhq+JiEjlS8lycJdtgfmg7Sg8w1oxY3wPhrRtQJbDyd2z1rP+cMJ5xxVUcy5vF0ZEkHdVhiwiUi0o0TnXBZoRADQK9qFbVDCGAb9o+JqIiFSBrLQkrrctNx/0ewgALw8bb9/Wk0tb1ycjJ487P1p3dp0dICfXybcbjwFws5oQiEgdpUTnXIUVneKHAcC5i4cq0RERkcrnlXoUH0sOmfZgaHZJ4XZvu4337+jFgJb1SM/JY8KH69h6NAmAX3fFEZ+eQ1iAF0PaNnBP4CIibqZE51y+oebXEubowNnua2sPxXMmLbsqohIRkTrMyDQrNTleoectEOptt/HBhF70aR5KanYut3+4lu3Hk/kif9jajb0a42HTn3oRqZv02+9cBUPXHOngyCx2l6hQX7o2DsJpwC87VNURkWou7RTkZLg7CqkAS1YSAHmeQcU+7+vpwcw7e9OraQgpWbmM/2Aty/eZradv6qUmBCJSdynROZdXIFjzF2AroSEBnG1KMF/d10SkOstKgU+vg0+uvuCQXKnePLKTADC8g0vcx8/Lg5l39aZbVDDJmQ4MAwa2qkfTen5VE6SISDWkROdcFgv45Y9lTjxc4m6jO5mJzuoD8cRr+JqIVEe5OfDlbRC3DRKPQHaKuyOScrI7zGtnKRheXYIAbzsf392HLo3Nys+E/s0qOzQRkWpNic4ftbjM/Lr50xJ3aVLPl06NAnEasHBnXBUFJiJSSk4n/DAJDi0FT38Y/zWENnd3VFJOXvmJjs03+KL7BvnY+fqB/vwyZRAjOkZUcmQiItWbEp0/6jPR/Lr9O3NsewlGa/iaiFRXi/4F274Cqwfc9Ak07ObuiKScnE4DX6eZ6Hj41yvVMV4eNtpGBFRmWCIiNYISnT9q1BMa9wanAzbOKnG3guFrqw7Ek5ieU0XBiYhcxJp3YOVr5v2r34RWQ90bj1RIanYuQaQB4FnKREdERExKdIrT537z64aPIM9R7C7N6vvRITKQPKfBwp3qviYi1cCO72HB3837Q5+Cbre4NRypuJRMB0GkA2D3v/AcHRERKUqJTnE6XAP+4ZB6Enb9WOJuo/LX1Fm0q+QhbiIiVeLwSvjuPsCA3vfCJVPdHZG4QGpWLsEWM9HBJ8S9wYiI1DBKdIrj4Qk97zLvr323xN36tzSHEWyKScIwjKqITETkfKd2wRe3QF42tLsKRr143sKSUjOlZDkItphD15ToiI
iUjRKdkvS6y1xT5+haOLGl2F06NQrCbrNwJi2bY4nFLzAqIlKpko/DZ9dDVjJE9YXrPwCrzd1RiYukZDoIzB+6xgXW0RERkfMp0SlJQAR0HGveX/desbt42210aGiuV7ApJrGKAhORC6qs6uqxjbDsJdg930wuqkMVNzMJPr8BUo5D/TZwyxdg93F3VOJCaRmZBFryP0hTRUdEpEw83B1Atdbnftj2NWz7BoY/A371z9ulR5Ngth5NYtORRK7p1sgNQYoITifsj4ZVb8DJ32HsDGg/xnXnP74JPh4DjvSz23zrQ2TXoreQZlU3ZCw321wQ9NRO8I+A276FiywoKTVPVlrC2QfeQe4LRESkBlKicyGNe0HD7nBis9lqetBfztulZ9MQZq48zKaYpCoPT6TOc2TB71/C6hlwZs/Z7d/cDeO/ObsAcEUkHILZN5lJToP2YLHC6d2QcQYOLDJvBbyCILJLfuLTzWztXBnJh9MJcx+Aw8vBM8BcEDS4ietfR9zOkWaOFsiy+uFt059sEZGy0G/NC7FYoO8DMPd+s9X0wCnwhz80PZqYQwl2nkwhIycXX0/9SEUqXUYCrP8Q1r0L6afNbZ4B0OtOiD8Ie+bBF7fChB/NtbHKK/2MOf8l/TREdIY754N3IDgyIW4nxG6Fk/m3uB2QnWwmH4eXm8d7+EDXcebvkbB2Ff62C0U/CTu+M+cRjvvMTK6kVspLNys6WfZAvN0ci4hITaN35RfT8VpY+IQ5Bn73T2fn7eRrGOxDRKA3sSlZ/H4smX4ttKCbSKVJOAir34LNn0Fu/ryFwMbQ70HocUd+EpIFs2+EQ8vgsxvg7l+gQZuyv1ZOBsy+GRIOQFATs0LkHWg+Z/eBxj3NW4E8h1npKUh8Dq+EUztg40zz1nKoGWfLoWAtx/TIhEOw83tzrZyTW8xtY9+CFoPLfi6pOTLNRCfHrmFrIiJlpUTnYjy8oOedsOy/ZlOCPyQ6AD2aBjN/WyybYhKV6IhUhqPrzPk3u/4H5DcBiOgMA/5k/p+02c/ua/eGcbPNOTUnNsOnY81kJziq9K+Xlwvf3gPHN5idrm77xmxQciE2uxlTRGfofpvZrODIKlj7Nuyed3aYW73W0O8B6HoLePpd+JzxB84mN7G/n91uscGI56DLTaX/nqRmykwGIM9TiY6ISFkp0SmNXnfDilfhyEqI3Wa+kTlHjyYhZqJzJMk98YnUNpmJELMWjqyAg0uLvslvNRwGPAzNB5U88d8rwKzAfHQFxO+DT6+FuxcU21DkPIYB8/8Ce+aDzQtu/RIatC3792CxQLOB5i3xMKx7HzZ9YsYz78+w6BnzQ5TeE4smYWf2w865sOMHiNt2zvls0PxSc0HjdmPAv0HZY5Iax5qdBIBTraVFRMpMiU5pBDaE9lebY+LXvgvXvFnk6e7583Q2xyRiGAYWLdQn5fXr0+ab4Vu+gKje7o6m6qSdhphV5nCvI6sgbjuFlRsAm6dZveg/GcLal+6cfvXhju/hw5FmcvHZ9XDnT2YSdCHLXzKHmmEx16Rp0q+c39Q5QprByOdh8N9hy2xY8zYkHoKVr8GqN6HD1dCgHez80RzuVsBiMxsqdLjGXAi0NIma1Cr2HLOio9bSIiJlp0SntPrebyY62742W02f00mpU6NAPG1W4tNziEnIoGm9iwxHESlOwkHzja+RB9/eDQ+sqLXtZL1zErBs/waOrTETmzN7z9+pXitoOtC8tRwC/mFlf6GgxnD7XJh5hTmvZc4tZqXHXsK07i2zYfFz5v1RL5oJiCt5BZi/S3rfC/sWwpq3zLlEO+ae3cfqYc676TAW2l2pltF1nKfDTHSsvkp0RETKSolOaUX1hYgu5hCaTZ/AJVMKn/LysNGxUSCbY5LYFJOoREfKZ+l/zSQHICkG5v0Frn/fvTG5Wno8tp//xsgdX8OOPzwX1hGaDjCHejUZAAHhrnnNBm3M5ObjMWY3tG/vgRs/Pq+DIvsXwY8Pm/
cH/gn63uea1y+O1QZtR5m32O2w/n3IiIc2V0Db0UpupJBPbgoAHn76NyEiUlblaP1TR1ks5iexAOs/MCcrn6Nn/vA1zdORcok/AL9/Yd4fOc1cq2XbV/D7V+6Ny1UMw/xeZvTGuv1rDCw4I7uZQ9HGzYbHDsFDq+DKl8xOh65Kcgo06gG3zDGHwO3+CX76kxlTgRNb4Ks7wJkLnW+Cof9y7etfSEQnGPMa3PyZ2cRASY7kMwwDX2cqAPYANboRESkrJTpl0ekG8K0HyUdh789FnurR1Ex0Nh5JdEdkUtMt/Q8YTmg9Evo/BJf9zdz+01RzIntNlhQDn98A302EjHiMsA4sa/MUeXf/as5bqarhWc0HwQ0zzSRy82cQ/ZS5PfEwfH4j5KSZ+1wzo3ztn0VcLNORRyDpAHgHKAEWESkr/TUvC7s39Jhg3l/7bpGnChYO3R2bQnp27h+PFCnZ6b3m3C+AIY+bXy/9C0T1g5xU+HbieRXEGsGZB2vegRn9YP+vZgezy58k9+5FJPm1dE9M7a+Cq98w7696HRY9a661k34KwjuZVRUPT/fEJvIHKZm5BJMGgJcqOiIiZaZEp6x632N2Qjq83FwZPV9EkDcNg7xxGrD1WJL74pOap6Ca03Y0NOxubrN5wHXvgVcgHFtnruNUk8TthA9HwIK/gSPdnHPz4EoY9Jeia964Q/fbYPiz5v3lL5kd2QIb5y8IWjubP0jNlJrlIMhiJjoWH1V0RETKSolOWQU1NofagLmA6Dm6Ny1oM51UxUFJjXVqN2z/1rw/+O9FnwtpCle+Yt5f9iLErKm6uLJSYP2HZuONI6shPb50x+Vmw+Ln4d1B5mKbngHm93DnPKjfunJjLouBj8Alj5r3vYPgtm8hMNK9MYn8QUpmDkH5Q9fwCXZrLCIiNZG6rpVH3wdg14/w+5cw7J+F6xv0aBLCvN9PsknzdKS0lv4bMMw1UiK7nv98lxthf7T5b+3bifBgFbScTjsFn11nLo57Lp9QqN/GTFjO/Rrc1KxAxawxu5YVtIpuOxqufNlch6o6GvpPs9JUvzWENnd3NCLnSU9NwdOS34lR6+iIiJSZEp3yaDrAHM8ft92c1DzAbEnbo0kwAJuPJmnhULm4uB2w43vz/uDHS95v9EtmEpF0xGxOcP0HZhfAypB4BD4da67p49cAIjrDmX1mA47MBDi6xrydy2o3q0/x+83HfmEw+r/mIpfV+f+AxQJtRrg7CpESZaWeAcCBHbvd183RiIjUPEp0ysNigT73wf8egXXvQ7+HwGqjY8MgPD2sJKTncDg+g+b1tZ6OXMCS/GpOh2vMFsMl8Q6E6z+Ej0bC9m+g9XDoOs718cTtNCs5qSchuAnc/j3Uy28akJNutsA+s9dMfAq+xu+H3MyzSU7B/Be1SBapsJw0c3RAhs2foOr8oYGISDWlRKe8Ot9otqdNOmKucN52FJ4eVro0CmLDkUQ2HUlUoiMli91mDn/EApf9/aK7E9XbnMPz2/PmQqJRfV073OroOrPFclYShHWA274rOmfF0w8iu5i3czmdkHLMTHz8Iy6csIlImeSmmXPjsjwCUZsMEZGyUzOC8vL0hZ75raZXzyjcXLieTozm6cgFLPm3+bXjtRDeoXTHXPpnaNLfbDn93UTIc7gmlv2/wifXmElO4z5m44DSTsy3Ws3qT6thSnJEXMzINP+OZNuV5oiIlIcSnYroc785P+Hwcji2ATg7T0cNCaREJ7fC7p8Ay/md1i7EastvOR0Ex9bD0hcrHsv2b2H2OHBkQMuhcMf3GnYmUk0YmUkA5NoD3RuIiEgNpUSnIoIaQZebzfsrXgXOLhy6Ny6VNC0cKsUpqOZ0vgEatC3bscFN4Kr8ltPLX4Ijq8ofx/oP4Zt7wOmAjtfBLV+YQ9REpFqwZpkfmOV5B7s3EBGRGkqJTkUNfASwmJ/Qn95DWKA3jYJ9zIVDjya5OzqpDNu+gVlXwfbvwDDKduzxTbBnPl
iscNnfyvf6nW+Arreai4x+dx/kf+pbaoZhLkA6bypgQK97zE5uHp7li0dEKoVHTrJ5x1utpUVEykOJTkU1aHt2AdGVrwNn5+lo+FottOYd+PYec7jiN3fBl7dBysnSH19YzbmpYgtojn4RQpqbbZ9f7wYfXw0L/gFbZsPJ382FO4vjdMIv/4DFz5mPBz1mrnVjtZU/FhGpFPacFAAsvsHuDUREpIZS1zVXuORRs6Lz+5cw5HF6NAnmf1tPsEkNCWoPw4Cl/4El08zHLQbD4RXmdT+0HEY+B91vv/C6Mcc2wL5fwGKDyx6rWDxeAXDDh/DZDeb6NoeWmrcCVg9zMc/wTmaTgPCOZje1Rc/A1jnmPlf8G/o9WLE4RKTSeOeaFR2bbz03RyIiUjMp0XGFxr2g2aXmp/yr36JHR/NN7OajSTidBlar1j+o0ZxO+OVxWPuO+XjI/8Ggv5oLfv44GU5shh8fhm1fw5jXS277XJAkdR13dn2aimjUE6buhFO7zMVr43ZA7HaI2wZZyXBqp3nb9lXR4yw2GPtW5azFIyIu45uXCoDdX0PXRETKo8xD15YtW8aYMWNo2LAhFouF77///qLHLFmyhB49euDl5UWrVq2YNWtWOUKt5i551Py6cRYdgnPxtltJynBw8Ey6e+OSisnLhR8mnU1yRr1oVmMsFrNScs+vMOI58PCBQ8vg7QFmu3FnXtHzHF1ntnG22GDQX1wXn90HGvWAHnfAqP/AXfPgb0fg0R1wy5dw+RNmC+t6rc15QV6BMO5zJTkiNYCv00x0vAJU0RERKY8yJzrp6el07dqVGTNmXHxn4NChQ1x55ZUMGTKELVu2MGXKFO69915++eWXMgdbrbW8HCK6gCMd+8YP6dIoGEDD19wlNwd2zIXkY+U/hyMLvroDts42E5Rr34O+9xfdx+YBAx6GB1eaVT1HhjkH5sMRELfz7H6/vWB+7XYrhLYof0ylYbFAUGNoe4VZebpxFjy8AR4/Dn89AG1HVe7ri0iFZefmEUQaAD6BDdwcjYhIzVTmoWujRo1i1KjSv1F65513aN68OS+//DIA7du3Z8WKFbz66quMHDmyrC9ffVksZlXnm7tg7Tv06TicdYdhc0wiN/WKcnd0dc/iZ2HV6+ZclU43mMlIWRa0zE6FObeYwxFtXnDTxxdOEOq1hDt+hM2fwMIn4fgGeHeQWb1p0h8O/mbGMuivFf/eysvT132vLSJlkpqVSxDmiACfIFV0RETKo9Ln6KxevZphw4YV2TZy5EimTJlS4jHZ2dlkZ5/tGpWSYnaecTgcOBxlXw2+4JjyHFsmrUfjEdIcS+IhRuUs5E06s/FwYuW/bh1R6uuYmYjH+g+xADhz4fcv4PcvcLa4HGf/yRhNL71w04CMBGxf3Iz15GYMTz/ybvoco+klUJrr2GU8NLsc24K/YN33CyyZhmGxYQHyut6K079h6c5Ti1XZ/0dxCV0n90hNz6S+JRMAmxbxFREpl0pPdGJjYwkPDy+yLTw8nJSUFDIzM/Hx8TnvmGnTpvH000+ft33hwoX4+pb/U+no6OhyH1taTf0H0y3xEM12vYsH09l3KpVvf5yPj9o+uMzFrmObk9/T3pFOsk8TtkTdTatT82mYtB7rwcVYDy4myacZ+8JHczK4N4alaFtl75wE+h/4L4FZx8m2+bOm+V9I2pECO+aXLUi/W2nYrCVdjn2KV24qTouNRTndyJxfxvPUYlXx/1EqLiMjw90h1EnpyfFnH3gHuS8QEZEarFq+/X788ceZOnVq4eOUlBSioqIYMWIEgYGBZT6fw+EgOjqa4cOHY7fbXRnq+XIvx3hzHn7pp7gzYAMfpPYjvENfLmmloQcVVarrmJOOx5tTAPAb+QQDOl4HTCY38TDWtW9j3Tqb4MzD9D78FkZQE5x9H8TZ9Vbw9IOEg3jMvh5L1nGMgEist37LgPptKhDxlZDxJ/LWvYsR3okh7a+uwLlqjyr9/ygVVlBRl6
qVlWomOun44merln+qRUSqvUr/7RkREUFcXFyRbXFxcQQGBhZbzQHw8vLCy8vrvO12u71Cb4wqenwpXwT6T4Jf/8k9lh/5kD78fjyVIe0jKvd165ALXseNc8x1ZUKa49H5erNZAEBYaxjzClz+f7D+A1j3LpbkGGwLH8e2/EVzDZytX0D6KQhtgeWOH7AHN6l4sEERMPyfFT9PLVQl/x+lwnSN3COnINGxBeDn5lhERGqqMnddK6v+/fuzaNGiItuio6Pp379/Zb+0+/S6G7yCiMw5zFDrZjaq81rVyM2BVW+a9wc+cjbJOZdfPRj8N5iyHa58GUKaQ2ai2bgg/RSEd4a7fwFXJDkiIuXkSDMTnQxb2UcxiIiIqcyJTlpaGlu2bGHLli2A2T56y5YtxMTEAOawszvuuKNw/wceeICDBw/y2GOPsXv3bt566y2++uorHn30Udd8B9WRdyD0vgeAhzx+YHNMAk6n4eag6oDt30DKMfAPh663XnhfT1/ofS88vBFu/BiaDoQ2o+DOn8A/rGriFREpQV6G+QFZjocSHRGR8ipzorNhwwa6d+9O9+7dAZg6dSrdu3fnqaeeAuDkyZOFSQ9A8+bNmTdvHtHR0XTt2pWXX36ZDz74oHa1li5OvwcxbF70sO6nffZ2DpxOc3dEtZvTCSumm/f7PQR279IdZ7VBx7Fw13y49QvwCa6kAEVEyiAzP9HxVCMCEZHyKvMcncGDB2MYJVcnZs2aVewxmzdvLutL1Wz+YVi63wYbPuQhjx/ZFHMTrcMD3B1V7bVnPpzZA15B5tBBEZEazJKZBECulxIdEZHyqvQ5OnXagIdxYmWwbSsnd693dzS1l2HAilfM+33uNYcOiojk+/e//43FYimyfltWVhaTJk2iXr16+Pv7c/3115/XOMedrNnJABjewe4NRESkBlOiU5lCmxPXZDQAXY/MdHMwtdjh5XB8I3h4Q98H3R2NiFQj69ev591336VLly5Ftj/66KP873//4+uvv2bp0qWcOHGC6667zk1Rns/uSALA4hPi3kBERGowJTqVzHuwuR7QIMcKUk/sdXM0tdTy/GpO99vBv4F7YxGRaiMtLY3x48fz/vvvExJyNmFITk7mww8/5JVXXuHyyy+nZ8+ezJw5k1WrVrFmzRo3RnyWl8Ncv8jqG+rmSEREai6tQlbJQlr0ZI2tB/3yNpG6+FUCbnvb3SHVLic2w8HfwGKDAQ+7OxoRqUYmTZrElVdeybBhw3juuecKt2/cuBGHw8GwYcMKt7Vr144mTZqwevVq+vXrd965srOzyc7OLnxcsJCqw+HA4XCUObaCY0o61jvXPL/FJ6hc55eqc7FrKTWDrmPNUtrrpESnCqxvfCf9jmwi7MA3kPovCAh3d0i1x4pXza+db4CQpu6NRUSqjS+++IJNmzaxfv358yNjY2Px9PQkODi4yPbw8HBiY2OLPd+0adN4+umnz9u+cOFCfH19yx1ndHR0sds756aABfYePsHB+fPLfX6pOiVdS6lZdB1rhoyMjFLtp0SnCgS3HcTGQ63pad1nvjHv9wA488BwmjdnHhh5Z78ahnnfwxMiupgtkOV8Z/bDzh/N+wOnuDUUEak+jh49yp/+9Ceio6Px9i5lq/mLePzxx5k6dWrh45SUFKKiohgxYgSBgWVvgOJwOIiOjmb48OHY7fbznk/ZZFaoe/a/jEZte5U/cKl0F7uWUjPoOtYsBVX1i1GiUwV6NAvl1dyr+cDzZVj7tnkrrbCOMPxpaDUMLJbKC7ImWvUaYJgLfYZ3cHc0IlJNbNy4kVOnTtGjR4/CbXl5eSxbtow333yTX375hZycHJKSkopUdeLi4oiIiCj2nF5eXnh5eZ233W63V+hNUXHH5+U5CcBcey0wNFxvumqIiv5bkOpB17FmKO01UqJTBdqGB7Daoxe/5XXlMs/dWK02c06J1QoWa/592zlfLeb99NNwagd8fgM0uxSGPwONelz8BeuClB
OwZY55/5JH3RuLiFQrQ4cOZdu2bUW23XXXXbRr146//e1vREVFYbfbWbRoEddffz0Ae/bsISYmhv79+7sj5CLSUpMJsuQB4Bdc383RiIjUXEp0qoCHzUqXxqHcdfBvTBvTmVv6NCndgRkJ5vowa98zWyi/PwQ6XgdDn4TQFpUbdHW3egY4HdB0IDTp6+5oRKQaCQgIoFOnTkW2+fn5Ua9evcLt99xzD1OnTiU0NJTAwEAefvhh+vfvX2wjgqqWnnyaICDH8MDLRwtNi4iUl9pLV5EeTYMB2HQksfQH+YbCiOfg4Q3Q9RbAAju+gzf7wPzHIP1MpcRaZU7tNltD7/3FnJdUWpmJsCF/XSJVc0SkHF599VWuuuoqrr/+egYNGkRERATfffedu8MCICMlHoBUi7+GLIuIVIAqOlWkRxNzDYdNMWVIdAoEN4Fr34H+kyD6n3BgEax7F7bMhoF/gv4PgaefiyOuJE6nGf+at82vBcI7w6WPQoexF22+YN3wITjSzWNaDbvgviIiAEuWLCny2NvbmxkzZjBjxgz3BHQB2fmJTprVn3pujkVEpCZTRaeKdM9PdA6cTudoQula4p0nojPc/h3c8QNEdoWcVPjtOXi9B2ycBXm5rgvY1XLSYf0HMKOPOefowCJzflLzy8DTH+K2wTd3w5u9YdOnkJtT7GlsedlY179nPrhkij7tFJFax5FmJjoZtrJ3cxMRkbOU6FSRUD9PLm1tTip9fdG+ip2sxWCYuASu/xCCm0JaLPzvT/D+YIg/UNFQXSvpKEQ/Ba+0h3l/hvh94BUI/SfDI5thwo8wZRsMfhy8gyHhAPw4GV7vBmvegZyiSWHT+KVYMhMgpLlZ/RERqWVy083Kf5YSHRGRClGiU4WmDm8DwLebjnHwdFrFTma1motkTl4PV/wbfEIgdhu8N/js2jLuYhgQsxa+mgCvdYWVr0FWspmcjHoRpu6Ekc9DSDNzf99QGPx3eHS7OSfJPxxSjsOCv8H0zrD8ZfP4vBxanspfOG/gI2DTyEsRqX2MjAQAcjyV6IiIVITeKVah7k1CGNoujEW7T/Haon28Nq57xU/q4QX9HjSrG9/cBTGr4avbzYrJsH+BrRJ7wTsyITUW0k5BWtzZ2/5FcGLT2f2aD4J+D0HrEReef+MVAAMeht4TYcvnsHI6JMXAomdgxWvYmvTD7kjA8AvD0vXWyvu+RETcyJKVBIDDM9itcYiI1HRKdKrYo8PbsGj3KX7ceoKHBreibYSLWocGRsKE/8Gv/4LVb5q34xvhhpnmc+WVlQzbvjaHxP0xqcm+wKq0Ni/ociP0fRAiOpW8X3Hs3tD7HugxAbZ/a7bYPr0b675fAHD2fQCb3TWrnYuIVDcFiY7TK8i9gYiI1HBKdKpYp0ZBjO4cwfxtsbwavZd3bu/pupPb7OaQsKi+8MMks7rz7qVww0dmVaUsUk6YndE2zDSbHpTEw9scauYfDgH5X0NbQOebwL9BBb8fD+h6M3S+EfbMw7nydZISEwjocRcX7ssmIlJzeeQkm3d8QtwbiIhIDadExw0eHdaGn7fHsmBHLNuPJ9OpkYs/tetwNYR3hK/ugLjt8Mk1MOT/4JKp5tyeC4nbCaveMKs4Toe5rX5baDMSAiLOJjUFiY1XYOV3PrNaof0Y8lpdwfL58xntpQX0RKT28nKYiY7FV4mOiEhFKNFxg9bhAVzTtSHfbznBK9F7+ejO3hU6X26ek//+soeIIG/uGtjc3FivJdz7K8z7C2z5DBY/C0fXmevx+IYWPYFhwJGVZtOAfQvPbm86EAY8kj+3Rn0rRESqgleuWUX38FOiIyJSEUp03ORPw9rwv99Psnj3KTYeSaRn0/L/QXvxlz28t+wgAJe2bkCrMH/zCbsPjJ0BTfrB/L/Avl/gvcvgxo+hUQ9w5sGu/5kJTmHzAAu0H2MuRNq4VwW/SxERKSvfPHP+o6e/lgsVEakIfUzvJs3r+3FDj8YAvBq9t9znmf
f7ycIkB+DDFYfO36nH7XBPtNnOOSkGPhoJP/8d3ugJX08wkxwPb+h1Nzy8EW7+VEmOiIib+DvNio5XgBIdEZGKUKLjRg8PbYXdZmHF/jOsPhBf5uP3n0rlr99sBeCyNubE/283HeNMWvb5O0d2gfuWQtsrIS8H1r4NiYfMya6DHoMp2+GqV80hbyIi4h55DvzIBMA7sL6bgxERqdmU6LhR4xBfxvVuAsAr0XswDKPUx6ZmObjv041k5OTRr0UoH07oRbeoYHJynXyy+kjxB/kEw7jPYeQL0Li3uXjnozvg8v+reIc0ERGpMCMzqfC+f5AqOiIiFaFEx80mX94KLw8r6w8nsnzfmVIdYxgGj33zOwdPpxMR6M0bt/TAw2Zl4qUtAPh09WEyc/KKP9higf6TzEYFfe8HTz9XfSsiIlJBmSnm34EUw5dAP60XJiJSEUp03Cw80Jvb+zUF4OWFpavqvL/8ID9vj8VuszBjfA8aBHgBMLJjOFGhPiRmOPhm07FKjVtERFwvM8UcxpyMHz52rRgmIlIRSnSqgQcGt8TX08bWY8n8uuvUBfdddeAM//55NwBPXdWhSLc2D5uVe/LbS3+4/CB5ztIPhRMREffLyq/opFr8sVT2GmUiIrWcEp1qoL6/F3cOaAbAK9F7cZaQoJxMzuTh2ZtxGnBd90bcll8JOteNvaII8rFzOD6D6J1xlRm2iIi4WHaqWdHJsAa6ORIRkZpPiU41cd+gFgR4ebDrZAo/b4897/mcXCcPfb6J+PQc2kcG8vy1nYv9tM/Py4Pb+pkNDj5YfvC850VEpPrKTU8EIMsjwM2RiIjUfEp0qolgX0/uudQcdvbqr3vPG3b23LydbI5JItDbg3du64GPZ8ljtyf0b4anzcqGI4lsPJJYqXGLiIjrODMSAMjyUEVHRKSilOhUI3df0pxgXzv7T6Xx49bjhdu/23SssGX09HHdaFrvwp3SwgK9uaZbQ0BVHRGRmsTIMD+ccngGuzcQEZFaQIlONRLobef+QeaCndN/3Ycjz8nOEyn8Y+42AB4Z2prL24WX6lwTB5mtphfsiOVIfHrlBCwiIi5lyU4GwOkV5OZIRERqPiU61cyEAU2p7+/JkfgMZq48xAOfbSTL4eSyNg3409DWpT5Pm/AABrdtgGHAhysOVWLEIiLiKh7ZSQA4vYPdGoeISG2gRKea8fX04MHBrQB4Yf5uYhIyaBziw2vjumGzlq3V6H35C4h+teEoiek5Lo9VRERcy55jVnQsPqFujkREpOZTolMNje/bhIhAc0VsTw8r79zWk2BfzzKfp3/LenRsGEiWw8lna464OkwREXExr9wUAGx+we4NRESkFlCiUw152208eVUHQv08+e8NXejUqHxjtS0WC/flz9X5ePURshx5rgxTRERczCc/0fHwr+fmSEREaj4lOtXUlV0i2fTkcK7p1qhC5xndOZKGQd6cScvmhy3HL36AiIi4h2Hg50wFwFOJjohIhSnRqeXsNit3DTTX53l/+SGcf1ifR0REqomcdDwwK+8+gUp0REQqSolOHTCuTxQBXh7sP5XGkr2n3B2OiIgUJ9NcQyfb8MDPP8DNwYiI1HxKdOqAAG87t/RtAsB7y7SAqIhItZSf6CTjT6BP2RvQiIhIUUp06og7BzTDw2phzcEEfj+W5O5wRETkD3LSEwBINvwI9LG7ORoRkZpPiU4d0TDYhzFdGwLmXB0REaleslLOAJCEHwFeHm6ORkSk5lOiU4fce6nZlGD+tpMcS8xwczQiInKu7JR4ANIsAVjLuEC0iIicT4lOHdKxYRCXtKpPntNg5srD7g5HRETO4Ug35+hk2dSIQETEFZTo1DEFVZ0v1sWQnOlwczQiIlIgL92s6GTaA90ciYhI7aBEp465rE0D2oYHkJ6Txz2z1pOYnuPukEREBDAyzIqOwx7k5khERGoHJTp1jMVi4YXrOhPg7cGGI4lc//YqjsSnuzssERHJSgIg10uJjo
iIKyjRqYN6Ng3h2wcH0CjYh4Nn0rnurVVsOZrk7rBEROo0W3YSAIZXsFvjEBGpLZTo1FFtwgP47qEBdGwYSHx6DuPeW030zjh3hyUiUmd5ZCcDYPiEuDkSEZHaQYlOHRYe6M2X9/fnsjYNyHI4uf/TDXyy+rC7wxIRqZM8c1MAsPoq0RERcQUlOnWcv5cHH0zoxbjeUTgNeOqHHbwwfxdOp+Hu0ERE6hSf/ETHwy/UzZGIiNQOSnQEu83KtOs689eRbQF4b9lBHv5iM1mOPDdHJiJSR+Q58HaaCznb/eu5ORgRkdpBiY4AZje2SUNa8erNXbHbLMz7/SS3f7iWpAy1nxYRqXRZyYV3vQNV0RERcQUlOlLEtd0b8/HdfQjw9mD94USue3sVMfEZ7g5LRKR2yzTX0EkxfAn08XZzMCIitYMSHTnPgJb1+fbBATQM8ubg6XSue3slvx9LcndYIiK1V36ik2T4EeDt4eZgRERqByU6Uqw24QHMnTSQDpGBnEnL4Z6PN5CS5XB3WCIitVNmEgDJ+BHoY3dvLCIitYQSHSlReKA3Xz3Qnxb1/Tidms2LC3a7OyQRkVrJmZEAQJLhT6AqOiIiLqFERy7I38uD56/tDMDna2PYeCTRzRGJiNQ+2anxACTjT4C3KjoiIq6gREcuqn/LetzQszGGAf/4bhuOPKe7QxIRqVVy0syKTprFH08P/WkWEXGFcv02nTFjBs2aNcPb25u+ffuybt26EvedNWsWFoulyM3bWx1lapr/G92eUD9P9sSl8sHyQ+4OR0SkVslNNys6mR6Bbo5ERKT2KHOi8+WXXzJ16lT++c9/smnTJrp27crIkSM5depUiccEBgZy8uTJwtuRI0cqFLRUvRA/T/5vdHsAXlu0Vy2nRURcyJluDgvOsQe5ORIRkdqjzInOK6+8wsSJE7nrrrvo0KED77zzDr6+vnz00UclHmOxWIiIiCi8hYeHVyhocY/rejRiQMt6ZDmcPPHDdgzDcHdIIiK1gpGVBIDDUxUdERFXKVNrl5ycHDZu3Mjjjz9euM1qtTJs2DBWr15d4nFpaWk0bdoUp9NJjx49eOGFF+jYsWOJ+2dnZ5OdnV34OCUlBQCHw4HDUfYWxwXHlOdYKerpMe248s3VLNt7mrmbjjKmS2SVvbauY+2g61iz6DpVDVt+ouP0CnZrHCIitUmZEp0zZ86Ql5d3XkUmPDyc3buLbz3ctm1bPvroI7p06UJycjIvvfQSAwYMYMeOHTRu3LjYY6ZNm8bTTz993vaFCxfi6+tblpCLiI6OLvexctawSAvzj9p46vvfyT68Gd8q7oSq61g76DrWDBkZGqZaFWzZSQAYPiHuDUREpBap9Leo/fv3p3///oWPBwwYQPv27Xn33Xd59tlniz3m8ccfZ+rUqYWPU1JSiIqKYsSIEQQGlr2s73A4iI6OZvjw4djtattZUcNynex5azUHTqezxWjKc6NLrs65kq5j7aDrWLMUVNSlctkd5s/Z4hPs3kBERGqRMiU69evXx2azERcXV2R7XFwcERERpTqH3W6ne/fu7N+/v8R9vLy88PLyKvbYirwxqujxYrLbYdp1Xbjp3dV8ueE4N/RqQu9moVX4+rqOtYGuY82ga1QFDANvRzIANt+q+10qIlLblakZgaenJz179mTRokWF25xOJ4sWLSpStbmQvLw8tm3bRmRk1c3tENfr0zyUcb2jAHNtnZxcra0jIlIuOWnYyAPAHqBER0TEVcrcdW3q1Km8//77fPzxx+zatYsHH3yQ9PR07rrrLgDuuOOOIs0KnnnmGRYuXMjBgwfZtGkTt912G0eOHOHee+913XchbvH3Ue2o7+/JvlNpvLfsgLvDERGpmTKTAMg2PPDxDXBvLCIitUiZ5+jcfPPNnD59mqeeeorY2Fi6devGggULChsUxMTEYLWezZ8SExOZOHEisbGxhISE0LNnT1atWkWHDh1c912IWwT7evLkVR340xdbeH
3xfq7s0pDm9f3cHZaISM2Saa6hk4w/gT4aKigi4irlakYwefJkJk+eXOxzS5YsKfL41Vdf5dVXXy3Py0gNcHXXhnyz8RjL953hie+38dk9fbFYLO4OS0Sk5shvLZ1k+CnRERFxoTIPXRM5l8Vi4bmxnfDysLJyfzxzNx93d0giIjVLYUXHj0DvKu7XLyJSiynRkQprWs+PPw1rDcBz83aRmJ7j5ohERGqQ/EQnyfAn0FsVHRERV1GiIy4x8dIWtA0PICE9hxfm73J3OCIiNYaR34xAc3RERFxLiY64hN1m5YXrOgPw9cZjbI5JdHNEIiI1gyMtHoBkw08VHRERF1KiIy7Ts2kI1/doDMC7Sw+6ORoRkZohNy0BgBT88bbrz7KIiKvoN6q41P2XtQDgl52xHD6T7uZoRESqv7wMswKebQ9U10oRERdSoiMu1SY8gCFtG2AY8MEKVXVEpOpNmzaN3r17ExAQQFhYGGPHjmXPnj1F9snKymLSpEnUq1cPf39/rr/+euLi4twSr5HfjMDhGeiW1xcRqa2U6IjL3TeoJQBfbzhGfFq2m6MRkbpm6dKlTJo0iTVr1hAdHY3D4WDEiBGkp5+tMj/66KP873//4+uvv2bp0qWcOHGC6667zi3xWvITnVzPYLe8vohIbaWG/eJy/VqE0qVxEL8fS+bTNUeYMqyNu0MSkTpkwYIFRR7PmjWLsLAwNm7cyKBBg0hOTubDDz9k9uzZXH755QDMnDmT9u3bs2bNGvr161el8dqykwFw+gRX6euKiNR2SnTE5SwWCxMvbcHDczbzyeoj3D+oJT6eNneHJSJ1VHKymUiEhoYCsHHjRhwOB8OGDSvcp127djRp0oTVq1cXm+hkZ2eTnX22Qp2SkgKAw+HA4XCUOaaCYxwOBx45Znx4BZXrXOJe515Lqbl0HWuW0l4nJTpSKUZ1iqBxiA/HEjP5dtMxbuvX1N0hiUgd5HQ6mTJlCgMHDqRTp04AxMbG4unpSXBwcJF9w8PDiY2NLfY806ZN4+mnnz5v+8KFC/H19S13fL8u/Jmr88whdScSM5k/f365zyXuFR0d7e4QxAV0HWuGjIyMUu2nREcqhYfNyj2XNOfp/+3kg+UHuaVPE2xWdRMSkao1adIktm/fzooVKyp0nscff5ypU6cWPk5JSSEqKooRI0YQGFj2JgIOh4Po6GiGXdIbtpjbmrXuyOjRHSoUp1S9gms5fPhw7Hatg1RT6TrWLAVV9YtRoiOV5qZeUUz/dR+H4zOI3hnHFZ0i3B2SiNQhkydP5qeffmLZsmU0bty4cHtERAQ5OTkkJSUVqerExcUREVH87ykvLy+8vLzO22632yv0psiemwZAiuFLkL+v3mDVYBX9tyDVg65jzVDaa6Sua1Jp/Lw8uK1fEwDeW3bAzdGISF1hGAaTJ09m7ty5LF68mObNmxd5vmfPntjtdhYtWlS4bc+ePcTExNC/f/8qjdWSmQRAkuFHgLc+exQRcSUlOlKpJgxohqfNyqaYJDYeSXB3OCJSB0yaNInPPvuM2bNnExAQQGxsLLGxsWRmZgIQFBTEPffcw9SpU/ntt9/YuHEjd911F/3796/yjmtkJQGQhD+B3voUWUTElZToSKUKC/Dm2u6NAHh3qRYQFZHK9/bbb5OcnMzgwYOJjIwsvH355ZeF+7z66qtcddVVXH/99QwaNIiIiAi+++67qg82fw2dZMOPQB8lOiIirqQ6uVS6iYOa8+WGo0TviuPg6TRaNPB3d0giUosZhnHRfby9vZkxYwYzZsyogohKZsmv6CTjT30NXRMRcSlVdKTStQoLYGi7MAwDPlhxyN3hiIhUH/lzdJINPw1dExFxMSU6UiXuG9QCgG82HuNMWvZF9hYRqSMK5+j4Eeijio6IiCsp0ZEq0ad5KF0bB5GT6+ST1UfcHY6ISLXgTDebtCQZ/pqjIyLiYkp0pEpYLBbuG9QSgE9XHyYzJ8/NEYmIuF9ehtmMIAU//D
1V0RERcSUlOlJlRnYMJyrUh8QMB99sPOrucERE3M7I77qWZQ/CarW4ORoRkdpFiY5UGQ+blXsvMefqfLDiEHnOi3dGEhGp1fKbEeR6Brk3DhGRWkiJjlSpG3s1JtjXzpH4DBbuiHV3OCIibmXLTgbA6aVER0TE1TQgWKqUr6cHt/dryhuL9/PusoNc0SkCi0XDNUSkDjIMPHLMRMfwCXFzMCK1U15eHg6H46L7ORwOPDw8yMrKIi9P84jdzW63Y7PZKnweJTpS5e7o34x3lx1ky9EkNhxJpHezUHeHJCJS5TycWViNXACsPvo9KOJKhmEQGxtLUlJSqfePiIjg6NGj+gC2mggODiYiomIfiCvRkSrXIMCL63s0Ys66o7y79GClJTo5uU48PTQ6U0SqJ3teOgDZhgfePn5ujkakdilIcsLCwvD19b3om2Wn00laWhr+/v5YrXrv4E6GYZCRkcGpU6cAiIyMLPe5lOiIW9x7aQvmrDvKr7viOHA6jZYN/F16/o9WHOI/C3bzyNDWTBrSyqXnFhFxBXuumegk40+gr6eboxGpPfLy8gqTnHr16pXqGKfTSU5ODt7e3kp0qgEfHx8ATp06RVhYWLmHselKilu0bODPsPbhAPz7590u7cD287aTPPPTTrJznby0cA8r9p1x2blFRFzFM7+ik2T4Eeitzx1FXKVgTo6vr6+bI5GKKLh+pZljVRIlOuI2D1/eCrvNQvTOOP7543YMo+LJzuaYRKZ8uQWARsE+GAY8+tUWTqdmV/jcIiKuVDB0LRk/Arztbo5GpPbRXJuazRXXT4mOuE3XqGBeuakbFgt8tiaGlxfurdD5jiZkMPGTDWTnOhnStgG/PDqINuH+nE7NZupXW3Bq3R4RqUY8c9MASDL8CfRRRUdExNWU6IhbjenakOfGdgLgzd/28/6yg+U6T3Kmg7tnredMWg7tIwN549Ye+Ht58OatPfC2W1m+7wzvLS/fuUVEKsPZio4/garoiIiLNWvWjOnTp7s7DLdSoiNuN75vUx67oi0Az8/fxVfrj5bpeEeek0mfb2LfqTTCA7346M5e+HuZn462CQ/gn2M6AvDSL3vYFJPo2uBFRMrJs6AZgaGhayJiGjx4MFOmTHHJudavX899993nknPVVEp0pFp48LKW3D+oBQB//+53Fmw/WarjDMPgye+3s2L/GXw9bXw4oTeRQT5F9hnXO4qrukSS6zR4ZM5mkjPLP6mtKrhirpKIVH/2c5sRaOiaiJSCYRjk5uaWat8GDRrU+YYMSnSkWrBYLPx9VDtu7hWF04BH5mwpVbe0d5cd5Iv1R7Fa4I1butOpUVCx537hus5EhfpwLDGTf3y3zW3JRJYjj0Nn0lm1/wxfbzjK64v28fh3v3PHR+sY/spSOv3zF7o8vZDfjyW5JT4RqTqFXdc0dE1EgDvvvJOlS5fy2muvYbFYsFgszJo1C4vFws8//0zPnj3x8vJixYoVHDhwgGuuuYbw8HD8/f3p3bs3v/76a5Hz/XHomsVi+f/27js+qirv4/hn0qYkmYQQUighIBGkurQ8gGUVNIIggiKLKIKoiy6uiqiLSgcLClIW5REXO4+IArqCSBEkdKWIgqEZCCUhhJaEtEnmPn+EZM2SQBLSZvi+X695JXPvuXd+l8Pck989557Le++9R58+fbDZbERFRfH111+XKra8vDyGDh1Ko0aNsFqtNG3alBkzZlxUbt68ebRo0QKz2Ux4eDjDhw8vXHf27Fn++te/EhoaisVioWXLlnzzzTfl+8cqJV1CkhqjICFJy3aw7JckHvv4Jz55JJq2EbWKLb/sl0Re+zYOgNE9m9P1wnTVxbFbvJk1oC33vrORpb8k0mVrMPdHR1TKcfzRybRspq3cyy/HzpF4NotT53NKtd3IhT/z7ydvwOxVvnnjRaTm8/rD0DW7VYmOSGUyDINMR16J651OJ5k5eXjl5Fb4c3Ss3p6lmkFsxowZ7Nu3j5YtWzJhwg
QAdu/eDcA//vEP3nzzTRo3bkytWrU4cuQIPXr0YPLkyZjNZj766CN69erF3r17iYgo+e+b8ePHM2XKFN544w1mzZrFwIEDOXz4MEFBl354u9PppH79+ixcuJDatWuzceNGHnvsMcLDw7nvvvsAeOeddxgxYgSvvfYa3bt359y5c2zYsKFw++7du5OWlsYnn3zCNddcw549e8r9fJzSUqIjNYqnh4m3+l9PWtZPxO5PYcj7P/L5XzvRNMy/SLkdCWd45sI00oM7RzKkS6PL7vv6BoE8f0dTXlkWx/h/76Ztw0Cahdkr4zAAWP5rIi8u/pXT/5XcWL09CQ+0UC/QSniAhfAAa/7vgRYCrT4M+WAr+06kM3vNQUbcdm2lxSci1cvrwqxr5/DDX8/REalUmY48mo/5rlo+e8+EGGw+l/+OBwQE4OPjg81mIywsDIC4uPwLuhMmTOC2224rLBsUFESbNm0K30+cOJHFixfz9ddfF+lF+W+DBw9mwIABALzyyivMnDmTrVu3cscdd1wyNm9vb8aPH1/4vlGjRmzatInPP/+8MNGZNGkSzz77LE899VRhuQ4dOgCwatUqtm7dym+//ca11+b/bdO4cePL/ptcKZ1ZpcYxe3nyvw+244H3trA94SwP/msLXwzrTLg9/4rn0TOZhdNId20WwuiezUu970duaMzGg6dYu/ckw+fv4OvhXUp18imL1CwH477ezaLtxwBoFubP092upUFQfkITYPW+5JWd8Xe15G/zt/P2mgN0bxnGdeGVl4yJSPUpmIwg09OOt6dGkotIydq3b1/kfXp6OuPGjWPp0qUkJiaSm5tLZmYmCQkJl9xP69atC3/39fXFbreTnJxcqhhmz57NvHnzSEhIIDMzk5ycHK6//noAkpOTOX78OF27di122507d1K/fv3CJKeqKNGRGsnm48X7gzvS/91NxCWl8cC/tvB/j3QgIxce/Xg7Kek5NA+3M3PAn/D0KP0DpTw8TLzZrw09ZsRyIDmdCf/ew2v3tL78hqW04UAKzy38mePnsvAwwbCbr+GpblFlGoLWo1UYtzcPZcWeEzz/xS4WP9EZL/0RJOJ2fJz5iY7ToosZIpXN6u3JngkxJa53Op2kpabhb/evlKFrV8rX17fI+5EjR7Jy5UrefPNNmjRpgtVq5d577yUn59JD5L29iw6TNZlMOJ3Oy37+Z599xsiRI5k6dSqdOnXC39+fN954gy1btgBgtVovuf3l1lcWJTpSYwXYvPno4Y7cO2cTCaczGPLBNowsDw6knifMbmHe4A74msv+XzjYz8z0/tcz8F9b+OzHI3RuEsxdbepeUaxZjjxeXx7H+xsOAdCwto2p/drQPvLSY16LYzKZmHR3Szb/fopfjp3jvfXxDLv5miuKT0RqmDwHPs4sAJzm4u9DFJGKYzKZLjmCw+l0kuvjic3Hq8ITnbLw8fEhL6/ke4kKbNiwgcGDB9OnTx8gv4fn0KFDlRbXhg0b6Ny5M0888UThsoMHDxb+7u/vT2RkJKtXr+aWW265aPvWrVtz9OhR9u3bV6W9OrpMLDVaiN3CJ0OjCfE3sy85nf2pHvnTSA9uT1iApdz77dwkmOG3NAHgxUW/kHAqo9z72nX0LHfOjC1Mcu6PjmDZ328sV5JTIMRu4eULQ/LeWrmP30+ml3tfIlIDZZ0t/NXDevFskSJydYqMjGTLli0cOnSIlJSUEntboqKiWLRoETt37uTnn3/m/vvvL1XPTHlFRUXx008/8d1337Fv3z5Gjx7Njz/+WKTMuHHjmDp1KjNnzmT//v1s376dWbNmAXDzzTdz0003cc8997By5Uri4+P59ttvWb58eaXFDEp0xAVE1Lbx8dBoAqxemDCYfl9rWtS98j8MnuoaRYfIWqRn5/Lk/20nJ7dsJwhHnpPpq/bR5+2NHDx5njr+Zt4f0oFX+rQqV0/Tf+vXrj43RgWTnevkhS934XTq+ToibuNCopNq2PCzlf+ijY
i4l5EjR+Lp6Unz5s2pU6dOiffcTJs2jVq1atG5c2d69epFTEwMbdu2rbS4/vrXv9K3b1/69+9PdHQ0p06dKtK7A/DQQw8xffp03n77bVq0aEHPnj3Zv39/4fovv/ySDh06MGDAAJo3b87zzz9fqt6rK2EyXODphKmpqQQEBHDu3Dns9rKPZXY4HCxbtowePXpcNDZRXMeJs+f55rtVDOpbcfV4/Gwm3WfEci7Twe3NQ2nXsBZ2qzd2izd2q9eFn97YLV74W7zx8cq/NnAgOZ1nP9/Jz0fPAXBnq3Am3d2SWr4+FRJXgSOnM4iZvo6MnDwm9G7BoE6RFbr/6qDvo2u50vOvu7rSf5fc+I14fdidBGcd3rhuIbMG/KkSopSqoHNazZOVlUV8fDyNGjXCYindhQSn00lqaip2u71ah67Jf1yqHkt7DtY9OuIygnx9CK7gC591A628cW9rHvt4Gyv2nGDFnhOXLG/19sRu9eJMhoOcXCd2ixcT727JXW3qlmqO/LJqEGTjhTuaMfbr3bz+bRy3Nguhfq2r+ynHIm4h8wxQ8LBQNcUiIpVBKatc9W5vEca/HmrP4M6R9G1bj27XhdKxURDNwvypF2jF/w/D0DIdeZxIzSYn18mNUcF898xN9L6+XqUkOQUe/J+GtG9Yi/M5eYxa9Asu0AkrIpdzYeiaHhYqIjXBsGHD8PPzK/Y1bNiw6g6v3HQZSQToel0oXa8LLXF9ntMgPSuX1CwH5zIdmEzQPNxeqQlOAQ8PE6/f25ruM2KJ3Z/CF9uO0q99g0r/XBGpPKaCREcPCxWRGmDChAmMHDmy2HWuPGxZZ1eRUvD0MBFg8ybA5k11pBjX1PHj6W5RTFm+l4nf7OHmpnUI8b86bmB25DnxMJnK9LwkkRqvYOia4Yvdoh4dEaleISEhhISEVHcYFU5D10RcxGM3NqZlPTupWbmMWbK7usOpdIZhsODHBNpPWsXtb/3A4VPnqzskkYqTlT+RyTk0dE1EpLIo0RFxEV6eHky5pw1eHiaW705i2S+J1R1SpTmUcp77527hhS9/4Vymg4Mnz9P37Y3sOnq2ukMTqRj+YeyhMYeNUA1dExGpJEp0RFxI87p2Hv/zNQCM+epXzpzPqeaIKlZunpN31h4kZvo6Nv1+Cou3B8/FNKVFXTunzufwl3c3s2ZvcnWHKXLFnJ3+zoPGRD7Pu0VD10REKokuI4m4mOG3NmH5r0nsT05n4jd7mNb/+ivan2EYnMt0cPhUBodPZ5Bw6jwJpzM4lZ5DTIsw7m1XH48quD/m12PneOHLXew+ngrADU2CeaVPKyJq2xjUqSGPf7Kd9QdSeOTDn3itbytNyCAuL/PCc/ICrGqKRUQqg86uIi7G7OXJ6/e25p53NrJoxzHq2M2E+Fvw8TTh7emBt6cHPl4FP4suy8rJ4/DpDA6fyuDI6QwOnz7P4VMZpGXlFvtZq+OSWfDTESb3aUmzsMqZdSUzJ4/pq/bx3vp48pwGAVZvRvdszj1t/zNtt7/Fm3mDO/DCl7tYvOMYz32xixOpWfztliZVMvOdSEUzDIPMC1879eiIiFQOJToiLqhtRC2GdG7EvA3x/O8Pv1fIPkPtZiKCbEQE+dKwto08p8Hc2N/ZdvgMd85cz8NdInm627X4mivutLHxQAqjFv/C4VMZAPRsHc7YXi2o42++qKyPlwfT7mtDWICFd9Ye5M0V+0hKzWL8XS01I5u4nPM5eRj8J5EXEakIkZGRPP300zz99NPVHUqNoERHxEU9f0dTLN4enEjNxpHnLHzl5Bnk5ObhyDPy3+cWLHfi7elBRJCNhkE2Imr75v9e20aDWjasPp4XfcZfOjZgwr/38O2vScyNjeebXYmM7dWCmBahV9STkpELoxbv5ovtxwAID7AwsXdLujUv+VlGACaTiRfuaEaY3cK4f+/mk80JJKdmM3PAn7B4Xxx/dTEMgz2JqTQK9sXmo9OsXKygF9Xb04TFW7fLiohUBr
XAIi7K4u3J83c0q9TPCA+w8s4D7VgTl8yYr3/lyOlMhn2yjVubhTD+rhY0CLKVel+ZOXn8eOg06/Yls2CnJ2mO/CRnUKeGPBfTtExXtR/qHEmIv5mnFuxkxZ4T3D93M/96qAO1fH3KfIwV7Zej55i4dA9b409Tx9/MyNuv5d52DdTrJEWkZTkA8Ld4afiliEgl0WUkEbmsW5qFsOLpmxl+SxO8PU18H5fMbW/9wOw1B8jJdRa7TW6ekx0JZ5i95gAD3t1Mm/ErGDRvK++tP0Saw0TjYF++GNaJCb1blmvoTvdW4XwyNBq7xYvtCWe5Z85GjpzOuNJDLbfEc5mM+Hwnvf65nq3xpwE4mZbNC1/+wp0zY1m/P6XaYpOaJ/VCj46/WcPWRCTfu+++S926dXE6i7arvXv35uGHH+bgwYP07t2b0NBQ/Pz86NChA6tWrSr3502bNo1WrVrh6+tLgwYNeOKJJ0hPTy9SZsOGDfz5z3/GZrNRq1YtYmJiOHMm/4HHTqeTKVOm0KRJE8xmMxEREUyePLnc8VQG9eiISKlYfTwZGdOUu/9Uj9FLfmXT76d447u9LNp+lEl3t+J/Ggfxe8p5NhxIYf3+FDb9fuqiSQ7qBljodE0QttQjvDCwE37Wi+/FKYuOjYL44vHODJ63ld9PnqfvOxv5YEgHWtQNuKL9lsX57Fz+d93vvLvuIFmO/Map75/q8VS3KFbuOcHM1fuJS0rjgX9t4dZmIbzYoxlNQvyrLD6pmQq+G3bNuCZSNQwDHJe4GOZ05q/P8QSPCu4H8LZBKXpu+/Xrx5NPPsmaNWvo2rUrAKdPn2b58uUsW7aM9PR0evToweTJkzGbzXz00Uf06tWLvXv3EhERUeawPDw8mDlzJo0aNeL333/niSee4Pnnn+ftt98GYOfOnXTt2pWHH36YGTNm4OXlxZo1a8jLy58yctSoUcydO5e33nqLG264gcTEROLi4socR2Uq1xl29uzZvPHGGyQlJdGmTRtmzZpFx44dSyy/cOFCRo8ezaFDh4iKiuL111+nR48e5Q5aRKpPkxA/5j8azVc7jzNp6R4OnjzPgLmbCfYzk5KeXaSs3eJF52uC6dKkNl2aBNMo2Jfc3FyWLUvA7FUxDcm1of4seqILg9/fSlxSGve8s5Hm4Xbq17JRv5b1Dz+t1A20Vti9PHlOgy+3H+XN7/aSnJZ/3B0jg3i553W0rh8IwCM3NuaetvWZ+f1+Pt50mO/jkvlh30nu7xjB092iqO13ZYmeuK7CHh09LFSkajgy4JW6Ja72AAIr67NfPA4+vpctVqtWLbp37878+fMLE50vvviC4OBgbrnlFjw8PGjTpk1h+YkTJ7J48WK+/vprhg8fXuaw/jhhQWRkJJMmTWLYsGGFic6UKVNo37594XuAFi1aAJCWlsaMGTP45z//yUMPPQTANddcww033FDmOCpTmc+wCxYsYMSIEcyZM4fo6GimT59OTEwMe/fuJSQk5KLyGzduZMCAAbz66qv07NmT+fPnc/fdd7N9+3ZatmxZIQchIlXLZDJx95/qcUvTEN5YEcenWxJISc/Gx8uDDpG16HxNMDc0CaZlvYAquTclLMDC58M6MezjbWw8eIrtCWfZnnC22LJ1/M1FEqCIIBuRtX1pXMeXEH9zqe6X2HgwhUnf/MaexPxn/kQE2XixRzNiWoRdtH0tXx/G9mrBg//TkFe/jWPlnhN8vPkwS3YcY/itTXioc2Spki/DMEhJzyHhdAYnUrOwW7wJsZup42cm0Oat+zxcTOE9OhU4i6GIuL6BAwfy6KOP8vbbb2M2m/n000/5y1/+goeHB+np6YwbN46lS5eSmJhIbm4umZmZJCQklOuzVq1axauvvkpcXBypqank5uaSlZVFRkYGNpuNnTt30q9fv2K3/e2338jOzi5MyGqqMp9hp02bxqOPPsqQIUMAmDNnDkuXLmXevHn84x//uKj8jBkzuOOOO3juueeA/Oxz5c
qV/POf/2TOnDlXGL6IVKcAmzeT7m7FkC6NOJmWzfUNAqtt9jO7xZtPhkaz+3gqR85kcPRMBkfPZHL0TCbHzmRy5EwGGTl5nEzL5mRaNjuKSYRsPp40rO1Lo2AbjYJ9iaztm/8z2Jfavj7Ep5znlWVxrPrtBJB/Nf7vt0YxqHNDzF6XPu7GdfyYO6h9kSTp1W/j+HjzYf7RvRl3tgony+Hk6JkMEk7/53XkdAZHTmeScDqDTEdesfv28fSgjr+ZYH8zIRdedfzzn69U58L75nXteHvqtsya4j9D13SPjkiV8Lbl96yUwOl0kpqWht3fH4/KGLpWSr169cIwDJYuXUqHDh2IjY3lrbfeAmDkyJGsXLmSN998kyZNmmC1Wrn33nvJyckpc0iHDh2iZ8+ePP7440yePJmgoCDWr1/P0KFDycnJwWazYbVaS9z+UutqkjIlOjk5OWzbto1Ro0YVLvPw8KBbt25s2rSp2G02bdrEiBEjiiyLiYlhyZIlJX5OdnY22dn/GQKTmpp/1dThcOBwOMoScuF2f/wprkn1WHNFBJqJCDQDThyO4icnKFDZ9dgs1Eaz0IsbFcMwOJvp4NiZLI6ezeTY2fwkKOF0BvEpGRw7m0lGTh6/Jaby24Wemj/yM3uR5cgj12ng6WHi/g71GX7LNQT5+oBx+eMu0CEigEXDolmy8zhvrTrA0TOZDJ+/gxctvxQOZyqJyQRhdgthdjNpWbmcTM/mXGYuOXlOjl04ppJsf+nWcg2T0vetchTUtV1D10Sqhsl06eFjTid45+WXqehEpwwsFgt9+/bl008/5cCBAzRt2pS2bdsC+RMDDB48mD59+gCQnp7OoUOHyvU527Ztw+l0MnXq1MLE7vPPPy9SpnXr1qxevZrx48dftH1UVBRWq5XVq1fzyCOPlCuGqlCmM2xKSgp5eXmEhhZ91kVoaGiJNx8lJSUVWz4pKanEz3n11VeL/UddsWIFNlvps+L/tnLlynJvKzWH6tE9VHc9hgPhHtAhGAiGXCecyoaTWSZOZl74mQUnM02czYH07Pw/TFvUctK7oZNQj3g2/xBf7s+3As9eB98fN/H9cY/CP3wtngbBFqhtNqhthtoWg9oX3geZwcsjHfjPrDi5Tkh1QGoOpDpMRX6mOeBcjonMPFi3ekVp7oW9SEZG9c1k584Khq75aeiaiPyXgQMH0rNnT3bv3s0DDzxQuDwqKopFixbRq1cvTCYTo0ePvmiGttJq0qQJDoeDWbNm0atXLzZs2HDRSKtRo0bRqlUrnnjiCYYNG4aPjw9r1qyhX79+BAcH88ILL/D888/j4+NDly5dOHnyJLt372bo0KFXdPwVqUaeYUeNGlWkFyg1NZUGDRpw++23Y7fby7w/h8PBypUrue222/D21jABV6V6dA+uWI9ZjjyOnM4EE0SF+FXovvsAZzJyOH42i3qBVgKsNeu5KgU96lKxHr2hEbXPJ9CrdXh1hyIiNcytt95KUFAQe/fu5f777y9cPm3aNB5++GE6d+5cmGiU9xzdpk0bpk2bxuuvv86oUaO46aabePXVVxk0aFBhmWuvvZYVK1bw4osv0rFjR6xWK9HR0QwYMACA0aNH4+XlxZgxYzh+/Djh4eEMGzbsyg6+gpUp0QkODsbT05MTJ04UWX7ixAnCwsKK3SYsLKxM5QHMZjNm88WzEXl7e1/RH0ZXur3UDKpH9+BK9ejt7U1zm6XS9h8S4E1IwOVn5KkOrlJHrqZhbRtNAw0a1i7/KAURcU8eHh4cP37x/USRkZF8//33RZb97W9/K/K+LEPZnnnmGZ555pkiyx588MEi72+++WY2bNhQYpwvvfQSL730Uqk/s6qVaRCij48P7dq1Y/Xq1YXLnE4nq1evplOnTsVu06lTpyLlIX/ISknlRURERERErlSZ77YaMWIEc+fO5cMPP+S3337j8ccf5/z584WzsA0aNKjIZAVPPfUUy5cvZ+rUqcTFxTFu3Dh++umncs
33LSIiIiIil/bpp5/i5+dX7KvgWThXgzLfo9O/f39OnjzJmDFjSEpK4vrrr2f58uWFEw4kJCQUmZavc+fOzJ8/n5dffpkXX3yRqKgolixZomfoiIiIiIhUgrvuuovo6Ohi111NQ5LLNRnB8OHDS+yRWbt27UXL+vXrV+IDh0REREREpOL4+/vj7+9f3WFUOz09TkRERERE3I4SHRERERFxO4ZhVHcIcgUqov6U6IiIiIiI2yi4B0UPPHZtBfV3JfcU1cgHhoqIiIiIlIenpyeBgYEkJycDYLPZLvsgZqfTSU5ODllZWUUm1ZKqZxgGGRkZJCcnExgYiKenZ7n3pURHRERERNxKwYPpC5KdyzEMg8zMTKxW62WTIqkagYGBhfVYXkp0RERERMStmEwmwsPDCQkJweFwXLa8w+Fg3bp13HTTTVfV9Ms1lbe39xX15BRQoiMiIiIibsnT07NUfzB7enqSm5uLxWJRouNGNAhRRERERETcjhIdERERERFxO0p0RERERETE7bjEPToFDwxKTU0t1/YOh4OMjAxSU1M17tKFqR7dg+rRtRScd/XgvaLULkkB1aV7UD26ltK2TS6R6KSlpQHQoEGDao5EROTqlJaWRkBAQHWHUWOoXRIRqX6Xa5tMhgtcpnM6nRw/fhx/f/9yzW2emppKgwYNOHLkCHa7vRIilKqgenQPqkfXYhgGaWlp1K1bVw/R+wO1S1JAdekeVI+upbRtk0v06Hh4eFC/fv0r3o/dbtd/XjegenQPqkfXoZ6ci6ldkv+munQPqkfXUZq2SZfnRERERETE7SjRERERERERt3NVJDpms5mxY8diNpurOxS5AqpH96B6FNH3wJ2oLt2D6tE9ucRkBCIiIiIiImVxVfToiIiIiIjI1UWJjoiIiIiIuB0lOiIiIiIi4nauqkTHZDKxZMmS6g5DrpDq0T0dOnQIk8nEzp07qzsUkSqlc5rrUx26L7VNrs3tEp3Zs2cTGRmJxWIhOjqarVu3VndIUkbjxo3DZDIVeTVr1qy6w5LLWLduHb169aJu3brFNvqGYTBmzBjCw8OxWq1069aN/fv3V0+wIlVMbZNrU7vkutQ2Xd3cKtFZsGABI0aMYOzYsWzfvp02bdoQExNDcnJydYcmZdSiRQsSExMLX+vXr6/ukOQyzp8/T5s2bZg9e3ax66dMmcLMmTOZM2cOW7ZswdfXl5iYGLKysqo4UpGqpbbJPahdck1qm65ubpXoTJs2jUcffZQhQ4bQvHlz5syZg81mY968ecWWHzt2LOHh4ezatauKI5XL8fLyIiwsrPAVHBxcYlnVY83QvXt3Jk2aRJ8+fS5aZxgG06dP5+WXX6Z37960bt2ajz76iOPHj5c43CMvL4+HH36YZs2akZCQUMnRi1QetU3uQe2Sa1LbdHVzm0QnJyeHbdu20a1bt8JlHh4edOvWjU2bNhUpaxgGTz75JB999BGxsbG0bt26qsOVy9i/fz9169alcePGDBw4sNiTierRdcTHx5OUlFTk+xkQEEB0dPRF30+A7Oxs+vXrx86dO4mNjSUiIqIqwxWpMGqb3IfaJfejtsn9eVV3ABUlJSWFvLw8QkNDiywPDQ0lLi6u8H1ubi4PPPAAO3bsYP369dSrV6+qQ5XLiI6O5oMPPqBp06YkJiYyfvx4brzxRn799Vf8/f0B1aOrSUpKAij2+1mwrkB6ejp33nkn2dnZrFmzhoCAgCqLU6SiqW1yD2qX3JPaJvfnNolOaT3zzDOYzWY2b958yW5nqT7du3cv/L1169ZER0fTsGFDPv/8c4YOHQqoHt3ZgAEDqF+/Pt9//z1Wq7W6wxGpEjqn1Wxql0Rtk2tym6FrwcHBeHp6cuLEiSLLT5w4QVhYWOH72267jWPHjvHdd99VdYhSToGBgVx77bUcOHCgcJnq0bUUfAcv9/0E6NGjB7t27Sp22ICIq1Hb5J7ULrkHtU3uz20SHR8fH9q1a8fq1asLlzmdTl
avXk2nTp0Kl911113Mnz+fRx55hM8++6w6QpUySk9P5+DBg4SHhxcuUz26lkaNGhEWFlbk+5mamsqWLVuKfD8BHn/8cV577TXuuusufvjhh6oOVaRCqW1yT2qX3IPapquA4UY+++wzw2w2Gx988IGxZ88e47HHHjMCAwONpKQkwzAMAzAWL15sGIZhLFy40LBYLMbChQurMWIpzrPPPmusXbvWiI+PNzZs2GB069bNCA4ONpKTkw3DUD3WVGlpacaOHTuMHTt2GIAxbdo0Y8eOHcbhw4cNwzCM1157zQgMDDS++uorY9euXUbv3r2NRo0aGZmZmYZhGEZ8fLwBGDt27DAMwzDeeustw8/Pz4iNja2uQxKpEGqbXJ/aJdeltunq5laJjmEYxqxZs4yIiAjDx8fH6Nixo7F58+bCdX88ERmGYSxYsMCwWCzGl19+WQ2RSkn69+9vhIeHGz4+Pka9evWM/v37GwcOHChcr3qsmdasWWMAF70eeughwzAMw+l0GqNHjzZCQ0MNs9lsdO3a1di7d2/h9v/dmBiGYUydOtXw9/c3NmzYUMVHI1Kx1Da5NrVLrktt09XNZBiGUXX9RyIiIiIiIpXPbe7RERERERERKaBER0RERERE3I4SHRERERERcTtKdERERERExO0o0REREREREbejREdERERERNyOEh0REREREXE7SnRERERERMTtKNERqSCDBw/m7rvvru4wREREALVLIkp0RERERETE7SjRESmjL774glatWmG1WqlduzbdunXjueee48MPP+Srr77CZDJhMplYu3YtAEeOHOG+++4jMDCQoKAgevfuzaFDhwr3V3DFbfz48dSpUwe73c6wYcPIycmpngMUERGXonZJpHhe1R2AiCtJTExkwIABTJkyhT59+pCWlkZsbCyDBg0iISGB1NRU3n//fQCCgoJwOBzExMTQqVMnYmNj8fLyYtKkSdxxxx3s2rULHx8fAFavXo3FYmHt2rUcOnSIIUOGULt2bSZPnlydhysiIjWc2iWRkinRESmDxMREcnNz6du3Lw0bNgSgVatWAFitVrKzswkLCyss/8knn+B0OnnvvfcwmUwAvP/++wQGBrJ27Vpuv/12AHx8fJg3bx42m40WLVowYcIEnnvuOSZOnIiHhzpeRUSkeGqXREqm/6kiZdCmTRu6du1Kq1at6NevH3PnzuXMmTMllv/55585cOAA/v7++Pn54efnR1BQEFlZWRw8eLDIfm02W+H7Tp06kZ6ezpEjRyr1eERExLWpXRIpmXp0RMrA09OTlStXsnHjRlasWMGsWbN46aWX2LJlS7Hl09PTadeuHZ9++ulF6+rUqVPZ4YqIiJtTuyRSMiU6ImVkMpno0qULXbp0YcyYMTRs2JDFixfj4+NDXl5ekbJt27ZlwYIFhISEYLfbS9znzz//TGZmJlarFYDNmzfj5+dHgwYNKvVYRETE9aldEimehq6JlMGWLVt45ZVX+Omnn0hISGDRokWcPHmS6667jsjISHbt2sXevXtJSUnB4XAwcOBAgoOD6d27N7GxscTHx7N27Vr+/ve/c/To0cL95uTkMHToUPbs2cOyZcsYO3Ysw4cP1zhoERG5JLVLIiVTj45IGdjtdtatW8f06dNJTU2lYcOGTJ06le7du9O+fXvWrl1L+/btSU9PZ82aNfz5z39m3bp1vPDCC/Tt25e0tDTq1atH165di1xJ69q1K1FRUdx0001kZ2czYMAAxo0bV30HKiIiLkHtkkjJTIZhGNUdhMjVbPDgwZw9e5YlS5ZUdygiIiJql8RtqP9RRERERETcjhIdERERERFxOxq6JiIiIiIibkc9OiIiIiIi4naU6IiIiIiIiNtRoiMiIiIiIm5HiY6IiIiIiLgdJToiIiIiIuJ2lOiIiIiIiIjbUaIjIiIiIiJuR4mOiIiIiIi4HSU6IiIiIiLidv4fib6ZF34U870AAAAASUVORK5CYII=\n"
          },
          "metadata": {}
        }
      ],
      "source": [
        "plot_learning_curves(history, sample_step=500)  # x-axis is in steps"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 22,
      "metadata": {
        "ExecuteTime": {
          "end_time": "2025-06-26T01:45:37.818553Z",
          "start_time": "2025-06-26T01:45:37.816716Z"
        },
        "id": "hEf0XLgRaGiX"
      },
      "outputs": [],
      "source": [
        "# Extract the Kaggle test-set archive './test.7z' into the competitions directory.\n",
        "# Use a context manager so the archive handle is closed even if extractall() raises,\n",
        "# instead of the manual open/close pattern that leaks the handle on error.\n",
        "with py7zr.SevenZipFile(r'./test.7z', 'r') as archive:\n",
        "    archive.extractall(path=r'./competitions/cifar-10/')"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 23,
      "metadata": {
        "id": "yek4MjF_aGiX",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "45f72c05-e01f-44ad-d0fa-d53b3f684efd"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "/usr/local/lib/python3.11/dist-packages/torch/utils/data/dataloader.py:624: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
            "  warnings.warn(\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "正在预测测试集...\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stderr",
          "text": [
            "\r预测进度:   0%|          | 0/2344 [00:00<?, ?it/s]/usr/local/lib/python3.11/dist-packages/torch/utils/data/dataloader.py:624: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
            "  warnings.warn(\n",
            "预测进度: 100%|██████████| 2344/2344 [01:57<00:00, 19.91it/s]\n"
          ]
        },
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "id列是否有重复值: False\n",
            "预测完成，结果已保存至 cifar10_submission.csv\n"
          ]
        }
      ],
      "source": [
        "# Imports needed for test-set inference\n",
        "import os\n",
        "import pandas as pd\n",
        "from PIL import Image\n",
        "import torch\n",
        "from torch.utils.data import Dataset, DataLoader\n",
        "from torchvision import transforms\n",
        "import tqdm\n",
        "\n",
        "# Test dataset: reads the unlabeled Kaggle test images from a flat directory of .png files.\n",
        "class CIFAR10TestDataset(Dataset):\n",
        "    def __init__(self, img_dir, transform=None):\n",
        "        \"\"\"\n",
        "        Initialize the test dataset.\n",
        "\n",
        "        Args:\n",
        "            img_dir: directory containing the test .png images\n",
        "            transform: optional image preprocessing transform\n",
        "        \"\"\"\n",
        "        self.img_dir = img_dir\n",
        "        self.transform = transform\n",
        "        # Sort for a deterministic iteration order (os.listdir order is filesystem-dependent).\n",
        "        self.img_files = sorted(f for f in os.listdir(img_dir) if f.endswith('.png'))\n",
        "\n",
        "    def __len__(self):\n",
        "        return len(self.img_files)\n",
        "\n",
        "    def __getitem__(self, idx):\n",
        "        img_path = os.path.join(self.img_dir, self.img_files[idx])\n",
        "        image = Image.open(img_path).convert('RGB')\n",
        "\n",
        "        if self.transform:\n",
        "            image = self.transform(image)\n",
        "\n",
        "        # Image id is the file name without its extension (e.g. '123.png' -> 123).\n",
        "        img_id = int(os.path.splitext(self.img_files[idx])[0])\n",
        "\n",
        "        return image, img_id\n",
        "\n",
        "# Prediction function\n",
        "def predict_test_set(model, img_dir, labels_file, device, batch_size=64):\n",
        "    \"\"\"\n",
        "    Predict the test set and write a Kaggle submission CSV.\n",
        "\n",
        "    Args:\n",
        "        model: trained classification model\n",
        "        img_dir: directory with the test images\n",
        "        labels_file: submission template path (kept for interface compatibility; no longer read,\n",
        "                     since the submission frame is built directly from the predictions)\n",
        "        device: compute device\n",
        "        batch_size: DataLoader batch size\n",
        "    \"\"\"\n",
        "    # Same normalization statistics as used for the training set.\n",
        "    transform = transforms.Compose([\n",
        "        transforms.ToTensor(),\n",
        "        transforms.Normalize((0.4917, 0.4823, 0.4467), (0.2024, 0.1995, 0.2010))\n",
        "    ])\n",
        "\n",
        "    # Cap the worker count at the available CPUs to avoid the DataLoader\n",
        "    # \"excessive worker creation\" warning seen when 4 workers exceed the system's cores.\n",
        "    num_workers = min(4, os.cpu_count() or 1)\n",
        "    test_dataset = CIFAR10TestDataset(img_dir, transform=transform)\n",
        "    test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=num_workers)\n",
        "\n",
        "    # Evaluation mode: disables dropout and uses running BatchNorm statistics.\n",
        "    model.eval()\n",
        "\n",
        "    predictions = {}\n",
        "\n",
        "    print(\"正在预测测试集...\")\n",
        "    with torch.no_grad():  # no gradients needed for inference\n",
        "        for images, img_ids in tqdm.tqdm(test_loader, desc=\"预测进度\"):\n",
        "            images = images.to(device)\n",
        "            outputs = model(images)\n",
        "            _, predicted = torch.max(outputs, 1)  # argmax over class logits is the predicted label\n",
        "\n",
        "            # Record the prediction for every image in the batch.\n",
        "            for i, img_id in enumerate(img_ids):\n",
        "                predictions[img_id.item()] = predicted[i].item()\n",
        "\n",
        "    # CIFAR-10 class names, indexed by the model's output label.\n",
        "    class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n",
        "\n",
        "    # Map numeric labels to class-name strings.\n",
        "    labeled_predictions = {img_id: class_names[pred] for img_id, pred in predictions.items()}\n",
        "\n",
        "    # Build the submission frame directly from the predictions, sorted by id.\n",
        "    submission_df = pd.DataFrame({\n",
        "        'id': list(labeled_predictions.keys()),\n",
        "        'label': list(labeled_predictions.values())\n",
        "    })\n",
        "    submission_df = submission_df.sort_values(by='id')\n",
        "\n",
        "    # Sanity check: every test image should appear exactly once.\n",
        "    has_duplicates = submission_df['id'].duplicated().any()\n",
        "    print(f\"id列是否有重复值: {has_duplicates}\")\n",
        "    # Save the predictions.\n",
        "    output_file = 'cifar10_submission.csv'\n",
        "    submission_df.to_csv(output_file, index=False)\n",
        "    print(f\"预测完成，结果已保存至 {output_file}\")\n",
        "\n",
        "# Run inference on the extracted test set.\n",
        "img_dir = r\"competitions/cifar-10/test\"\n",
        "labels_file = r\"./sampleSubmission.csv\"\n",
        "predict_test_set(model, img_dir, labels_file, device, batch_size=128)\n"
      ]
    }
  ],
  "metadata": {
    "accelerator": "GPU",
    "colab": {
      "gpuType": "T4",
      "provenance": []
    },
    "kernelspec": {
      "display_name": "Python 3",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.12.3"
    },
    "widgets": {
      "application/vnd.jupyter.widget-state+json": {
        "14786a5793af4a6a8adab22fafce3819": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HBoxModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HBoxModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HBoxView",
            "box_style": "",
            "children": [
              "IPY_MODEL_5a4fcda909bb4a5b9de631c64811c7db",
              "IPY_MODEL_9106c42ddbfb422198f28c7abfd448bc",
              "IPY_MODEL_37949ba29bd54c389b3bd85bf59e70f9"
            ],
            "layout": "IPY_MODEL_74a2212a3ba142ff853abbc47ee5ab50"
          }
        },
        "5a4fcda909bb4a5b9de631c64811c7db": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HTMLModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_8b8f8329abcc42569b05a078f5f4aff3",
            "placeholder": "​",
            "style": "IPY_MODEL_a9e94152f03d4ad38cd6c487e880a5c2",
            "value": " 40%"
          }
        },
        "9106c42ddbfb422198f28c7abfd448bc": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "FloatProgressModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "FloatProgressModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "ProgressView",
            "bar_style": "danger",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_f1550c7347964f4691f55dcdc3ba4291",
            "max": 35200,
            "min": 0,
            "orientation": "horizontal",
            "style": "IPY_MODEL_0c37256bf2f6455697b1ad8380f534c9",
            "value": 14000
          }
        },
        "37949ba29bd54c389b3bd85bf59e70f9": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "HTMLModel",
          "model_module_version": "1.5.0",
          "state": {
            "_dom_classes": [],
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "HTMLModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/controls",
            "_view_module_version": "1.5.0",
            "_view_name": "HTMLView",
            "description": "",
            "description_tooltip": null,
            "layout": "IPY_MODEL_d3620964d5e74ddc9b5540cb8bcd396a",
            "placeholder": "​",
            "style": "IPY_MODEL_020c86af32014f5f84dff06106b0b6a8",
            "value": " 14000/35200 [12:38&lt;16:45, 21.09it/s, epoch=19, loss=0.0270, acc=98.44%]"
          }
        },
        "74a2212a3ba142ff853abbc47ee5ab50": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "8b8f8329abcc42569b05a078f5f4aff3": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "a9e94152f03d4ad38cd6c487e880a5c2": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "DescriptionStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        },
        "f1550c7347964f4691f55dcdc3ba4291": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "0c37256bf2f6455697b1ad8380f534c9": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "ProgressStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "ProgressStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "bar_color": null,
            "description_width": ""
          }
        },
        "d3620964d5e74ddc9b5540cb8bcd396a": {
          "model_module": "@jupyter-widgets/base",
          "model_name": "LayoutModel",
          "model_module_version": "1.2.0",
          "state": {
            "_model_module": "@jupyter-widgets/base",
            "_model_module_version": "1.2.0",
            "_model_name": "LayoutModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "LayoutView",
            "align_content": null,
            "align_items": null,
            "align_self": null,
            "border": null,
            "bottom": null,
            "display": null,
            "flex": null,
            "flex_flow": null,
            "grid_area": null,
            "grid_auto_columns": null,
            "grid_auto_flow": null,
            "grid_auto_rows": null,
            "grid_column": null,
            "grid_gap": null,
            "grid_row": null,
            "grid_template_areas": null,
            "grid_template_columns": null,
            "grid_template_rows": null,
            "height": null,
            "justify_content": null,
            "justify_items": null,
            "left": null,
            "margin": null,
            "max_height": null,
            "max_width": null,
            "min_height": null,
            "min_width": null,
            "object_fit": null,
            "object_position": null,
            "order": null,
            "overflow": null,
            "overflow_x": null,
            "overflow_y": null,
            "padding": null,
            "right": null,
            "top": null,
            "visibility": null,
            "width": null
          }
        },
        "020c86af32014f5f84dff06106b0b6a8": {
          "model_module": "@jupyter-widgets/controls",
          "model_name": "DescriptionStyleModel",
          "model_module_version": "1.5.0",
          "state": {
            "_model_module": "@jupyter-widgets/controls",
            "_model_module_version": "1.5.0",
            "_model_name": "DescriptionStyleModel",
            "_view_count": null,
            "_view_module": "@jupyter-widgets/base",
            "_view_module_version": "1.2.0",
            "_view_name": "StyleView",
            "description_width": ""
          }
        }
      }
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}